Merge branch 'master-AIO' into feature/IO-2433-esignature
This commit is contained in:
792
server/ai/bill-ocr/bill-ocr-generator.js
Normal file
792
server/ai/bill-ocr/bill-ocr-generator.js
Normal file
@@ -0,0 +1,792 @@
|
||||
|
||||
|
||||
const Fuse = require('fuse.js');
|
||||
|
||||
const { standardizedFieldsnames } = require('./bill-ocr-normalize');
|
||||
const InstanceManager = require("../../utils/instanceMgr").default;
|
||||
|
||||
// Accept a candidate price/cost only when it falls within ±50% of the matched
// jobline's act_price; amounts outside that band are treated as unrelated
// numbers picked up from the invoice line.
const PRICE_PERCENT_MARGIN_TOLERANCE = 0.5; //Used to make sure prices and costs are likely.
// When quantity > 1, an amount is treated as an extended total (quantity x
// unit price) if dividing it by the quantity lands within 3% of the matched
// jobline's price.
const PRICE_QUANTITY_MARGIN_TOLERANCE = 0.03; //Used to make sure that if there is a quantity, the price is likely a unit price.
|
||||
// Helper function to normalize fields
|
||||
// Canonicalize a part number for matching: strip every character that is not
// a letter or digit, then uppercase (e.g. "ab-12.3 x" -> "AB123X").
const normalizePartNumber = (str) => str.replace(/[^a-zA-Z0-9]/g, '').toUpperCase();
|
||||
|
||||
// Canonicalize free text for fuzzy matching: drop punctuation, collapse runs
// of whitespace to single spaces, trim the ends, and uppercase.
const normalizeText = (str) => {
    const alphanumericOnly = str.replace(/[^a-zA-Z0-9\s]/g, '');
    const singleSpaced = alphanumericOnly.replace(/\s+/g, ' ');
    return singleSpaced.trim().toUpperCase();
};
|
||||
// Normalize an OCR'd money string to a bare numeric string ("292.37").
// Non-strings pass through untouched.
//
// European-style decimal commas ("292,37") are handled without breaking
// US thousands formatting ("1,234.56"): the last comma is promoted to a
// decimal point only when the string contains no '.' AND the text after
// that comma is 1-2 digits; otherwise all commas are dropped as thousands
// separators / OCR noise. Finally every character other than digits,
// '.' and '-' is stripped.
const normalizePrice = (str) => {
    if (typeof str !== 'string') return str;

    let value = str.trim();

    if (!value.includes('.') && value.includes(',')) {
        const lastComma = value.lastIndexOf(',');
        const decimalCandidate = value.slice(lastComma + 1).trim();

        if (/^\d{1,2}$/.test(decimalCandidate)) {
            // Promote the final comma to a decimal point; any earlier
            // commas were thousands separators.
            const integerPart = value.slice(0, lastComma).replace(/,/g, '');
            value = `${integerPart}.${decimalCandidate}`;
        } else {
            // Commas are thousands separators (or noise) — drop them all.
            value = value.replace(/,/g, '');
        }
    }

    return value.replace(/[^0-9.-]+/g, "");
};
|
||||
|
||||
// Snap `value` to the nearest multiple of `increment` (e.g. increment 0.005
// gives half-percent steps). Inputs that are not finite numbers, or a
// non-positive increment, are returned unchanged. The result is re-parsed at
// the increment's decimal precision so float artifacts such as
// 0.20500000000000002 do not leak out.
const roundToIncrement = (value, increment) => {
    const usable = (n) => typeof n === 'number' && isFinite(n);
    if (!usable(value) || !usable(increment) || increment <= 0) {
        return value;
    }

    const snapped = Math.round((value + Number.EPSILON) / increment) * increment;
    // Number of decimals implied by the increment (0.005 -> 3).
    const precision = Math.max(0, Math.ceil(-Math.log10(increment)));
    return parseFloat(snapped.toFixed(precision));
};
|
||||
|
||||
//More complex function. Not necessary at the moment, keeping for reference.
|
||||
// const normalizePriceFinal = (str) => {
|
||||
// if (typeof str !== 'string') {
|
||||
// // If it's already a number, format to 2 decimals
|
||||
// const num = parseFloat(str);
|
||||
// return isNaN(num) ? 0 : num;
|
||||
// }
|
||||
|
||||
// // First, try to extract valid decimal number patterns (e.g., "123.45")
|
||||
// const decimalPattern = /\d+\.\d{1,2}/g;
|
||||
// const decimalMatches = str.match(decimalPattern);
|
||||
|
||||
// if (decimalMatches && decimalMatches.length > 0) {
|
||||
// // Found valid decimal number(s)
|
||||
// const numbers = decimalMatches.map(m => parseFloat(m)).filter(n => !isNaN(n) && n > 0);
|
||||
|
||||
// if (numbers.length === 1) {
|
||||
// return numbers[0];
|
||||
// }
|
||||
|
||||
// if (numbers.length > 1) {
|
||||
// // Check if all numbers are the same (e.g., "47.57.47.57" -> [47.57, 47.57])
|
||||
// const uniqueNumbers = [...new Set(numbers)];
|
||||
// if (uniqueNumbers.length === 1) {
|
||||
// return uniqueNumbers[0];
|
||||
// }
|
||||
|
||||
// // Check if numbers are very close (within 1% tolerance)
|
||||
// const avg = numbers.reduce((a, b) => a + b, 0) / numbers.length;
|
||||
// const allClose = numbers.every(num => Math.abs(num - avg) / avg < 0.01);
|
||||
|
||||
// if (allClose) {
|
||||
// return avg;
|
||||
// }
|
||||
|
||||
// // Return the first number (most likely correct)
|
||||
// return numbers[0];
|
||||
// }
|
||||
// }
|
||||
|
||||
// // Fallback: Split on common delimiters and extract all potential numbers
|
||||
// const parts = str.split(/[\/|\\,;]/).map(part => part.trim()).filter(part => part.length > 0);
|
||||
|
||||
// if (parts.length > 1) {
|
||||
// // Multiple values detected - extract and parse all valid numbers
|
||||
// const numbers = parts
|
||||
// .map(part => {
|
||||
// const cleaned = part.replace(/[^0-9.-]+/g, "");
|
||||
// const parsed = parseFloat(cleaned);
|
||||
// return isNaN(parsed) ? null : parsed;
|
||||
// })
|
||||
// .filter(num => num !== null && num > 0);
|
||||
|
||||
// if (numbers.length === 0) {
|
||||
// // No valid numbers found, try fallback to basic cleaning
|
||||
// const cleaned = str.replace(/[^0-9.-]+/g, "");
|
||||
// const parsed = parseFloat(cleaned);
|
||||
// return isNaN(parsed) ? 0 : parsed;
|
||||
// }
|
||||
|
||||
// if (numbers.length === 1) {
|
||||
// return numbers[0];
|
||||
// }
|
||||
|
||||
// // Multiple valid numbers
|
||||
// const uniqueNumbers = [...new Set(numbers)];
|
||||
|
||||
// if (uniqueNumbers.length === 1) {
|
||||
// return uniqueNumbers[0];
|
||||
// }
|
||||
|
||||
// // Check if numbers are very close (within 1% tolerance)
|
||||
// const avg = numbers.reduce((a, b) => a + b, 0) / numbers.length;
|
||||
// const allClose = numbers.every(num => Math.abs(num - avg) / avg < 0.01);
|
||||
|
||||
// if (allClose) {
|
||||
// return avg;
|
||||
// }
|
||||
|
||||
// // Return the first valid number
|
||||
// return numbers[0];
|
||||
// }
|
||||
|
||||
// // Single value or no delimiters, clean normally
|
||||
// const cleaned = str.replace(/[^0-9.-]+/g, "");
|
||||
// const parsed = parseFloat(cleaned);
|
||||
// return isNaN(parsed) ? 0 : parsed;
|
||||
// };
|
||||
|
||||
|
||||
|
||||
// Helper function to calculate Textract OCR confidence (0-100%)
|
||||
// Score how reliably Textract read one invoice line, on a 0-100 scale.
//
// Computes a weighted average of the per-field OCR confidences — fields whose
// normalized label is actual_cost/actual_price weigh 4, part_no/line_desc/
// quantity weigh 3.5, everything else 1 — then applies a 20% penalty for each
// critical piece that is absent (actual_cost, actual_price, line_desc, and a
// raw QUANTITY value from Textract). Returns 0 for an empty line item or one
// with no numeric confidences at all.
const calculateTextractConfidence = (textractLineItem) => {
    if (!textractLineItem || Object.keys(textractLineItem).length === 0) {
        return 0;
    }

    const allFields = Object.values(textractLineItem);
    // Only fields carrying a truthy numeric confidence participate.
    const scoredFields = allFields.filter(
        (field) => field.confidence && typeof field.confidence === 'number'
    );
    if (scoredFields.length === 0) {
        return 0;
    }

    let weightedSum = 0;
    let totalWeight = 0;
    for (const field of scoredFields) {
        const label = field.normalizedLabel;
        let weight = 1;
        if (label === standardizedFieldsnames.actual_cost
            || label === standardizedFieldsnames.actual_price) {
            weight = 4;
        } else if (label === standardizedFieldsnames.part_no
            || label === standardizedFieldsnames.line_desc
            || label === standardizedFieldsnames.quantity) {
            weight = 3.5;
        }
        weightedSum += field.confidence * weight;
        totalWeight += weight;
    }

    let avgConfidence = totalWeight > 0 ? weightedSum / totalWeight : 0;

    // Each missing critical field knocks 20% off the average.
    const hasLabel = (label) => allFields.some((f) => f.normalizedLabel === label);
    // Quantity is not normalized — the raw Textract QUANTITY value is used.
    const missingCount =
        (hasLabel(standardizedFieldsnames.actual_cost) ? 0 : 1) +
        (hasLabel(standardizedFieldsnames.actual_price) ? 0 : 1) +
        (hasLabel(standardizedFieldsnames.line_desc) ? 0 : 1) +
        (textractLineItem?.QUANTITY?.value ? 0 : 1);

    avgConfidence *= 1.0 - missingCount * 0.20;

    return Math.round(avgConfidence * 100) / 100; // Round to 2 decimal places
};
|
||||
|
||||
// Convert a jobline fuzzy-match outcome into a 0-100 confidence score.
//
// Base score comes from the best match's finalScore (higher is better),
// scaled x10 and capped at 100. Bonuses: +5 per matched field (max +15),
// +10 if the matched jobline has usable price data, and up to +10 for a
// clear gap between the best and second-best candidate (a lone candidate
// gets a flat +5). Capped at 100; any match at all scores at least 1.
const calculateMatchConfidence = (matches, bestMatch) => {
    if (!bestMatch || !matches || matches.length === 0) {
        return 0; // No match = 0% confidence
    }

    const baseScore = Math.min(bestMatch.finalScore * 10, 100);
    const fieldMatchBonus = Math.min(bestMatch.fieldMatches.length * 5, 15);
    const priceDataBonus = bestMatch.hasPriceData ? 10 : 0;

    // Reward a clear winner over the runner-up.
    const marginBonus = matches.length > 1
        ? Math.min((bestMatch.finalScore - matches[1].finalScore) * 5, 10)
        : 5;

    const total = baseScore + fieldMatchBonus + priceDataBonus + marginBonus;
    const capped = Math.min(Math.round(total * 100) / 100, 100);

    // Floor at 1% whenever there is any match at all.
    return Math.max(capped, 1);
};
|
||||
|
||||
// Blend OCR quality and jobline-match quality into one 0-100 score.
// A zero match confidence short-circuits to 0 — there is nothing to trust.
// Otherwise a 60/40 weighted average favoring OCR quality: even a perfect
// jobline match is useless if Textract misread the line.
const calculateOverallConfidence = (ocrConfidence, matchConfidence) => {
    if (matchConfidence === 0) {
        return 0;
    }

    const blended = ocrConfidence * 0.6 + matchConfidence * 0.4;
    return Math.round(blended * 100) / 100;
};
|
||||
|
||||
// Helper function to merge and deduplicate results with weighted scoring
|
||||
// Merge several Fuse.js result lists into one deduplicated top-5 list.
// Each list's scores are multiplied by its weight (Fuse scores are
// lower-is-better). An item appearing in multiple lists keeps its best
// (minimum) weighted score and accumulates a hit count. Output is ordered
// by hit count (items found by more searches first), then by score.
const mergeResults = (resultsArray, weights = []) => {
    const byId = new Map();

    resultsArray.forEach((results, listIndex) => {
        const weight = weights[listIndex] || 1;
        for (const result of results) {
            const id = result.item.id;
            const weightedScore = result.score * weight;
            const entry = byId.get(id);
            if (entry) {
                // Lower is better in Fuse.js — keep the minimum.
                entry.score = Math.min(entry.score, weightedScore);
                entry.count += 1;
            } else {
                byId.set(id, { item: result.item, score: weightedScore, count: 1 });
            }
        }
    });

    const ranked = [...byId.values()].sort((a, b) =>
        a.count !== b.count ? b.count - a.count : a.score - b.score
    );
    return ranked.slice(0, 5); // Return top 5 results
};
|
||||
|
||||
/**
 * Build the bill-posting form payload from Textract-processed invoice data.
 *
 * Resolves the job (by jobid, or by the RO/PO number found in the OCR summary
 * when no jobid is supplied), fuzzy-matches invoice lines to the job's
 * joblines and the detected vendor name to known vendors, then assembles the
 * billFormData object consumed by the bill posting screen.
 *
 * Fixes vs. previous revision:
 *  - guards against a missing vendor name (normalizeText(undefined) threw);
 *  - checks jobs_by_pk BEFORE dereferencing its joblines, so a missing job
 *    raises the intended error instead of a TypeError;
 *  - parseInt now passes an explicit radix.
 *
 * @param {Object} args
 * @param {Object} args.processedData - Normalized Textract output ({ summary, lineItems }).
 * @param {string} [args.jobid] - Job UUID; resolved via the RO number when absent/blank.
 * @param {string} [args.bodyshopid] - Currently unused; kept for interface stability.
 * @param {string} [args.partsorderid] - Currently unused (see TODO at the query below).
 * @param {Object} args.req - Request carrying the user-scoped GraphQL client.
 * @returns {Promise<Object>} billFormData ready for the bill posting screen.
 * @throws {Error} When no RO number can be extracted, or no job matches it.
 */
async function generateBillFormData({ processedData, jobid: jobidFromProps, bodyshopid, partsorderid, req }) {
    const client = req.userGraphQLClient;

    let jobid = jobidFromProps;
    // If no jobid, fetch it via the RO/PO number, and funnel it back.
    // (String "null"/"undefined" guards cover values serialized through URLs.)
    if (!jobid || jobid === null || jobid === undefined || jobid === "" || jobid === "null" || jobid === "undefined") {
        const ro_number = processedData.summary?.PO_NUMBER?.value || Object.values(processedData.summary).find(value => value.normalizedLabel === 'ro_number')?.value;
        if (!ro_number) {
            throw new Error("Could not find RO number in the extracted data to associate with the bill. Select an RO and try again.");
        }

        const { jobs } = await client.request(`
            query QUERY_BILL_OCR_JOB_BY_RO($ro_number: String!) {
                jobs(where: {ro_number: {_eq: $ro_number}}) {
                    id
                }
            }`, { ro_number });

        if (jobs.length === 0) {
            throw new Error("No job found for the detected RO/PO number.");
        }
        jobid = jobs[0].id;
    }

    const jobData = await client.request(`
        query QUERY_BILL_OCR_DATA($jobid: uuid!) {
            vendors {
                id
                name
            }
            jobs_by_pk(id: $jobid) {
                id
                bodyshop {
                    id
                    md_responsibility_centers
                    cdk_dealerid
                    pbs_serialnumber
                    rr_dealerid
                }
                joblines {
                    id
                    line_desc
                    removed
                    act_price
                    db_price
                    oem_partno
                    alt_partno
                    part_type
                }
            }
        }
    `, {
        jobid, // TODO: Parts order IDs are currently ignored. When receiving a parts order, it could be used to more precisely match to joblines.
    });

    // Fail fast with the intended friendly error before joblines is
    // dereferenced below (previously this check ran too late and a missing
    // job surfaced as a TypeError).
    const { jobs_by_pk: job } = jobData;
    if (!job) {
        throw new Error('Job not found for bill form data generation.');
    }

    //Create fuses of line descriptions for matching.
    const jobLineDescFuse = new Fuse(
        jobData.jobs_by_pk.joblines.map(jl => ({ ...jl, line_desc_normalized: normalizeText(jl.line_desc || ""), oem_partno_normalized: normalizePartNumber(jl.oem_partno || ""), alt_partno_normalized: normalizePartNumber(jl.alt_partno || "") })),
        {
            keys: [{
                name: 'line_desc',
                weight: 6
            }, {
                name: 'oem_partno',
                weight: 8
            }, {
                name: 'alt_partno',
                weight: 5
            },
            {
                name: 'act_price',
                weight: 1
            },
            {
                name: 'line_desc_normalized',
                weight: 4
            },
            {
                name: 'oem_partno_normalized',
                weight: 6
            },
            {
                name: 'alt_partno_normalized',
                weight: 3
            }],
            threshold: 0.4, //Adjust as needed for matching sensitivity,
            includeScore: true,
        }
    );
    const joblineMatches = joblineFuzzySearch({ fuseToSearch: jobLineDescFuse, processedData });

    const vendorFuse = new Fuse(
        jobData.vendors.map(v => ({ ...v, name_normalized: normalizeText(v.name) })),
        {
            keys: [{ name: "name", weight: 3 }, { name: 'name_normalized', weight: 2 }],
            threshold: 0.4,
            includeScore: true,
        },
    );

    // Default to "" so a bill without a detected vendor name doesn't crash
    // normalizeText (which calls .replace on its argument).
    const vendorMatches = vendorFuse.search(normalizeText(processedData.summary?.VENDOR_NAME?.value || processedData.summary?.NAME?.value || ""));

    let vendorid;
    if (vendorMatches.length > 0) {
        vendorid = vendorMatches[0].item.id;
    }

    //Is there a subtotal level discount? If there is, we need to figure out what the percentage is, and apply that to the actual cost as a reduction
    const subtotalDiscountValueRaw = processedData.summary?.DISCOUNT?.value || processedData.summary?.SUBTOTAL_DISCOUNT?.value || 0;
    let discountPercentageDecimal = 0;
    if (subtotalDiscountValueRaw) {
        const subtotal = parseFloat(normalizePrice(processedData.summary?.SUBTOTAL?.value || 0)) || 0;
        const subtotalDiscountValue = parseFloat(normalizePrice(subtotalDiscountValueRaw)) || 0;
        if (subtotal > 0 && subtotalDiscountValue) {
            // Store discount percentage as a decimal (e.g. 20.5% => 0.205),
            // but only allow half-percent increments (0.005 steps).
            discountPercentageDecimal = Math.abs(subtotalDiscountValue / subtotal);
            discountPercentageDecimal = roundToIncrement(discountPercentageDecimal, 0.005);
        }
    }

    //TODO: How do we handle freight lines and core charges?
    //Create the form data structure for the bill posting screen.
    const billFormData = {
        "jobid": jobid,
        "vendorid": vendorid,
        "invoice_number": processedData.summary?.INVOICE_RECEIPT_ID?.value,
        "date": processedData.summary?.INVOICE_RECEIPT_DATE?.value,
        "is_credit_memo": false,
        "total": normalizePrice(processedData.summary?.INVOICE_TOTAL?.value || processedData.summary?.TOTAL?.value),
        "billlines": joblineMatches.map(jlMatchLine => {
            const { matches, textractLineItem, } = jlMatchLine
            //Matches should be pre-sorted, take the first one.
            const matchToUse = matches.length > 0 ? matches[0] : null;

            // Calculate confidence scores
            const ocrConfidence = calculateTextractConfidence(textractLineItem);
            const matchConfidence = calculateMatchConfidence(matches, matchToUse);
            const overallConfidence = calculateOverallConfidence(ocrConfidence, matchConfidence);
            //TODO: Should be using the textract if there is an exact match on the normalized label.
            //if there isn't then we can do the below.

            let actualPrice, actualCost;
            //TODO: What if several fields match on the normalized name? We need to pick the most likely one.
            const hasNormalizedActualPrice = Object.keys(textractLineItem).find(key => textractLineItem[key].normalizedLabel === 'actual_price');
            const hasNormalizedActualCost = Object.keys(textractLineItem).find(key => textractLineItem[key].normalizedLabel === 'actual_cost');

            if (hasNormalizedActualPrice) {
                actualPrice = textractLineItem[hasNormalizedActualPrice].value;
            }
            if (hasNormalizedActualCost) {
                actualCost = textractLineItem[hasNormalizedActualCost].value;
            }

            if (!hasNormalizedActualPrice || !hasNormalizedActualCost) {
                //This is if there was no match found for normalized labels.
                //Check all prices, and generally the higher one will be the actual price and the lower one will be the cost.
                //Need to make sure that other random items are excluded. This should be within a reasonable range of the matched jobline at matchToUse.item.act_price
                //Iterate over all of the text values, and check out which of them are currencies.
                //They'll be in the format starting with a $ sign usually.
                // NOTE(review): assumes every field value is a string (or
                // nullish) — a numeric value would make .startsWith throw.
                const currencyTextractLineItems = [] // {key, value}
                Object.keys(textractLineItem).forEach(key => {
                    const currencyValue = textractLineItem[key].value?.startsWith('$') ? textractLineItem[key].value : null;
                    if (currencyValue) {
                        //Clean it and parse it
                        const cleanValue = parseFloat(currencyValue.replace(/[^0-9.-]/g, '')) || 0;
                        currencyTextractLineItems.push({ key, value: cleanValue })
                    }
                })

                //Sort them descending
                currencyTextractLineItems.sort((a, b) => b.value - a.value);
                //Most expensive should be the actual price, second most expensive should be the cost.
                if (!actualPrice) actualPrice = currencyTextractLineItems.length > 0 ? currencyTextractLineItems[0].value : 0;
                if (!actualCost) actualCost = currencyTextractLineItems.length > 1 ? currencyTextractLineItems[1].value : 0;

                if (matchToUse) {
                    //Double check that they're within 50% of the matched jobline price if there is one.
                    const joblinePrice = parseFloat(matchToUse.item.act_price) || 0;
                    if (!hasNormalizedActualPrice && actualPrice > 0 && (actualPrice < joblinePrice * (1 - PRICE_PERCENT_MARGIN_TOLERANCE) || actualPrice > joblinePrice * (1 + PRICE_PERCENT_MARGIN_TOLERANCE))) {
                        actualPrice = joblinePrice; //Set to the jobline as a fallback.
                    }
                    if (!hasNormalizedActualCost && actualCost > 0 && (actualCost < joblinePrice * (1 - PRICE_PERCENT_MARGIN_TOLERANCE) || actualCost > joblinePrice * (1 + PRICE_PERCENT_MARGIN_TOLERANCE))) {
                        actualCost = null //Blank it out if it's not likely.
                    }
                }
            }

            //If there's nothing, just fall back to seeing if there's a price object from textract.
            if (!actualPrice && textractLineItem.PRICE) {
                actualPrice = textractLineItem.PRICE.value;
            }
            if (!actualCost && textractLineItem.PRICE) {
                actualCost = textractLineItem.PRICE.value;
            }

            //If quantity greater than 1, check if the amount is a multiple of the matched jobline price; if so, divide it out to get the unit price.
            const quantity = parseInt(textractLineItem?.QUANTITY?.value, 10);
            if (quantity && quantity > 1) {
                if (actualPrice && quantity && Math.abs((actualPrice / quantity) - (parseFloat(matchToUse?.item?.act_price) || 0)) / ((parseFloat(matchToUse?.item?.act_price) || 1)) < PRICE_QUANTITY_MARGIN_TOLERANCE) {
                    actualPrice = actualPrice / quantity;
                }
                if (actualCost && quantity && Math.abs((actualCost / quantity) - (parseFloat(matchToUse?.item?.act_price) || 0)) / ((parseFloat(matchToUse?.item?.act_price) || 1)) < PRICE_QUANTITY_MARGIN_TOLERANCE) {
                    actualCost = actualCost / quantity;
                }
            }

            if (discountPercentageDecimal > 0) {
                // Apply the subtotal-level discount proportionally to the cost.
                // NOTE(review): actualCost can still be null/undefined here,
                // making this 0/NaN — presumably handled downstream; confirm.
                actualCost = actualCost * (1 - discountPercentageDecimal);
            }

            const responsibilityCenters = job.bodyshop.md_responsibility_centers
            //TODO: Do we need to verify the lines to see if it is a unit price or total price (i.e. quantity * price)
            const lineObject = {
                "line_desc": matchToUse?.item?.line_desc || textractLineItem.ITEM?.value || "NO DESCRIPTION",
                "quantity": textractLineItem.QUANTITY?.value,
                "actual_price": normalizePrice(actualPrice),
                "actual_cost": normalizePrice(actualCost),
                // DMS-connected shops use the jobline's part_type directly
                // (except "PAE"); otherwise fall back to the responsibility
                // centers' default cost mapping for that part type.
                "cost_center": matchToUse?.item?.part_type
                    ? bodyshopHasDmsKey(job.bodyshop)
                        ? matchToUse?.item?.part_type !== "PAE"
                            ? matchToUse?.item?.part_type
                            : null
                        : responsibilityCenters.defaults &&
                        (responsibilityCenters.defaults.costs[matchToUse?.item?.part_type] || null)
                    : null,
                "applicable_taxes": {
                    // NOTE(review): federal flag is instance-dependent via
                    // InstanceManager({ imex: true, rome: false }) — confirm semantics.
                    "federal": InstanceManager({ imex: true, rome: false }),
                    "state": false,
                    "local": false
                },
                "joblineid": matchToUse?.item?.id || "noline",
                "confidence": `T${overallConfidence} - O${ocrConfidence} - J${matchConfidence}`
            }
            return lineObject
        })
    }

    return billFormData
}
|
||||
|
||||
/**
 * Fuzzy-match every Textract invoice line against the job's joblines.
 *
 * For each line item, several Fuse.js search variants are run per field
 * (ITEM description, PRODUCT_CODE, PRICE, UNIT_PRICE), each field's variant
 * results are merged via mergeResults, and finally all fields are combined
 * with fixed weights (part number > description > price > unit price) plus a
 * result-position bonus. Candidates whose jobline has no usable act_price are
 * penalized 50%.
 *
 * @param {Object} args
 * @param {Fuse}   args.fuseToSearch - Fuse.js index built over the job's joblines.
 * @param {Object} args.processedData - Normalized Textract output; only
 *   processedData.lineItems is read here.
 * @returns {Array<{matches: Array, textractLineItem: Object, hasMatch: boolean}>}
 *   One entry per invoice line (pushed even when nothing matched), carrying
 *   the top-5 candidate joblines sorted by finalScore descending.
 */
function joblineFuzzySearch({ fuseToSearch, processedData }) {
    const matches = []
    const searchStats = []; // Track search statistics (collected per line below)

    processedData.lineItems.forEach((lineItem, lineIndex) => {
        const lineStats = {
            lineNumber: lineIndex + 1,
            searches: []
        };

        // Refined ITEM search (multi-word description)
        const refinedItemResults = (() => {
            if (!lineItem.ITEM?.value) return [];

            const itemValue = lineItem.ITEM.value;
            const normalized = normalizeText(itemValue);

            // 1: Full string search
            const fullSearch = fuseToSearch.search(normalized);
            lineStats.searches.push({ type: 'ITEM - Full String', term: normalized, results: fullSearch.length });

            // 2: Search individual significant words (3+ chars)
            const words = normalized.split(' ').filter(w => w.length >= 3);
            const wordSearches = words.map(word => {
                const results = fuseToSearch.search(word);
                lineStats.searches.push({ type: 'ITEM - Individual Word', term: word, results: results.length });
                return results;
            });

            // 3: Search without spaces entirely
            const noSpaceSearch = fuseToSearch.search(normalized.replace(/\s+/g, ''));
            lineStats.searches.push({ type: 'ITEM - No Spaces', term: normalized.replace(/\s+/g, ''), results: noSpaceSearch.length });

            // Merge results with weights (full search weighted higher;
            // mergeResults treats lower weighted scores as better)
            return mergeResults(
                [fullSearch, ...wordSearches, noSpaceSearch],
                [1.0, ...words.map(() => 1.5), 1.2] // Full search best, individual words penalized slightly
            );
        })();

        // Refined PRODUCT_CODE search (part numbers)
        const refinedProductCodeResults = (() => {
            if (!lineItem.PRODUCT_CODE?.value) return [];

            const productCode = lineItem.PRODUCT_CODE.value;
            const normalized = normalizePartNumber(productCode);

            // 1: Normalized search (no spaces/special chars)
            const normalizedSearch = fuseToSearch.search(normalized);
            lineStats.searches.push({ type: 'PRODUCT_CODE - Normalized', term: normalized, results: normalizedSearch.length });

            // 2: Original with minimal cleaning
            const minimalClean = productCode.replace(/\s+/g, '').toUpperCase();
            const minimalSearch = fuseToSearch.search(minimalClean);
            lineStats.searches.push({ type: 'PRODUCT_CODE - Minimal Clean', term: minimalClean, results: minimalSearch.length });

            // 3: Search with dashes (common in part numbers)
            const withDashes = productCode.replace(/[^a-zA-Z0-9-]/g, '').toUpperCase();
            const dashSearch = fuseToSearch.search(withDashes);
            lineStats.searches.push({ type: 'PRODUCT_CODE - With Dashes', term: withDashes, results: dashSearch.length });

            // 4: Special chars to spaces (preserve word boundaries)
            const specialCharsToSpaces = productCode.replace(/[^a-zA-Z0-9\s]/g, ' ').replace(/\s+/g, ' ').trim().toUpperCase();
            const specialCharsSearch = fuseToSearch.search(specialCharsToSpaces);
            lineStats.searches.push({ type: 'PRODUCT_CODE - Special Chars to Spaces', term: specialCharsToSpaces, results: specialCharsSearch.length });

            return mergeResults(
                [normalizedSearch, minimalSearch, dashSearch, specialCharsSearch],
                [1.0, 1.1, 1.2, 1.15] // Prefer fully normalized, special chars to spaces slightly weighted
            );
        })();

        // Refined PRICE search
        const refinedPriceResults = (() => {
            if (!lineItem.PRICE?.value) return [];

            const price = normalizePrice(lineItem.PRICE.value);

            // 1: Exact price match
            const exactSearch = fuseToSearch.search(price);
            lineStats.searches.push({ type: 'PRICE - Exact', term: price, results: exactSearch.length });

            // 2: Price with 2 decimal places
            const priceFloat = parseFloat(price);
            if (!isNaN(priceFloat)) {
                const formattedPrice = priceFloat.toFixed(2);
                const formattedSearch = fuseToSearch.search(formattedPrice);
                lineStats.searches.push({ type: 'PRICE - Formatted (2 decimals)', term: formattedPrice, results: formattedSearch.length });

                return mergeResults([exactSearch, formattedSearch], [1.0, 1.1]);
            }

            return exactSearch;
        })();

        // Refined UNIT_PRICE search (same strategy as PRICE above)
        const refinedUnitPriceResults = (() => {
            if (!lineItem.UNIT_PRICE?.value) return [];

            const unitPrice = normalizePrice(lineItem.UNIT_PRICE.value);

            // 1: Exact price match
            const exactSearch = fuseToSearch.search(unitPrice);
            lineStats.searches.push({ type: 'UNIT_PRICE - Exact', term: unitPrice, results: exactSearch.length });

            // 2: Price with 2 decimal places
            const priceFloat = parseFloat(unitPrice);
            if (!isNaN(priceFloat)) {
                const formattedPrice = priceFloat.toFixed(2);
                const formattedSearch = fuseToSearch.search(formattedPrice);
                lineStats.searches.push({ type: 'UNIT_PRICE - Formatted (2 decimals)', term: formattedPrice, results: formattedSearch.length });

                return mergeResults([exactSearch, formattedSearch], [1.0, 1.1]);
            }

            return exactSearch;
        })();

        //Merge them all together and sort by the highest scores.
        const combinedScoreMap = new Map();

        // Weight different field types differently
        const fieldWeights = {
            productCode: 5.0, // Most important - part numbers should match
            item: 3.0, // Second most important - description
            price: 1.0, // Less important - prices can vary
            unitPrice: 0.8 // Least important - similar to price
        };

        [
            { results: refinedProductCodeResults, weight: fieldWeights.productCode, field: 'productCode' },
            { results: refinedItemResults, weight: fieldWeights.item, field: 'item' },
            { results: refinedPriceResults, weight: fieldWeights.price, field: 'price' },
            { results: refinedUnitPriceResults, weight: fieldWeights.unitPrice, field: 'unitPrice' }
        ].forEach(({ results, weight, field }) => {
            results.forEach((result, index) => {
                const id = result.item.id;

                // Position bonus (first result is better than fifth);
                // mergeResults caps lists at 5, so index is at most 4.
                const positionBonus = (5 - index) / 5;

                // Lower score is better in Fuse.js, so invert it and apply weights
                // NOTE(review): merged scores carry list weights up to 1.5, so
                // (1 - result.score) can go negative for weak matches — confirm
                // that is the intended penalty.
                const normalizedScore = (1 - result.score) * weight * positionBonus;

                if (!combinedScoreMap.has(id)) {
                    combinedScoreMap.set(id, {
                        item: result.item,
                        score: normalizedScore,
                        fieldMatches: [field],
                        matchCount: result.count || 1
                    });
                } else {
                    const existing = combinedScoreMap.get(id);
                    existing.score += normalizedScore;
                    existing.fieldMatches.push(field);
                    existing.matchCount += (result.count || 1);
                }
            });
        });

        // Convert to array and sort by best combined score
        const finalMatches = Array.from(combinedScoreMap.values())
            .map(entry => {
                // Apply penalty if item has no act_price or it's 0
                const hasPriceData = entry.item.act_price && parseFloat(entry.item.act_price) > 0;
                const priceDataPenalty = hasPriceData ? 1.0 : 0.5; // 50% penalty if no price

                return {
                    ...entry,
                    // Boost score for items that matched in multiple fields, penalize for missing price
                    finalScore: entry.score * (1 + (entry.fieldMatches.length * 0.2)) * priceDataPenalty,
                    hasPriceData
                };
            })
            .sort((a, b) => b.finalScore - a.finalScore)
            .slice(0, 5);

        // Always push the textract line item, even if no matches found
        // This ensures all invoice lines are processed
        matches.push({
            matches: finalMatches,
            textractLineItem: lineItem,
            hasMatch: finalMatches.length > 0
        });

        searchStats.push(lineStats);

    })

    // (A verbose console.table debug dump of searchStats used to live here,
    // commented out; removed for readability. The per-line stats are still
    // collected above should instrumentation be needed again.)

    return matches
}
|
||||
|
||||
/**
 * Return the first configured DMS identifier on a bodyshop record
 * (cdk_dealerid, pbs_serialnumber, or rr_dealerid). The result is truthy
 * when the shop has any DMS integration key, falsy otherwise.
 */
const bodyshopHasDmsKey = (shop) => {
    const { cdk_dealerid, pbs_serialnumber, rr_dealerid } = shop;
    return cdk_dealerid || pbs_serialnumber || rr_dealerid;
};
|
||||
|
||||
|
||||
// Public API of the bill OCR generator module.
// normalizePrice is exported separately so it can be unit-tested / reused.
module.exports = {
    generateBillFormData,
    normalizePrice
}
|
||||
159
server/ai/bill-ocr/bill-ocr-helpers.js
Normal file
159
server/ai/bill-ocr/bill-ocr-helpers.js
Normal file
@@ -0,0 +1,159 @@
|
||||
const PDFDocument = require('pdf-lib').PDFDocument;
|
||||
const logger = require("../../utils/logger");
|
||||
// Environment-scoped prefix so multiple deployments can share one Redis.
const TEXTRACT_REDIS_PREFIX = `textract:${process.env?.NODE_ENV}`
// Job entries expire after 10 minutes.
const TEXTRACT_JOB_TTL = 10 * 60;

/**
 * Build the environment-scoped Redis key for a Textract job.
 * @param {string} textractJobId
 * @returns {string}
 */
function getTextractJobKey(textractJobId) {
    return [TEXTRACT_REDIS_PREFIX, textractJobId].join(':');
}
|
||||
|
||||
|
||||
/**
 * Persist Textract job state in Redis under the environment-scoped key,
 * refreshing the TTL on every write.
 * @param {Object} args
 * @param {Object} args.redisPubClient - initialized Redis client
 * @param {string} args.textractJobId
 * @param {Object} args.jobData - JSON-serializable job state
 * @throws {Error} when the Redis client has not been initialized
 */
async function setTextractJob({ redisPubClient, textractJobId, jobData }) {
    if (!redisPubClient) {
        throw new Error('Redis client not initialized. Call initializeBillOcr first.');
    }
    const redisKey = getTextractJobKey(textractJobId);
    const serialized = JSON.stringify(jobData);
    await redisPubClient.set(redisKey, serialized);
    await redisPubClient.expire(redisKey, TEXTRACT_JOB_TTL);
}
|
||||
|
||||
/**
 * Load Textract job state from Redis.
 * @param {Object} args
 * @param {Object} args.redisPubClient - initialized Redis client
 * @param {string} args.textractJobId
 * @returns {Promise<Object|null>} parsed job data, or null when absent
 * @throws {Error} when the Redis client has not been initialized
 */
async function getTextractJob({ redisPubClient, textractJobId }) {
    if (!redisPubClient) {
        throw new Error('Redis client not initialized. Call initializeBillOcr first.');
    }
    const raw = await redisPubClient.get(getTextractJobKey(textractJobId));
    if (!raw) {
        return null;
    }
    return JSON.parse(raw);
}
|
||||
|
||||
/**
 * Detect file type based on the declared MIME type, falling back to
 * magic-byte sniffing of the buffer.
 * @param {Object} file - Multer file object ({ mimetype, buffer })
 * @returns {string} 'pdf', 'image', or 'unknown'
 */
function getFileType(file) {
    // Trust the declared MIME type first.
    const mimeType = file.mimetype?.toLowerCase();

    if (mimeType === 'application/pdf') {
        return 'pdf';
    }

    if (mimeType && mimeType.startsWith('image/')) {
        return 'image';
    }

    // Fallback: check file signature (magic bytes).
    const buffer = file.buffer;
    // >= 4 (was > 4, an off-by-one): 4 bytes are exactly enough to hold the
    // PDF/PNG signatures and more than enough for JPEG's 3-byte signature.
    if (buffer && buffer.length >= 4) {
        // PDF signature: %PDF
        if (buffer[0] === 0x25 && buffer[1] === 0x50 && buffer[2] === 0x44 && buffer[3] === 0x46) {
            return 'pdf';
        }

        // JPEG signature: FF D8 FF
        if (buffer[0] === 0xFF && buffer[1] === 0xD8 && buffer[2] === 0xFF) {
            return 'image';
        }

        // PNG signature: 89 50 4E 47
        if (buffer[0] === 0x89 && buffer[1] === 0x50 && buffer[2] === 0x4E && buffer[3] === 0x47) {
            return 'image';
        }

        // HEIC/HEIF: an ISO-BMFF 'ftyp' box near the start whose brand is a
        // HEIF variant. >= 12 (was > 12) so a minimal 12-byte header is accepted.
        if (buffer.length >= 12) {
            const ftypIndex = buffer.indexOf(Buffer.from('ftyp'));
            if (ftypIndex > 0 && ftypIndex < 12) {
                const brand = buffer.slice(ftypIndex + 4, ftypIndex + 8).toString('ascii');
                if (brand.startsWith('heic') || brand.startsWith('heix') ||
                    brand.startsWith('hevc') || brand.startsWith('hevx') ||
                    brand.startsWith('mif1')) {
                    return 'image';
                }
            }
        }
    }

    return 'unknown';
}
|
||||
|
||||
/**
 * Count the pages in a PDF buffer using pdf-lib.
 * @param {Buffer} pdfBuffer
 * @returns {Promise<number>}
 * @throws {Error} wrapping any pdf-lib load failure
 */
async function getPdfPageCount(pdfBuffer) {
    try {
        const document = await PDFDocument.load(pdfBuffer);
        return document.getPageCount();
    } catch (error) {
        console.error('Error reading PDF page count:', error);
        throw new Error('Failed to read PDF: ' + error.message);
    }
}
|
||||
|
||||
/**
 * Returns true when any stored Textract job for this environment is still
 * IN_PROGRESS; false otherwise, including on Redis errors (logged, not thrown).
 * @param {Object} args
 * @param {Object} args.redisPubClient - initialized Redis client
 * @returns {Promise<boolean>}
 * @throws {Error} only when the Redis client is missing
 */
async function hasActiveJobs({ redisPubClient }) {
    if (!redisPubClient) {
        throw new Error('Redis client not initialized.');
    }

    try {
        const jobKeys = await redisPubClient.keys(`${TEXTRACT_REDIS_PREFIX}:*`);

        if (!jobKeys || jobKeys.length === 0) {
            return false;
        }

        //TODO: Is there a better way to do this that supports clusters?
        // Sequential GETs with early exit on the first IN_PROGRESS entry.
        for (const jobKey of jobKeys) {
            const raw = await redisPubClient.get(jobKey);
            if (!raw) {
                continue;
            }
            if (JSON.parse(raw).status === 'IN_PROGRESS') {
                return true;
            }
        }

        return false;
    } catch (error) {
        logger.log("bill-ocr-job-check-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
        return false;
    }
}
|
||||
|
||||
// Redis job-state helpers plus file-inspection utilities consumed by the
// bill OCR pipeline (bill-ocr.js).
module.exports = {
    getTextractJobKey,
    setTextractJob,
    getTextractJob,
    getFileType,
    getPdfPageCount,
    hasActiveJobs,
    TEXTRACT_REDIS_PREFIX
}
|
||||
|
||||
202
server/ai/bill-ocr/bill-ocr-normalize.js
Normal file
202
server/ai/bill-ocr/bill-ocr-normalize.js
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
// Minimum Textract confidence (percent) a field must exceed to be kept.
const MIN_CONFIDENCE_VALUE = 50

/**
 * Normalize a Textract expense field type to our internal field name.
 * Currently an identity mapping, kept as a hook for future renames.
 * @param {string} fieldType
 * @returns {string}
 */
function normalizeFieldName(fieldType) {
    return fieldType;
}
|
||||
|
||||
// Canonical internal field names used by the OCR normalizer and imported by
// bill-ocr-generator.js. Keys intentionally equal their values so call sites
// read naturally (standardizedFieldsnames.quantity === "quantity").
const standardizedFieldsnames = {
    actual_cost: "actual_cost",
    actual_price: "actual_price",
    line_desc: "line_desc",
    quantity: "quantity",
    part_no: "part_no",
    ro_number: "ro_number",
}
|
||||
|
||||
/**
 * Map a raw OCR column label (e.g. "Qty", "Part No.") to a canonical field
 * name from standardizedFieldsnames. Unknown labels are returned tagged as
 * "NOT_MAPPED => <label>" so they stay visible downstream.
 * @param {string} labelText
 * @returns {string} canonical field name, '' for empty input, or a
 *   "NOT_MAPPED => ..." marker
 */
function normalizeLabelName(labelText) {
    if (!labelText) return '';

    // Lowercase, trim, strip punctuation, then collapse whitespace to '_'.
    const normalized = labelText
        .toLowerCase()
        .trim()
        .replace(/[^a-z0-9\s]/g, '')
        .replace(/\s+/g, '_');

    // Known label spellings -> canonical field names.
    const labelMap = {
        'qty': standardizedFieldsnames.quantity,
        'qnty': standardizedFieldsnames.quantity,
        'sale_qty': standardizedFieldsnames.quantity,
        'invoiced_qty': standardizedFieldsnames.quantity,
        'qty_shipped': standardizedFieldsnames.quantity,
        'quantity': standardizedFieldsnames.quantity,
        'filled': standardizedFieldsnames.quantity,
        'count': standardizedFieldsnames.quantity,
        'quant': standardizedFieldsnames.quantity,
        'desc': standardizedFieldsnames.line_desc,
        'description': standardizedFieldsnames.line_desc,
        'item': standardizedFieldsnames.line_desc,
        'part': standardizedFieldsnames.part_no,
        'part_no': standardizedFieldsnames.part_no,
        'part_num': standardizedFieldsnames.part_no,
        'part_number': standardizedFieldsnames.part_no,
        'item_no': standardizedFieldsnames.part_no,
        'price': standardizedFieldsnames.actual_price,
        //'amount': standardizedFieldsnames.actual_price,
        'list_price': standardizedFieldsnames.actual_price,
        'unit_price': standardizedFieldsnames.actual_price,
        'list': standardizedFieldsnames.actual_price,
        'retail_price': standardizedFieldsnames.actual_price,
        'retail': standardizedFieldsnames.actual_price,
        'net': standardizedFieldsnames.actual_cost,
        'selling_price': standardizedFieldsnames.actual_cost,
        'net_price': standardizedFieldsnames.actual_cost,
        'net_cost': standardizedFieldsnames.actual_cost,
        'total': standardizedFieldsnames.actual_cost,
        'po_no': standardizedFieldsnames.ro_number,
        'customer_po_no': standardizedFieldsnames.ro_number,
        'customer_po_no_': standardizedFieldsnames.ro_number

    };

    return labelMap[normalized] || `NOT_MAPPED => ${normalized}`; // TODO: Should we monitor unmapped labels?
}
|
||||
|
||||
/**
 * Filter and clean raw extracted invoice data: drop any field whose
 * confidence is not above MIN_CONFIDENCE_VALUE, and parse numeric fields
 * (quantity, retail_price, actual_price) into numbers.
 * @param {{summary: Object, lineItems: Object[]}} invoiceData
 * @returns {{summary: Object, lineItems: Object[]}}
 */
function processScanData(invoiceData) {
    const keepField = (field) => field.confidence > MIN_CONFIDENCE_VALUE;

    const summary = {};
    for (const [fieldName, field] of Object.entries(invoiceData.summary)) {
        if (!keepField(field)) continue;
        summary[fieldName] = {
            value: field.value,
            label: field.label,
            normalizedLabel: field.normalizedLabel,
            confidence: field.confidence
        };
    }

    const lineItems = invoiceData.lineItems.map((item) => {
        const cleanedItem = {};
        for (const [fieldName, field] of Object.entries(item)) {
            if (!keepField(field)) continue;

            let parsedValue = field.value;
            if (fieldName === 'quantity') {
                parsedValue = parseFloat(parsedValue) || 0;
            } else if (fieldName === 'retail_price' || fieldName === 'actual_price') {
                // Strip currency symbols / grouping characters before parsing.
                parsedValue = parseFloat(parsedValue.replace(/[^0-9.-]/g, '')) || 0;
            }

            cleanedItem[fieldName] = {
                value: parsedValue,
                label: field.label,
                normalizedLabel: field.normalizedLabel,
                confidence: field.confidence
            };
        }
        return cleanedItem;
    });

    return { summary, lineItems };
}
|
||||
|
||||
/**
 * Flatten an AWS Textract expense response into summary fields and line
 * items. Duplicate field types within one line item are disambiguated by
 * appending a numeric suffix (ITEM, ITEM_2, ITEM_3, ...).
 * @param {Object} textractResponse - AnalyzeExpense/GetExpenseAnalysis result
 * @returns {{summary: Object, lineItems: Object[]}}
 */
function extractInvoiceData(textractResponse) {
    const invoiceData = { summary: {}, lineItems: [] };

    const expenseDocuments = textractResponse.ExpenseDocuments;
    if (!expenseDocuments || expenseDocuments.length === 0) {
        return invoiceData;
    }

    // Pull the pieces we use out of a raw Textract expense field.
    const describeField = (field) => ({
        type: field.Type?.Text || '',
        value: field.ValueDetection?.Text || '',
        label: field.LabelDetection?.Text || '',
        confidence: field.ValueDetection?.Confidence || 0
    });

    for (const expenseDoc of expenseDocuments) {
        // Summary fields: vendor, invoice number, date, total, etc.
        for (const rawField of expenseDoc.SummaryFields || []) {
            const { type, value, label, confidence } = describeField(rawField);
            if (!type || !value) continue;
            invoiceData.summary[type] = {
                value,
                label,
                normalizedLabel: normalizeLabelName(label),
                confidence
            };
        }

        // Line items, grouped by Textract into LineItemGroups.
        for (const lineItemGroup of expenseDoc.LineItemGroups || []) {
            for (const lineItem of lineItemGroup.LineItems || []) {
                const item = {};
                const fieldNameCounts = {}; // occurrences per normalized name

                for (const rawField of lineItem.LineItemExpenseFields || []) {
                    const { type, value, label, confidence } = describeField(rawField);
                    if (!type || !value) continue;

                    let normalizedField = normalizeFieldName(type);
                    // Keep duplicate field types distinct: NAME, NAME_2, ...
                    if (Object.prototype.hasOwnProperty.call(item, normalizedField)) {
                        fieldNameCounts[normalizedField] = (fieldNameCounts[normalizedField] || 1) + 1;
                        normalizedField = `${normalizedField}_${fieldNameCounts[normalizedField]}`;
                    }

                    item[normalizedField] = {
                        value,
                        label,
                        normalizedLabel: normalizeLabelName(label),
                        confidence
                    };
                }

                if (Object.keys(item).length > 0) {
                    invoiceData.lineItems.push(item);
                }
            }
        }
    }

    return invoiceData;
}
|
||||
|
||||
// Textract response parsing/normalization API. standardizedFieldsnames is
// also consumed by bill-ocr-generator.js.
module.exports = {
    extractInvoiceData,
    processScanData,
    standardizedFieldsnames
}
|
||||
8
server/ai/bill-ocr/bill-ocr-readme.md
Normal file
8
server/ai/bill-ocr/bill-ocr-readme.md
Normal file
@@ -0,0 +1,8 @@
|
||||
Required infrastructure setup
1. Create an AI user with access to the required S3 buckets and Textract permissions.
2. Create an SQS queue and an SNS topic, plus a role granting `sns:Publish`. Add `sqs:ReceiveMessage` and `sqs:DeleteMessage` to the application profile.
3. Two SNS roles were created; the Textract role is the correct one — the other was created manually from incorrect instructions.
|
||||
|
||||
TODO:
* Create a dedicated bucket for uploads, or move them to the regular location.
* Add the required environment variables.
|
||||
465
server/ai/bill-ocr/bill-ocr.js
Normal file
465
server/ai/bill-ocr/bill-ocr.js
Normal file
@@ -0,0 +1,465 @@
|
||||
const { TextractClient, StartExpenseAnalysisCommand, GetExpenseAnalysisCommand, AnalyzeExpenseCommand } = require("@aws-sdk/client-textract");
|
||||
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
|
||||
const { SQSClient, ReceiveMessageCommand, DeleteMessageCommand } = require("@aws-sdk/client-sqs");
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { getTextractJobKey, setTextractJob, getTextractJob, getFileType, getPdfPageCount, hasActiveJobs } = require("./bill-ocr-helpers");
|
||||
const { extractInvoiceData, processScanData } = require("./bill-ocr-normalize");
|
||||
const { generateBillFormData } = require("./bill-ocr-generator");
|
||||
const logger = require("../../utils/logger");
|
||||
const _ = require("lodash");
|
||||
|
||||
// Initialize AWS clients
|
||||
const awsConfig = {
|
||||
region: process.env.AWS_AI_REGION || "ca-central-1",
|
||||
credentials: {
|
||||
accessKeyId: process.env.AWS_AI_ACCESS_KEY_ID,
|
||||
secretAccessKey: process.env.AWS_AI_SECRET_ACCESS_KEY,
|
||||
}
|
||||
};
|
||||
|
||||
const textractClient = new TextractClient(awsConfig);
|
||||
const s3Client = new S3Client(awsConfig);
|
||||
const sqsClient = new SQSClient(awsConfig);
|
||||
|
||||
let redisPubClient = null;
|
||||
|
||||
|
||||
/**
 * Initialize the bill-ocr module with the shared Redis client. Must run
 * before any handler that reads or writes job state.
 * @param {Object} pubClient - Redis cluster client
 */
function initializeBillOcr(pubClient) {
    redisPubClient = pubClient;
}
|
||||
|
||||
/**
 * Check whether a Textract job is tracked in Redis for this environment.
 * @param {string} textractJobId
 * @returns {Promise<boolean>}
 * @throws {Error} when the module Redis client is not initialized
 */
async function jobExists(textractJobId) {
    if (!redisPubClient) {
        throw new Error('Redis client not initialized. Call initializeBillOcr first.');
    }
    const exists = await redisPubClient.exists(getTextractJobKey(textractJobId));
    return Boolean(exists);
}
|
||||
|
||||
/**
 * HTTP handler: accept an uploaded invoice (multer `req.file`) and OCR it.
 * - Images and single-page PDFs are analyzed synchronously -> 200 with data.
 * - Multi-page PDFs start an async Textract job -> 202 with a status URL.
 * - Unsupported file types -> 400; unexpected failures -> 500.
 * Body params jobid, bodyshopid, partsorderid are passed through to the
 * bill form generator / async job context.
 */
async function handleBillOcr(req, res) {
    // Check if file was uploaded
    if (!req.file) {
        return res.status(400).send({ error: 'No file uploaded.' });
    }

    // The uploaded file is available in request file
    const uploadedFile = req.file;
    const { jobid, bodyshopid, partsorderid } = req.body;
    logger.log("bill-ocr-start", "DEBUG", req.user.email, jobid, null);

    try {
        const fileType = getFileType(uploadedFile);
        // Images are always processed synchronously (single page)
        if (fileType === 'image') {
            const processedData = await processSinglePageDocument(uploadedFile.buffer);
            const billForm = await generateBillFormData({ processedData: processedData, jobid, bodyshopid, partsorderid, req: req });
            // The raw Textract response is omitted from the log payload (large).
            logger.log("bill-ocr-single-complete", "DEBUG", req.user.email, jobid, { ..._.omit(processedData, "originalTextractResponse"), billForm });

            return res.status(200).json({
                success: true,
                status: 'COMPLETED',
                data: { ...processedData, billForm },
                message: 'Invoice processing completed'
            });
        } else if (fileType === 'pdf') {
            // Check the number of pages in the PDF
            const pageCount = await getPdfPageCount(uploadedFile.buffer);

            if (pageCount === 1) {
                // Process synchronously for single-page documents
                const processedData = await processSinglePageDocument(uploadedFile.buffer);
                const billForm = await generateBillFormData({ processedData: processedData, jobid, bodyshopid, partsorderid, req: req });
                logger.log("bill-ocr-single-complete", "DEBUG", req.user.email, jobid, { ..._.omit(processedData, "originalTextractResponse"), billForm });
                return res.status(200).json({
                    success: true,
                    status: 'COMPLETED',
                    data: { ...processedData, billForm },
                    message: 'Invoice processing completed'
                });
            }
            // Start the Textract job (non-blocking) for multi-page documents
            const jobInfo = await startTextractJob(uploadedFile.buffer, { jobid, bodyshopid, partsorderid });
            logger.log("bill-ocr-multipage-start", "DEBUG", req.user.email, jobid, jobInfo);

            // 202 Accepted: the client polls statusUrl until the job finishes.
            return res.status(202).json({
                success: true,
                textractJobId: jobInfo.jobId,
                message: 'Invoice processing started',
                statusUrl: `/ai/bill-ocr/status/${jobInfo.jobId}`
            });

        } else {
            logger.log("bill-ocr-unsupported-filetype", "WARN", req.user.email, jobid, { fileType });

            return res.status(400).json({
                error: 'Unsupported file type',
                message: 'Please upload a PDF or supported image file (JPEG, PNG, TIFF)'
            });
        }
    } catch (error) {
        logger.log("bill-ocr-error", "ERROR", req.user.email, jobid, { error: error.message, stack: error.stack });
        return res.status(500).json({
            error: 'Failed to start invoice processing',
            message: error.message
        });
    }
}
|
||||
|
||||
/**
 * HTTP handler: report the status of an async (multi-page) OCR job.
 * 400 when no job id; 404 when Redis has no entry; 200 {status} while
 * in progress; 200 {status, data} when COMPLETED (generating and caching
 * the bill form on first read); 500 when the Textract job FAILED or
 * bill-form generation throws.
 */
async function handleBillOcrStatus(req, res) {
    const { textractJobId } = req.params;

    if (!textractJobId) {
        logger.log("bill-ocr-status-error", "WARN", req.user.email, null, { error: 'No textractJobId found in params' });
        return res.status(400).json({ error: 'Job ID is required' });

    }
    const jobStatus = await getTextractJob({ redisPubClient, textractJobId });

    if (!jobStatus) {
        return res.status(404).json({ error: 'Job not found' });
    }

    if (jobStatus.status === 'COMPLETED') {
        // Generate billForm on-demand if not already generated
        let billForm = jobStatus.data?.billForm;

        if (!billForm && jobStatus.context) {
            try {
                billForm = await generateBillFormData({
                    processedData: jobStatus.data,
                    jobid: jobStatus.context.jobid,
                    bodyshopid: jobStatus.context.bodyshopid,
                    partsorderid: jobStatus.context.partsorderid,
                    req: req // Now we have request context!
                });
                logger.log("bill-ocr-multipage-complete", "DEBUG", req.user.email, jobStatus.context.jobid, { ...jobStatus.data, billForm });

                // Cache the billForm back to Redis for future requests
                await setTextractJob({
                    redisPubClient,
                    textractJobId,
                    jobData: {
                        ...jobStatus,
                        data: {
                            ...jobStatus.data,
                            billForm
                        }
                    }
                });
            } catch (error) {
                logger.log("bill-ocr-multipage-error", "ERROR", req.user.email, jobStatus.context.jobid, { ...jobStatus.data, error: error.message, stack: error.stack });

                // OCR itself succeeded, only form generation failed.
                return res.status(500).send({
                    status: 'COMPLETED',
                    error: 'Data processed but failed to generate bill form',
                    message: error.message,
                    data: jobStatus.data // Still return the raw processed data
                });

            }
        }

        return res.status(200).send({
            status: 'COMPLETED',
            data: {
                ...jobStatus.data,
                billForm
            }
        });
    } else if (jobStatus.status === 'FAILED') {
        logger.log("bill-ocr-multipage-failed", "ERROR", req.user.email, jobStatus.context.jobid, { ...jobStatus.data, error: jobStatus.error, });

        return res.status(500).json({
            status: 'FAILED',
            error: jobStatus.error
        });
    } else {
        // Still queued / in progress; client keeps polling.
        return res.status(200).json({
            status: jobStatus.status
        });
    }
}
|
||||
|
||||
/**
 * Process a single-page document synchronously via Textract AnalyzeExpense.
 * @param {Buffer} pdfBuffer - PDF or image bytes
 * @returns {Promise<Object>} normalized scan data plus the raw Textract
 *   response under originalTextractResponse
 */
async function processSinglePageDocument(pdfBuffer) {
    const command = new AnalyzeExpenseCommand({ Document: { Bytes: pdfBuffer } });
    const textractResponse = await textractClient.send(command);

    const processedData = processScanData(extractInvoiceData(textractResponse));
    return { ...processedData, originalTextractResponse: textractResponse };
}
|
||||
|
||||
/**
 * Upload a multi-page PDF to S3 and start an asynchronous Textract expense
 * analysis job; completion is delivered via SNS -> SQS (see
 * processSQSMessages). Job state is tracked in Redis keyed by the Textract
 * job id.
 * @param {Buffer} pdfBuffer
 * @param {Object} context - { jobid, bodyshopid, partsorderid }; stored in
 *   Redis so the status endpoint can generate the bill form later
 * @returns {Promise<{jobId: string}>}
 * @throws {Error} when a required AWS environment variable is missing
 */
async function startTextractJob(pdfBuffer, context = {}) {
    // Upload PDF to S3 temporarily for Textract async processing
    const { bodyshopid, jobid } = context;
    const s3Bucket = process.env.AWS_AI_BUCKET;
    const snsTopicArn = process.env.AWS_TEXTRACT_SNS_TOPIC_ARN;
    const snsRoleArn = process.env.AWS_TEXTRACT_SNS_ROLE_ARN;

    if (!s3Bucket) {
        throw new Error('AWS_AI_BUCKET environment variable is required');
    }
    if (!snsTopicArn) {
        throw new Error('AWS_TEXTRACT_SNS_TOPIC_ARN environment variable is required');
    }
    if (!snsRoleArn) {
        throw new Error('AWS_TEXTRACT_SNS_ROLE_ARN environment variable is required');
    }

    const uploadId = uuidv4();
    const s3Key = `textract-temp/${bodyshopid}/${jobid}/${uploadId}.pdf`; //TODO Update Keys structure to something better.

    // Upload to S3
    const uploadCommand = new PutObjectCommand({
        Bucket: s3Bucket,
        Key: s3Key,
        Body: pdfBuffer,
        ContentType: 'application/pdf' //Hard coded - we only support PDFs for multi-page
    });
    await s3Client.send(uploadCommand);

    // Start async Textract expense analysis with SNS notification
    const startCommand = new StartExpenseAnalysisCommand({
        DocumentLocation: {
            S3Object: {
                Bucket: s3Bucket,
                Name: s3Key
            }
        },
        NotificationChannel: {
            SNSTopicArn: snsTopicArn,
            RoleArn: snsRoleArn
        },
        // uploadId doubles as the idempotency token for StartExpenseAnalysis.
        ClientRequestToken: uploadId
    });

    const startResult = await textractClient.send(startCommand);
    const textractJobId = startResult.JobId;

    // Store job info in Redis using textractJobId as the key
    await setTextractJob(
        {
            redisPubClient,
            textractJobId,
            jobData: {
                status: 'IN_PROGRESS',
                s3Key: s3Key,
                uploadId: uploadId,
                startedAt: new Date().toISOString(),
                context: context // Store the context for later use
            }
        }
    );

    return {
        jobId: textractJobId
    };
}
|
||||
|
||||
// Process SQS messages from Textract completion notifications
/**
 * Poll the shared Textract completion queue once. Skips polling entirely
 * when no job in this environment is IN_PROGRESS (hasActiveJobs), so idle
 * servers do not hold long-poll connections. Messages belonging to other
 * environments are left on the queue (only processed messages are deleted).
 * All errors are logged, never thrown - this runs from a timer.
 */
async function processSQSMessages() {
    const queueUrl = process.env.AWS_TEXTRACT_SQS_QUEUE_URL;

    if (!queueUrl) {
        logger.log("bill-ocr-error", "ERROR", "api", null, { message: "AWS_TEXTRACT_SQS_QUEUE_URL not configured" });
        return;
    }

    // Only poll if there are active multi-page jobs in progress
    const hasActive = await hasActiveJobs({ redisPubClient });
    if (!hasActive) {
        return;
    }

    try {
        const receiveCommand = new ReceiveMessageCommand({
            QueueUrl: queueUrl,
            MaxNumberOfMessages: 10,
            WaitTimeSeconds: 20, // long poll
            MessageAttributeNames: ['All']
        });

        const result = await sqsClient.send(receiveCommand);

        if (result.Messages && result.Messages.length > 0) {
            logger.log("bill-ocr-sqs-processing", "DEBUG", "api", null, { message: `Processing ${result.Messages.length} messages from SQS` });
            for (const message of result.Messages) {
                try {
                    // Environment-level filtering: check if this message belongs to this environment
                    const shouldProcess = await shouldProcessMessage(message);

                    if (shouldProcess) {
                        await handleTextractNotification(message);
                        // Delete message after successful processing
                        const deleteCommand = new DeleteMessageCommand({
                            QueueUrl: queueUrl,
                            ReceiptHandle: message.ReceiptHandle
                        });
                        await sqsClient.send(deleteCommand);
                    }
                } catch (error) {

                    // Per-message failure: log and continue with the rest of the batch.
                    logger.log("bill-ocr-sqs-processing-error", "ERROR", "api", null, { message, error: error.message, stack: error.stack });

                }
            }
        }
    } catch (error) {
        logger.log("bill-ocr-sqs-receiving-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
    }
}
|
||||
|
||||
/**
 * Environment filter for the shared SQS queue: only process notifications
 * whose Textract job id is tracked in this environment's Redis.
 * Unparseable messages are logged and skipped (returns false).
 * @param {Object} message - raw SQS message
 * @returns {Promise<boolean>}
 */
async function shouldProcessMessage(message) {
    try {
        const envelope = JSON.parse(message.Body);
        const notification = JSON.parse(envelope.Message);
        // Environment-specific Redis prefix makes this an ownership check.
        return await jobExists(notification.JobId);
    } catch (error) {
        logger.log("bill-ocr-message-check-error", "DEBUG", "api", null, { message: "Error checking if message should be processed", error: error.message, stack: error.stack });
        // If we can't parse the message, don't process it
        return false;
    }
}
|
||||
|
||||
/**
 * Handle one SNS->SQS Textract completion notification.
 * On SUCCEEDED: fetch and normalize the results, then mark the Redis job
 * COMPLETED (the bill form is generated later, on demand, by the status
 * endpoint). On FAILED: mark the job FAILED with Textract's status message.
 * Unknown job ids (e.g. expired Redis entries) are logged and ignored.
 * @param {Object} message - raw SQS message wrapping an SNS envelope
 */
async function handleTextractNotification(message) {
    const body = JSON.parse(message.Body);
    let snsMessage
    try {
        snsMessage = JSON.parse(body.Message);
    } catch (error) {
        logger.log("bill-ocr-handle-textract-error", "DEBUG", "api", null, { message: "Error parsing SNS message - invalid message format.", error: error.message, stack: error.stack, body });
        return;
    }

    const textractJobId = snsMessage.JobId;
    const status = snsMessage.Status;

    // Get job info from Redis
    const jobInfo = await getTextractJob({ redisPubClient, textractJobId });

    if (!jobInfo) {
        logger.log("bill-ocr-job-not-found", "DEBUG", "api", null, { message: `Job info not found in Redis for Textract job ID: ${textractJobId}`, textractJobId, snsMessage });
        return;
    }

    if (status === 'SUCCEEDED') {
        // Retrieve the results
        const { processedData, originalResponse } = await retrieveTextractResults(textractJobId);

        // Store the processed data - billForm will be generated on-demand in the status endpoint
        await setTextractJob(
            {
                redisPubClient,
                textractJobId,
                jobData: {
                    ...jobInfo,
                    status: 'COMPLETED',
                    data: {
                        ...processedData,
                        originalTextractResponse: originalResponse
                    },
                    completedAt: new Date().toISOString()
                }
            }
        );
    } else if (status === 'FAILED') {
        await setTextractJob(
            {
                redisPubClient,
                textractJobId,
                jobData: {
                    ...jobInfo,
                    status: 'FAILED',
                    error: snsMessage.StatusMessage || 'Textract job failed',
                    completedAt: new Date().toISOString()
                }
            }
        );
    }
}
|
||||
|
||||
/**
 * Fetch every result page for a completed Textract expense job and
 * normalize the combined response.
 * @param {string} textractJobId
 * @returns {Promise<{processedData: Object, originalResponse: Object}>}
 */
async function retrieveTextractResults(textractJobId) {
    const allExpenseDocuments = [];
    let nextToken = null;

    // Follow NextToken pagination until exhausted.
    do {
        const page = await textractClient.send(new GetExpenseAnalysisCommand({
            JobId: textractJobId,
            NextToken: nextToken
        }));

        if (page.ExpenseDocuments) {
            allExpenseDocuments.push(...page.ExpenseDocuments);
        }
        nextToken = page.NextToken;
    } while (nextToken);

    // Keep the complete original response alongside the normalized data.
    const fullTextractResponse = { ExpenseDocuments: allExpenseDocuments };
    return {
        processedData: processScanData(extractInvoiceData(fullTextractResponse)),
        originalResponse: fullTextractResponse
    };
}
|
||||
|
||||
/**
 * Begin polling SQS for Textract completion notifications every 10 seconds.
 * Call once at server startup.
 * @returns {NodeJS.Timeout} interval handle (pass to clearInterval to stop)
 */
function startSQSPolling() {
    const POLL_INTERVAL_MS = 10000;
    return setInterval(() => {
        processSQSMessages().catch((error) => {
            logger.log("bill-ocr-sqs-poll-error", "ERROR", "api", null, { message: error.message, stack: error.stack });
        });
    }, POLL_INTERVAL_MS);
}
|
||||
|
||||
|
||||
// Express-facing API: initializeBillOcr must run at startup (wires Redis),
// startSQSPolling starts the async-completion consumer, and the two
// handlers serve the upload and status routes.
module.exports = {
    initializeBillOcr,
    handleBillOcr,
    handleBillOcrStatus,
    startSQSPolling
};
|
||||
@@ -8,6 +8,5 @@ exports.podium = require("./podium").default;
|
||||
exports.emsUpload = require("./emsUpload").default;
|
||||
exports.carfax = require("./carfax").default;
|
||||
exports.carfaxRps = require("./carfax-rps").default;
|
||||
exports.vehicletype = require("./vehicletype/vehicletype").default;
|
||||
exports.documentAnalytics = require("./analytics/documents").default;
|
||||
exports.chatterApi = require("./chatter-api").default;
|
||||
|
||||
@@ -250,6 +250,8 @@ const CreateRepairOrderTag = (job, errorCallback) => {
|
||||
},
|
||||
InsuranceCompany: job.ins_co_nm || "",
|
||||
Claim: job.clm_no || "",
|
||||
Deductible: job.ded_amt || 0,
|
||||
PolicyNo: job.policy_no || "",
|
||||
DMSAllocation: job.dms_allocation || "",
|
||||
Contacts: {
|
||||
CSR: job.employee_csr_rel
|
||||
|
||||
@@ -1,126 +0,0 @@
|
||||
[
|
||||
"PROMASTER 1500",
|
||||
"PROMASTER 2500",
|
||||
"PROMASTER CITY",
|
||||
"NV 1500",
|
||||
"NV 200",
|
||||
"NV 2500",
|
||||
"NV 3500",
|
||||
"NV1500",
|
||||
"NV200",
|
||||
"NV2500",
|
||||
"NV3500",
|
||||
"SPRINTER",
|
||||
"E150 ECONOLINE CARGO VAN",
|
||||
"E150 ECONOLINE XL",
|
||||
"E250 ECONOLINE CARGO",
|
||||
"E250 ECONOLINE CARGO (AMALGAM)",
|
||||
"E250 ECONOLINE CARGO (INSPECT)",
|
||||
"E250 ECONOLINE CARGO VAN EXT",
|
||||
"E250 ECONOLINE SUPER CARGO VAN",
|
||||
"E350 CUTAWAY VAN",
|
||||
"E350 ECONO SD CARGO VAN EXT",
|
||||
"E350 ECONOLINE CARGO VAN",
|
||||
"E350 ECONOLINE CUTAWAY",
|
||||
"E350 ECONOLINE SD CARGO VAN",
|
||||
"E350 ECONOLINE SD XL",
|
||||
"E350 ECONOLINE SD XL EXT",
|
||||
"E350 ECONOLINE SD XLT",
|
||||
"E350 ECONOLINE SD XLT EXT",
|
||||
"E350 SD CUTAWAY",
|
||||
"E450",
|
||||
"E450 ECONOLINE",
|
||||
"E450 ECONOLINE SD",
|
||||
"E450 ECONOLINE SD CUTAWAY",
|
||||
"TRANSIT 150 WB 130 CARGO VAN",
|
||||
"TRANSIT 150 WB 130 XLT",
|
||||
"TRANSIT 150 WB 148 CARGO VAN",
|
||||
"TRANSIT 250 WB 130 CARGO VAN",
|
||||
"TRANSIT 250 WB 148 CARGO VAN",
|
||||
"TRANSIT 250 WB 148 EL CARGO",
|
||||
"TRANSIT 350 WB 148 CARGO VAN",
|
||||
"TRANSIT 350 WB 148 EL CARGO",
|
||||
"TRANSIT CONNECT XL CARGO VAN",
|
||||
"TRANSIT CONNECT XLT CARGO VAN",
|
||||
"250 TRANSIT",
|
||||
"CITY EXPRESS LS CARGO VAN",
|
||||
"CITY EXPRESS LT CARGO VAN",
|
||||
"EXPRESS 1500",
|
||||
"EXPRESS 1500 CARGO VAN",
|
||||
"EXPRESS 1500 LS",
|
||||
"EXPRESS 1500 LT",
|
||||
"EXPRESS 2500 CARGO VAN",
|
||||
"EXPRESS 2500 CARGO VAN EXT",
|
||||
"EXPRESS 2500 LS",
|
||||
"EXPRESS 2500 LT",
|
||||
"EXPRESS 3500",
|
||||
"EXPRESS 3500 CARGO VAN",
|
||||
"EXPRESS 3500 CARGO VAN EXT",
|
||||
"EXPRESS 3500 EXT",
|
||||
"EXPRESS 3500 LS",
|
||||
"EXPRESS 3500 LS EXT",
|
||||
"EXPRESS 3500 LT",
|
||||
"EXPRESS 3500 LT EXT",
|
||||
"G3500 EXPRESS CUTAWAY",
|
||||
"SAVANA 1500 CARGO VAN",
|
||||
"SAVANA 1500 SL",
|
||||
"SAVANA 1500 SLE",
|
||||
"SAVANA 2500",
|
||||
"2500 SAVANA",
|
||||
"SAVANA 2500 CARGO VAN",
|
||||
"SAVANA 2500 CARGO VAN EXT",
|
||||
"SAVANA 2500 LT",
|
||||
"SAVANA 2500 SLE",
|
||||
"SAVANA 3500",
|
||||
"SAVANA 3500 CARGO VAN",
|
||||
"SAVANA 3500 CARGO VAN EXT",
|
||||
"SAVANA 3500 EXT",
|
||||
"SAVANA 3500 LT EXT",
|
||||
"SAVANA 3500 SLE EXT",
|
||||
"SAVANA G3500 CUTAWAY",
|
||||
"SAVANA G4500 CUTAWAY",
|
||||
"EXPRESS 1500 LS CARGO VAN",
|
||||
"G20 SPORTVAN",
|
||||
"NV 3500 S V8 CARGO VAN",
|
||||
"E-150",
|
||||
"E-250",
|
||||
"E-350",
|
||||
"E-450",
|
||||
"E150",
|
||||
"E250",
|
||||
"E350",
|
||||
"TRANSIT",
|
||||
"CITY",
|
||||
"CITY EXPRESS",
|
||||
"EXPRESS",
|
||||
"EXPRESS 2500",
|
||||
"G3500",
|
||||
"SAVANA",
|
||||
"SAVANA 1500",
|
||||
"CHEVY EXPRESS G2500",
|
||||
"CLUBWAGON E350",
|
||||
"TRANSIT CONNECT",
|
||||
"SPRINTER 2500",
|
||||
"TRANSIT 150",
|
||||
"ECONOLINE E250",
|
||||
"TRANSIT 250",
|
||||
"ECONOLINE E350",
|
||||
"NV3500 HD",
|
||||
"TRANSIT 350HD",
|
||||
"ECONOLINE E150",
|
||||
"E250 ECONOLINE",
|
||||
"C/V",
|
||||
"E350 CHSCAB",
|
||||
"G1500 CHEVY EXPRESS",
|
||||
"2500 SPRINTER",
|
||||
"E150 ECONOLINE",
|
||||
"350 TRANSIT",
|
||||
"E450 CUTAWAY",
|
||||
"PROMASTER 3500",
|
||||
"CHEVY EXPRESS G3500",
|
||||
"SAVANA G3500",
|
||||
"1500 PROMASTER",
|
||||
"2500 EXPRESS",
|
||||
"3500 EXPRESS",
|
||||
"3500 SPRINTER"
|
||||
]
|
||||
@@ -1,33 +0,0 @@
|
||||
[
|
||||
"GRAND CARAVAN",
|
||||
"GRANDCARAVAN",
|
||||
"GRAND CARAVAN CREW",
|
||||
"GRAND CARAVAN CV",
|
||||
"GRAND CARAVAN CVP",
|
||||
"GRAND CARAVAN SE",
|
||||
"GRAND CARAVAN SXT",
|
||||
"CARAVAN CV",
|
||||
"SIENNA CE V6",
|
||||
"SIENNA LE V6",
|
||||
"SIENNA XLE V6",
|
||||
"SIENNA",
|
||||
"ODYSSEY",
|
||||
"SEDONA",
|
||||
"PACIFICA (NEW)",
|
||||
"QUEST",
|
||||
"CARAVAN",
|
||||
"MONTANA SV6",
|
||||
"FREESTAR",
|
||||
"UPLANDER",
|
||||
"MONTANA",
|
||||
"VOYAGER",
|
||||
"ENTOURAGE",
|
||||
"PACIFICA",
|
||||
"CARNIVAL",
|
||||
"VENTURE",
|
||||
"SAFARI",
|
||||
"VANAGON",
|
||||
"WINDSTAR",
|
||||
"TOWN&COUNTRY",
|
||||
"ROUTAN"
|
||||
]
|
||||
@@ -1,485 +0,0 @@
|
||||
[
|
||||
"EDGE SEL",
|
||||
"ESCAPE",
|
||||
"ESCAPE SE",
|
||||
"ESCAPE SEL",
|
||||
"ESCAPE XLT V6",
|
||||
"EXPEDITION",
|
||||
"EXPEDITION LIMITED",
|
||||
"EXPEDITION MAX",
|
||||
"EXPEDITION MAX LIMITED",
|
||||
"EXPLORER",
|
||||
"EXCURSION",
|
||||
"EXPLORER LIMITED",
|
||||
"EXPLORER PLATINUM ECOBOOST",
|
||||
"EXPLORER XLT",
|
||||
"FLEX",
|
||||
"FLEX SE",
|
||||
"ECOSPORT",
|
||||
"ESCAPE HYBRID",
|
||||
"MUSTANG MACH-E",
|
||||
"BRONCO",
|
||||
"BRONCO SPORT",
|
||||
"TRAILBLAZER",
|
||||
"BLAZER LT",
|
||||
"CHEROKEE",
|
||||
"CHEROKEE CLASSIC",
|
||||
"CHEROKEE COUNTRY",
|
||||
"CHEROKEE LIMITED",
|
||||
"CHEROKEE NORTH",
|
||||
"CHEROKEE OVERLAND",
|
||||
"CHEROKEE SPORT",
|
||||
"CHEROKEE TRAILHAWK",
|
||||
"CJ",
|
||||
"CJ7",
|
||||
"CJ7 RENEGADE",
|
||||
"COMMANDER",
|
||||
"COMMANDER LIMITED",
|
||||
"COMMANDER SPORT",
|
||||
"COMPASS",
|
||||
"COMPASS HIGH ALTITUDE",
|
||||
"COMPASS LATITUDE",
|
||||
"COMPASS LIMITED",
|
||||
"COMPASS NORTH",
|
||||
"COMPASS SPORT",
|
||||
"COMPASS TRAILHAWK",
|
||||
"GLADIATOR OVERLAND",
|
||||
"GLADIATOR RUBICON",
|
||||
"GRAND CHEROKEE LAREDO",
|
||||
"GRAND CHEROKEE LIMITED",
|
||||
"GRAND CHEROKEE OVERLAND",
|
||||
"GRAND CHEROKEE SE",
|
||||
"GRAND CHEROKEE SRT",
|
||||
"GRAND CHEROKEE SRT8",
|
||||
"GRAND CHEROKEE SUMMIT",
|
||||
"GRAND CHEROKEE TRACKHAWK",
|
||||
"GRAND CHEROKEE TRAILHAWK",
|
||||
"GRAND CHEROKEE",
|
||||
"GRANDCHEROKEE",
|
||||
"LIBERTY LIMITED",
|
||||
"LIBERTY RENEGADE",
|
||||
"LIBERTY SPORT",
|
||||
"LIBERTY",
|
||||
"PATRIOT",
|
||||
"PATRIOT HIGH ALTITUDE",
|
||||
"PATRIOT LATITUDE",
|
||||
"PATRIOT LIMITED",
|
||||
"PATRIOT NORTH",
|
||||
"PATRIOT SPORT",
|
||||
"RENEGADE LIMITED",
|
||||
"RENEGADE NORTH",
|
||||
"RENEGADE SPORT",
|
||||
"RENEGADE TRAILHAWK",
|
||||
"TJ",
|
||||
"TJ RUBICON",
|
||||
"TJ SAHARA",
|
||||
"TJ SPORT",
|
||||
"TJ UNLIMITED",
|
||||
"WRANGLER",
|
||||
"WRANGLER RUBICON",
|
||||
"WRANGLER SAHARA",
|
||||
"WRANGLER SPORT",
|
||||
"WRANGLER UNLIMITED",
|
||||
"WRANGLER UNLIMITED 70TH ANNIV",
|
||||
"WRANGLER UNLIMITED RUBICON",
|
||||
"WRANGLER UNLIMITED SAHARA",
|
||||
"WRANGLER UNLIMITED SPORT",
|
||||
"WRANGLER UNLIMITED X",
|
||||
"WRANGLER X",
|
||||
"YJ WRANGLER",
|
||||
"AVIATOR",
|
||||
"AVIATOR RESERVE",
|
||||
"MKC",
|
||||
"MKC RESERVE",
|
||||
"MKC SELECT",
|
||||
"MKT",
|
||||
"MKT ECOBOOST",
|
||||
"MKX",
|
||||
"MKX RESERVE",
|
||||
"NAUTILUS RESERVE",
|
||||
"NAUTILUS RESERVE V6",
|
||||
"NAVIGATOR",
|
||||
"NAVIGATOR L",
|
||||
"NAVIGATOR L RESERVE",
|
||||
"NAVIGATOR L SELECT",
|
||||
"NAVIGATOR RESERVE",
|
||||
"PILOT",
|
||||
"PILOT BLACK EDITION",
|
||||
"PILOT ELITE",
|
||||
"PILOT EX",
|
||||
"PILOT EX-L",
|
||||
"PILOT GRANITE",
|
||||
"PILOT LX",
|
||||
"PILOT SE",
|
||||
"PILOT SE-L",
|
||||
"PILOT TOURING",
|
||||
"DURANGO R/T",
|
||||
"DURANGO SLT PLUS",
|
||||
"DURANGO SRT",
|
||||
"DURANGO",
|
||||
"JOURNEY",
|
||||
"JOURNEY CROSSROAD",
|
||||
"JOURNEY CVP",
|
||||
"JOURNEY LIMITED",
|
||||
"JOURNEY R/T",
|
||||
"JOURNEY SXT",
|
||||
"NITRO SE",
|
||||
"NITRO",
|
||||
"K1500 SUBURBAN",
|
||||
"SUBURBAN 1500 LT",
|
||||
"SUBURBAN 1500 LTZ",
|
||||
"SUBURBAN 1500 PREMIER",
|
||||
"SUBURBAN 2500 LS",
|
||||
"TAHOE LT",
|
||||
"TRAVERSE LS",
|
||||
"TRAVERSE LT",
|
||||
"TRAVERSE PREMIER",
|
||||
"TRAX LT",
|
||||
"TRAX PREMIER",
|
||||
"UPLANDER LT EXT",
|
||||
"SUBURBAN",
|
||||
"TAHOE",
|
||||
"TRAVERSE",
|
||||
"TRAX",
|
||||
"UPLANDER",
|
||||
"YUKON",
|
||||
"YUKON DENALI",
|
||||
"YUKON XL",
|
||||
"YUKON XL DENALI",
|
||||
"EQUINOX LS",
|
||||
"EQUINOX LT",
|
||||
"EQUINOX PREMIER",
|
||||
"EQUINOX",
|
||||
"RAV4 LE",
|
||||
"RAV4 XLE",
|
||||
"HIGHLANDER SPORT V6",
|
||||
"4RUNNER SR5 V6",
|
||||
"RAV4",
|
||||
"RAV4 HYBRID",
|
||||
"RAV4 XLE HYBRID",
|
||||
"HIGHLANDER",
|
||||
"4RUNNER",
|
||||
"SEQUOIA",
|
||||
"PATHFINDER SE",
|
||||
"PATHFINDER SL",
|
||||
"PATHFINDER",
|
||||
"MURANO PLATINUM",
|
||||
"MURANO SV",
|
||||
"MURANO",
|
||||
"TUCSON",
|
||||
"TERRAIN",
|
||||
"SORENTO",
|
||||
"EDGE",
|
||||
"KICKS",
|
||||
"QASHQAI",
|
||||
"SANTA FE",
|
||||
"ARMADA",
|
||||
"TELLURIDE",
|
||||
"PALISADE",
|
||||
"SELTOS",
|
||||
"TORRENT",
|
||||
"C-HR",
|
||||
"SPORTAGE",
|
||||
"VENZA",
|
||||
"ACADIA",
|
||||
"CR-V",
|
||||
"HR-V",
|
||||
"CX-5",
|
||||
"CX-50",
|
||||
"CX-7",
|
||||
"CX-9",
|
||||
"CX-3",
|
||||
"Q3",
|
||||
"Q5",
|
||||
"Q7",
|
||||
"Q8",
|
||||
"JUKE SV",
|
||||
"JUKE",
|
||||
"ROGUE",
|
||||
"ROGUE SV",
|
||||
"XTERRA",
|
||||
"COROLLA CROSS",
|
||||
"ACADIA DENALI",
|
||||
"TAURUS X",
|
||||
"MACAN",
|
||||
"FJ CRUISER",
|
||||
"BRONCO SPORT BADLANDS",
|
||||
"ESCALADE",
|
||||
"RX 350",
|
||||
"KONA",
|
||||
"MDX",
|
||||
"RDX",
|
||||
"COOPER COUNTRYMAN",
|
||||
"V70",
|
||||
"OUTLANDER",
|
||||
"RIO5",
|
||||
"GLC300 COUPE",
|
||||
"ENCORE",
|
||||
"SRX",
|
||||
"SANTA FE SPORT",
|
||||
"NX 300",
|
||||
"WRANGLER UNLIMITE",
|
||||
"WRANGLER JK UNLIM",
|
||||
"RANGEROVER EVOQUE",
|
||||
"CROSSTREK",
|
||||
"FORESTER",
|
||||
"TIGUAN",
|
||||
"XV CROSSTREK",
|
||||
"ENDEAVOR",
|
||||
"RX 330",
|
||||
"ATLAS",
|
||||
"XC90",
|
||||
"TOUAREG",
|
||||
"STELVIO",
|
||||
"RANGE ROVER SPORT",
|
||||
"GLE350D",
|
||||
"EX35",
|
||||
"RVR",
|
||||
"MONTERO",
|
||||
"X-TRAIL",
|
||||
"GRAND VITARA",
|
||||
"TRIBUTE",
|
||||
"X3",
|
||||
"XC60",
|
||||
"GLK250 BLUETEC",
|
||||
"ENVOY",
|
||||
"ML350 BLUETEC",
|
||||
"ENVISION",
|
||||
"FX35",
|
||||
"X1",
|
||||
"VENUE",
|
||||
"TAOS",
|
||||
"KONA ELECTRIC",
|
||||
"OUTLANDER PHEV",
|
||||
"PASSPORT",
|
||||
"H3",
|
||||
"EXPLORERSPORTTRAC",
|
||||
"F-PACE",
|
||||
"ML320 BLUETEC",
|
||||
"REGAL SPORTBACK",
|
||||
"DISCOVERY SPORT",
|
||||
"RENDEZVOUS",
|
||||
"XC70",
|
||||
"COMPASS (NEW)",
|
||||
"CUBE",
|
||||
"V60 CROSS COUNTRY",
|
||||
"QX70",
|
||||
"X6",
|
||||
"ELEMENT",
|
||||
"RX 400H",
|
||||
"VUE",
|
||||
"RANGE ROVER VELAR",
|
||||
"E-PACE",
|
||||
"RAV4 PRIME",
|
||||
"LX 570",
|
||||
"GX 470",
|
||||
"EX37",
|
||||
"GLE43",
|
||||
"NAUTILUS",
|
||||
"XT6",
|
||||
"RX 450H",
|
||||
"ESCALADE ESV",
|
||||
"OUTLOOK",
|
||||
"CAYENNE",
|
||||
"XC90 PLUG-IN",
|
||||
"MODEL X",
|
||||
"MODEL Y",
|
||||
"GLC300",
|
||||
"SANTA FE HYBRID",
|
||||
"G63",
|
||||
"XV CROSSTREK HYBR",
|
||||
"JX35",
|
||||
"JIMMY",
|
||||
"TUCSON HYBRID",
|
||||
"XC40 ELECTRIC",
|
||||
"RX 300",
|
||||
"ML320",
|
||||
"WRANGLER JK UNLIMITED",
|
||||
"POLICE INTERCEPTOR UTILITY",
|
||||
"WRANGLER JK",
|
||||
"TRIBECA",
|
||||
"E-TRON SPORTBACK",
|
||||
"500X",
|
||||
"RX 350H",
|
||||
"GL350 BLUETEC",
|
||||
"WRANGLER UNLIMITED 4XE",
|
||||
"GV80",
|
||||
"GL550",
|
||||
"Q5 E",
|
||||
"H2 SUV",
|
||||
"Q5 HYBRID",
|
||||
"IONIQ 5",
|
||||
"SQ5 SPORTBACK",
|
||||
"LEVANTE",
|
||||
"TONALE",
|
||||
"GLE43 COUPE",
|
||||
"GRAND CHEROKEE WK",
|
||||
"DEFENDER",
|
||||
"NX 450H+",
|
||||
"ML400",
|
||||
"LX 600",
|
||||
"RX 450HL",
|
||||
"SORENTO HYBRID",
|
||||
"NX 350",
|
||||
"TRACKER",
|
||||
"GLE450",
|
||||
"Q5 SPORTBACK",
|
||||
"CR-V HYBRID",
|
||||
"LX 470",
|
||||
"EQS580 SUV",
|
||||
"H2",
|
||||
"EV9",
|
||||
"SORENTO PLUG-IN",
|
||||
"LYRIQ",
|
||||
"GLE550",
|
||||
"RX 500H",
|
||||
"X1 SAV",
|
||||
"E-TRON S SPORTBACK",
|
||||
"ML500",
|
||||
"GRAND HIGHLANDER HYBRID",
|
||||
"RS Q8",
|
||||
"GLS550",
|
||||
"GLS580",
|
||||
"IX",
|
||||
"CAYENNE COUPE",
|
||||
"SOLTERRA",
|
||||
"PATHFINDER HYBRID",
|
||||
"Q8 E-TRON",
|
||||
"TX 350",
|
||||
"TX 500H",
|
||||
"EQUINOX EV",
|
||||
"NAUTILUS HYBRID",
|
||||
"TRAVERSE LIMITED",
|
||||
"CX-70",
|
||||
"SANTA FE XL",
|
||||
"RENEGADE",
|
||||
"QX50",
|
||||
"ECLIPSE CROSS",
|
||||
"QX80",
|
||||
"X5",
|
||||
"X3",
|
||||
"X1",
|
||||
"X4",
|
||||
"ENCLAVE",
|
||||
"ENCORE GX",
|
||||
"CAYENNE HYBRID",
|
||||
"SOUL",
|
||||
"GX 460",
|
||||
"UX 250H",
|
||||
"XT5",
|
||||
"GLE53",
|
||||
"XT4",
|
||||
"SQ7",
|
||||
"NX 350H",
|
||||
"GLK350",
|
||||
"GLE350",
|
||||
"NX 300H",
|
||||
"NX 200T",
|
||||
"RANGE ROVER EVOQUE",
|
||||
"GLS450",
|
||||
"TERRAIN DENALI",
|
||||
"GRAND CHEROKEE L",
|
||||
"GLE400",
|
||||
"TUCSON PLUG-IN",
|
||||
"BLAZER",
|
||||
"ASCENT",
|
||||
"HIGHLANDER HYBRID",
|
||||
"ATLAS CROSS SPORT",
|
||||
"XC40",
|
||||
"VENZA HYBRID",
|
||||
"GLA45",
|
||||
"GLB250",
|
||||
"GRAND HIGHLANDER",
|
||||
"GV70",
|
||||
"NIRO",
|
||||
"NIRO EV",
|
||||
"GLA250",
|
||||
"ESCAPE PLUG-IN",
|
||||
"WAGONEER",
|
||||
"CX-30",
|
||||
"QX60",
|
||||
"GRAND CHEROKEE 4XE",
|
||||
"SPORTAGE HYBRID",
|
||||
"EV6",
|
||||
"TONALE PLUG-IN",
|
||||
"GLC43 COUPE",
|
||||
"X2",
|
||||
"RX 350L",
|
||||
"HORNET",
|
||||
"ENVISTA",
|
||||
"LEVANTE S",
|
||||
"SPORTAGE PLUG-IN",
|
||||
"ORLANDO",
|
||||
"X5 M",
|
||||
"EXPLORER HYBRID",
|
||||
"FREESTYLE",
|
||||
"CORSAIR",
|
||||
"K1500 YUKON XL",
|
||||
"RANGE ROVER",
|
||||
"SUV W/O LABOR",
|
||||
"ID.4",
|
||||
"CX-90",
|
||||
"X7",
|
||||
"CORSAIR PLUG-IN",
|
||||
"ESCALADE EXT",
|
||||
"QX55",
|
||||
"DISCOVERY",
|
||||
"BOLT EUV",
|
||||
"C40 ELECTRIC",
|
||||
"LR4",
|
||||
"GRAND WAGONEER",
|
||||
"XC60 PLUG-IN",
|
||||
"LR2",
|
||||
"EQE350 SUV",
|
||||
"COROLLA CROSS HYBRID",
|
||||
"SOUL EV",
|
||||
"GRECALE",
|
||||
"SUV W/O LABOR",
|
||||
"QX30",
|
||||
"SQ5",
|
||||
"NIRO PLUG-IN",
|
||||
"BORREGO",
|
||||
"CX-90 PLUG-IN",
|
||||
"XL-7",
|
||||
"SUV W/O LABOR",
|
||||
"SUV W/O LABOR",
|
||||
"I-PACE",
|
||||
"HORNET PLUG-IN",
|
||||
"UX 300H",
|
||||
"ML320 CDI",
|
||||
"VERACRUZ",
|
||||
"SQ8",
|
||||
"GLE53 COUPE",
|
||||
"ZDX",
|
||||
"9-7X",
|
||||
"ARIYA",
|
||||
"ASPEN",
|
||||
"AVIATOR PLUG-IN",
|
||||
"B9 TRIBECA",
|
||||
"BRAVADA",
|
||||
"ENVOY XL",
|
||||
"EQB350",
|
||||
"EQB350 SUV",
|
||||
"ESCALADE-V",
|
||||
"E-TRON",
|
||||
"FX37",
|
||||
"GL320 CDI",
|
||||
"GLADIATOR",
|
||||
"GLC43",
|
||||
"GLE450 COUPE",
|
||||
"GLE63",
|
||||
"GV60",
|
||||
"MKT TOWN CAR",
|
||||
"ML350",
|
||||
"ML550",
|
||||
"ML63",
|
||||
"NX 250",
|
||||
"Q4 E-TRON",
|
||||
"Q8 E-TRON SPORTBACK",
|
||||
"QX4",
|
||||
"QX56",
|
||||
"SANTA FE PLUG-IN",
|
||||
"UX 200",
|
||||
"WAGONEER L",
|
||||
"XB"
|
||||
]
|
||||
@@ -1,567 +0,0 @@
|
||||
[
|
||||
"MARK LT",
|
||||
|
||||
"F-150",
|
||||
"F-250",
|
||||
"F-350",
|
||||
"F-450",
|
||||
"F-550",
|
||||
"F-650",
|
||||
"F100 PICKUP",
|
||||
"F150 FX2 SUPERCAB",
|
||||
"F150 FX4 PICKUP",
|
||||
"F150 FX4 SUPERCAB",
|
||||
"F150 FX4 SUPERCREW",
|
||||
"F150 HARLEY DAVIDSON SUPERCAB",
|
||||
"F150 HARLEY DAVIDSON SUPERCREW",
|
||||
"F150 KING RANCH SUPERCREW",
|
||||
"F150 LARIAT FX4 SUPERCREW",
|
||||
"F150 LARIAT HARLEY DAVIDSON SC",
|
||||
"F150 LARIAT KING RANCH SUPCREW",
|
||||
"F150 LARIAT LIMITED SUPERCREW",
|
||||
"F150 LARIAT PICKUP",
|
||||
"F150 LARIAT SUPERCAB",
|
||||
"F150 LARIAT SUPERCAB (AMALGAM)",
|
||||
"F150 LARIAT SUPERCREW",
|
||||
"F150 LARIAT SUPERCREW (AMALGA)",
|
||||
"F150 LIMITED SUPERCREW",
|
||||
"F150 PICKUP",
|
||||
"F150 PLATINUM SUPERCREW",
|
||||
"F150 RAPTOR SUPERCAB",
|
||||
"F150 RAPTOR SUPERCREW",
|
||||
"F150 STX PICKUP",
|
||||
"F150 STX SUPERCAB",
|
||||
"F150 SUPERCAB",
|
||||
"F150 SUPERCREW",
|
||||
"F150 SUPERCREW (AMALGAMATED)",
|
||||
"F150 SVT RAPTOR SUPERCAB",
|
||||
"F150 XL PICKUP",
|
||||
"F150 XL SUPERCAB",
|
||||
"F150 XL SUPERCREW",
|
||||
"F150 XLT LARIAT SUPERCAB",
|
||||
"F150 XLT PICKUP",
|
||||
"F150 XLT SUPERCAB",
|
||||
"F150 XLT SUPERCREW",
|
||||
"F150 XLT SUPERCREW (AMALGAMAT)",
|
||||
"F150 XTR SUPERCAB",
|
||||
"F250 PICKUP",
|
||||
"F250 SD CREW CAB",
|
||||
"F250 SD FX4 CREW CAB",
|
||||
"F250 SD FX4 SUPERCAB",
|
||||
"F250 SD KING RANCH CREW CAB",
|
||||
"F250 SD LARIAT CREW CAB",
|
||||
"F250 SD LARIAT CREW CAB (AMAL)",
|
||||
"F250 SD LARIAT PICKUP",
|
||||
"F250 SD LARIAT SUPERCAB",
|
||||
"F250 SD LIMITED CREW CAB",
|
||||
"F250 SD PLATINUM CREW CAB",
|
||||
"F250 SD SUPERCAB",
|
||||
"F250 SD XL CREW CAB",
|
||||
"F250 SD XL PICKUP",
|
||||
"F250 SD XL SUPERCAB",
|
||||
"F250 SD XLT CREW CAB",
|
||||
"F250 SD XLT PICKUP",
|
||||
"F250 SD XLT SUPERCAB",
|
||||
"F250 SUPERCAB",
|
||||
"F250 XL CREW CAB",
|
||||
"F350 CREW CAB",
|
||||
"F350 PICKUP",
|
||||
"F350 PICKUP 2WD",
|
||||
"F350 SD CABELAS CREW CAB",
|
||||
"F350 SD CREW CAB",
|
||||
"F350 SD FX4 CREW CAB",
|
||||
"F350 SD FX4 SUPERCAB",
|
||||
"F350 SD HARLEY DAVIDSON",
|
||||
"F350 SD KING RANCH CREW CAB",
|
||||
"F350 SD LARIAT CREW CAB",
|
||||
"F350 SD LARIAT CREW CAB (AMAL)",
|
||||
"F350 SD LARIAT KING RANCH",
|
||||
"F350 SD LARIAT SUPERCAB",
|
||||
"F350 SD LIMITED CREW CAB",
|
||||
"F350 SD PICKUP",
|
||||
"F350 SD PLATINUM CREW CAB",
|
||||
"F350 SD SUPERCAB",
|
||||
"F350 SD XL CREW CAB",
|
||||
"F350 SD XL PICKUP",
|
||||
"F350 SD XL SUPERCAB",
|
||||
"F350 SD XLT CREW CAB",
|
||||
"F350 SD XLT SUPERCAB",
|
||||
"F350 SUPER DUTY",
|
||||
"F350 SUPER DUTY XL",
|
||||
"F350 XL PICKUP",
|
||||
"F450",
|
||||
"F450 Pickup",
|
||||
"F450 SD KING RANCH CREW CAB",
|
||||
"F450 SD LARIAT CREW CAB",
|
||||
"F450 SD PICKUP",
|
||||
"F450 SD PLATINUM CREW CAB",
|
||||
"F450 SD XL",
|
||||
"F450 SD XL CREW CAB",
|
||||
"F450 SD XL PICKUP",
|
||||
"F450 SD XLT CREW CAB",
|
||||
"F450 SUPER DUTY XLT",
|
||||
"F550",
|
||||
"F550 SD",
|
||||
"F550 SD XL",
|
||||
"F550 SD XL PICKUP",
|
||||
"F550 SD XLT CREW CAB",
|
||||
"F550 SD XLT SUPERCAB",
|
||||
"F550 SUPER DUTY",
|
||||
"F550 SUPER DUTY XL",
|
||||
"F550 SUPER DUTY XLT",
|
||||
"F550 SUPER DUTY XLT CREW CAB",
|
||||
"F550 XL",
|
||||
"F650 SD XLT SUPERCAB",
|
||||
"F68",
|
||||
"F750 XL",
|
||||
|
||||
"RANGER",
|
||||
"RANGER EDGE SUPERCAB",
|
||||
"RANGER FX4 SUPERCAB",
|
||||
"RANGER LARIAT SUPERCREW",
|
||||
"RANGER SPORT SUPERCAB",
|
||||
"RANGER STX SUPERCAB",
|
||||
"RANGER SUPERCAB",
|
||||
"RANGER XL",
|
||||
"RANGER XL SUPERCAB",
|
||||
"RANGER XLT",
|
||||
"RANGER XLT SUPERCAB",
|
||||
"RANGER XLT SUPERCREW",
|
||||
|
||||
"FRONTIER LE CREW CAB V6",
|
||||
"FRONTIER NISMO CREW CAB V6",
|
||||
"FRONTIER NISMO KING CAB V6",
|
||||
"FRONTIER PRO-4X CREW CAB V6",
|
||||
"FRONTIER PRO-4X KING CAB V6",
|
||||
"FRONTIER S KING CAB",
|
||||
"FRONTIER SC CREW CAB V6",
|
||||
"FRONTIER SC V6",
|
||||
"FRONTIER SE CREW CAB V6",
|
||||
"FRONTIER SE KING CAB V6",
|
||||
"FRONTIER SL CREW CAB V6",
|
||||
"FRONTIER SV CREW CAB V6",
|
||||
"FRONTIER SV KING CAB V6",
|
||||
"FRONTIER XE KING CAB",
|
||||
"FRONTIER XE KING CAB V6",
|
||||
"KING CAB",
|
||||
|
||||
"TITAN 5.6 LE CREW CAB",
|
||||
"TITAN 5.6 LE KING CAB",
|
||||
"TITAN 5.6 MIDNIGHT CREW CAB",
|
||||
"TITAN 5.6 PLATINUM RESERVE CC",
|
||||
"TITAN 5.6 PRO-4X CREW CAB",
|
||||
"TITAN 5.6 PRO-4X KING CAB",
|
||||
"TITAN 5.6 S CREW CAB",
|
||||
"TITAN 5.6 SE CREW CAB",
|
||||
"TITAN 5.6 SE KING CAB",
|
||||
"TITAN 5.6 SL CREW CAB",
|
||||
"TITAN 5.6 SV CREW CAB",
|
||||
"TITAN 5.6 SV KING CAB",
|
||||
"TITAN 5.6 XE CREW CAB",
|
||||
"TITAN 5.6 XE KING CAB",
|
||||
"TITAN XD PLATINUM CREW CAB",
|
||||
"TITAN XD PRO-4X CREW CAB",
|
||||
"TITAN XD S CREW CAB",
|
||||
"TITAN XD SL CREW CAB",
|
||||
"TITAN XD SV CREW CAB",
|
||||
|
||||
"PICKUP SR5",
|
||||
|
||||
"TACOMA",
|
||||
"TACOMA ACCESS CAB",
|
||||
"TACOMA DOUBLE CAB V6",
|
||||
"TACOMA LIMITED DOUBLE CAB V6",
|
||||
"TACOMA PRERUNNER DOUBLE CAB V6",
|
||||
"TACOMA PRERUNNER V6 ACCESS CAB",
|
||||
"TACOMA PRERUNNER XTRACAB",
|
||||
"TACOMA PRERUNNER XTRACAB V6",
|
||||
"TACOMA SR5 DOUBLE CAB V6",
|
||||
"TACOMA SR5 V6 ACCESS CAB",
|
||||
"TACOMA SR5 V6 XTRACAB",
|
||||
"TACOMA V6 ACCESS CAB",
|
||||
"TACOMA XTRACAB",
|
||||
"TACOMA XTRACAB V6",
|
||||
"TUNDRA ACCESS CAB V8",
|
||||
"TUNDRA DOUBLE CAB V8",
|
||||
"TUNDRA LIMITED ACCESS CAB V8",
|
||||
"TUNDRA LIMITED SR5 DBLCAB V8",
|
||||
"TUNDRA LIMITED V8",
|
||||
"TUNDRA LIMITED V8 CREWMAX",
|
||||
"TUNDRA LIMITED V8 DOUBLE CAB",
|
||||
"TUNDRA PLATINUM V8 CREWMAX",
|
||||
"TUNDRA SR DOUBLE CAB V8",
|
||||
"TUNDRA SR V8",
|
||||
"TUNDRA SR5 DOUBLE CAB V8",
|
||||
"TUNDRA SR5 TRD DOUBLE CAB V8",
|
||||
"TUNDRA SR5 V8 CREWMAX",
|
||||
"TUNDRA V8",
|
||||
"TUNDRA V8 CREWMAX",
|
||||
"XTRACAB LONG BOX",
|
||||
|
||||
"AVALANCHE 1500",
|
||||
"AVALANCHE 1500 LS",
|
||||
"AVALANCHE 1500 LS Z71",
|
||||
"AVALANCHE 1500 LT",
|
||||
"AVALANCHE 1500 LT Z71",
|
||||
"AVALANCHE 1500 LTZ",
|
||||
"C/R 10/1500 4+CAB",
|
||||
"C/R 10/1500 PICKUP",
|
||||
"C/R 20/2500 4+CAB",
|
||||
"C/R 20/2500 PICKUP",
|
||||
"C3500",
|
||||
|
||||
"COLORADO",
|
||||
"COLORADO EXT CAB",
|
||||
"COLORADO LS",
|
||||
"COLORADO LS CREW CAB",
|
||||
"COLORADO LS EXT CAB",
|
||||
"COLORADO LT",
|
||||
"COLORADO LT CREW CAB",
|
||||
"COLORADO LT EXT CAB",
|
||||
"COLORADO WT CREW CAB",
|
||||
"COLORADO WT EXT CAB",
|
||||
"COLORADO Z71 CREW CAB",
|
||||
"COLORADO Z71 EXT CAB",
|
||||
"COLORADO ZR2 CREW CAB",
|
||||
"COLORADO ZR2 EXT CAB",
|
||||
|
||||
"HHR LS PANEL",
|
||||
"K/V 10/1500 4+CAB",
|
||||
"K/V 10/1500 PICKUP",
|
||||
"K/V 20/2500 4+CAB",
|
||||
"K/V 20/2500 PICKUP",
|
||||
"K/V 30/3500 4+CAB",
|
||||
"Pickup K3500",
|
||||
"Pickup Silverado C2500 HD",
|
||||
"S10 4+CAB",
|
||||
"S10 LS 4+CAB",
|
||||
"SILVERADO 1500",
|
||||
"SILVERADO 1500 CHEYENNE CREW",
|
||||
"SILVERADO 1500 CREW CAB",
|
||||
"SILVERADO 1500 CREW CAB (AMAL)",
|
||||
"SILVERADO 1500 CUST TRAIL DC",
|
||||
"SILVERADO 1500 CUSTOM CREW CAB",
|
||||
"SILVERADO 1500 CUSTOM DC",
|
||||
"SILVERADO 1500 CUSTOM TRAIL CC",
|
||||
"SILVERADO 1500 DOUBLE (AMALGA)",
|
||||
"SILVERADO 1500 EXT CAB",
|
||||
"SILVERADO 1500 HD LS CREW CAB",
|
||||
"SILVERADO 1500 HD LT CREW CAB",
|
||||
"SILVERADO 1500 HIGH COUNTRY CC",
|
||||
"SILVERADO 1500 HYBRID CREW CAB",
|
||||
"SILVERADO 1500 LS",
|
||||
"SILVERADO 1500 LS CREW CAB",
|
||||
"SILVERADO 1500 LS DOUBLE CAB",
|
||||
"SILVERADO 1500 LS EXT CAB",
|
||||
"SILVERADO 1500 LT",
|
||||
"SILVERADO 1500 LT CC (AMALGAM)",
|
||||
"SILVERADO 1500 LT CREW CAB",
|
||||
"SILVERADO 1500 LT DOUBLE CAB",
|
||||
"SILVERADO 1500 LT EXT CAB",
|
||||
"SILVERADO 1500 LT TRAIL CC",
|
||||
"SILVERADO 1500 LT TRAIL DC",
|
||||
"SILVERADO 1500 LTZ CREW CAB",
|
||||
"SILVERADO 1500 LTZ DOUBLE CAB",
|
||||
"SILVERADO 1500 LTZ EXT CAB",
|
||||
"SILVERADO 1500 RST CREW CAB",
|
||||
"SILVERADO 1500 RST DOUBLE CAB",
|
||||
"SILVERADO 1500 SS EXT CAB",
|
||||
"SILVERADO 1500 WT",
|
||||
"SILVERADO 1500 WT CREW CAB",
|
||||
"SILVERADO 1500 WT DOUBLE CAB",
|
||||
"SILVERADO 1500 WT EXT CAB",
|
||||
"SILVERADO 2500 EXT CAB",
|
||||
"SILVERADO 2500 HD",
|
||||
"SILVERADO 2500 HD CREW CAB",
|
||||
"SILVERADO 2500 HD EXT CAB",
|
||||
"SILVERADO 2500 HD HC CREW CAB",
|
||||
"SILVERADO 2500 HD LS CREW CAB",
|
||||
"SILVERADO 2500 HD LS EXT CAB",
|
||||
"SILVERADO 2500 HD LT",
|
||||
"SILVERADO 2500 HD LT CREW CAB",
|
||||
"SILVERADO 2500 HD LT DBL CAB",
|
||||
"SILVERADO 2500 HD LT EXT CAB",
|
||||
"SILVERADO 2500 HD LTZ CREW CAB",
|
||||
"SILVERADO 2500 HD LTZ DBL CAB",
|
||||
"SILVERADO 2500 HD LTZ EXT CAB",
|
||||
"SILVERADO 2500 HD WT",
|
||||
"SILVERADO 2500 HD WT CREW CAB",
|
||||
"SILVERADO 2500 HD WT DBL CAB",
|
||||
"SILVERADO 2500 HD WT EXT CAB",
|
||||
"SILVERADO 3500",
|
||||
"SILVERADO 3500 CREW CAB",
|
||||
"SILVERADO 3500 CREW CAB (AMAL)",
|
||||
"SILVERADO 3500 EXT CAB",
|
||||
"SILVERADO 3500 HC CREW CAB",
|
||||
"SILVERADO 3500 HD (AMALGAMATE)",
|
||||
"SILVERADO 3500 LS",
|
||||
"SILVERADO 3500 LS CREW CAB",
|
||||
"SILVERADO 3500 LS EXT CAB",
|
||||
"SILVERADO 3500 LT CREW CAB",
|
||||
"SILVERADO 3500 LT DOUBLE CAB",
|
||||
"SILVERADO 3500 LT EXT CAB",
|
||||
"SILVERADO 3500 LTZ CREW CAB",
|
||||
"SILVERADO 3500 LTZ EXT CAB",
|
||||
"SILVERADO 3500 WT CREW CAB",
|
||||
"Silverado 3500HD",
|
||||
|
||||
"B250 SPORTSMAN",
|
||||
|
||||
"DAKOTA CLUB CAB",
|
||||
"DAKOTA LARAMIE V8 CLUB CAB",
|
||||
"DAKOTA LARAMIE V8 QUAD CAB",
|
||||
"DAKOTA QUAD CAB",
|
||||
"DAKOTA SLT CREW CAB",
|
||||
"DAKOTA SLT EXT CAB",
|
||||
"DAKOTA SLT PLUS QUAD CAB",
|
||||
"DAKOTA SLT PLUS V8 CLUB CAB",
|
||||
"DAKOTA SLT PLUS V8 QUAD CAB",
|
||||
"DAKOTA SLT QUAD CAB",
|
||||
"DAKOTA SLT V8 CLUB CAB",
|
||||
"DAKOTA SLT V8 CREW CAB",
|
||||
"DAKOTA SLT V8 EXT CAB",
|
||||
"DAKOTA SLT V8 QUAD CAB",
|
||||
"DAKOTA SPORT V8",
|
||||
"DAKOTA SPORT V8 CLUB CAB",
|
||||
"DAKOTA SPORT V8 QUAD CAB",
|
||||
"DAKOTA ST CLUB CAB",
|
||||
"DAKOTA ST QUAD CAB",
|
||||
"DAKOTA ST V8 QUAD CAB",
|
||||
"DAKOTA SXT CREW CAB",
|
||||
"DAKOTA SXT EXT CAB",
|
||||
"DAKOTA SXT V8 CREW CAB",
|
||||
"DAKOTA SXT V8 EXT CAB",
|
||||
"DAKOTA V8 CLUB CAB",
|
||||
"DAKOTA V8 QUAD CAB",
|
||||
|
||||
"RAM 1500",
|
||||
"RAM 1500 BIG HORN CREW CAB",
|
||||
"RAM 1500 BIG HORN QUAD CAB",
|
||||
"RAM 1500 CLUB CAB",
|
||||
"RAM 1500 CREW CAB (AMALGAMATE)",
|
||||
"RAM 1500 EXPRESS",
|
||||
"RAM 1500 LARAMIE CREW (AMALGA)",
|
||||
"RAM 1500 LARAMIE CREW CAB",
|
||||
"RAM 1500 LARAMIE LONGHORN CREW",
|
||||
"RAM 1500 LARAMIE MEGA CAB",
|
||||
"RAM 1500 LARAMIE QUAD CAB",
|
||||
"RAM 1500 LARAMIE SLT QUAD CAB",
|
||||
"RAM 1500 LIMITED CREW CAB",
|
||||
"RAM 1500 LONGHORN CREW CAB",
|
||||
"RAM 1500 OUTDOORSMAN CREW CAB",
|
||||
"RAM 1500 OUTDOORSMAN QC (AMAL)",
|
||||
"RAM 1500 OUTDOORSMAN QUAD CAB",
|
||||
"RAM 1500 QUAD CAB",
|
||||
"RAM 1500 R/T",
|
||||
"RAM 1500 REBEL CREW CAB",
|
||||
"RAM 1500 REBEL QUAD CAB",
|
||||
"RAM 1500 SLT",
|
||||
"RAM 1500 SLT CREW (AMALGAMATE)",
|
||||
"RAM 1500 SLT CREW CAB",
|
||||
"RAM 1500 SLT MEGA CAB",
|
||||
"RAM 1500 SLT QUAD (AMALGAMATE)",
|
||||
"RAM 1500 SLT QUAD CAB",
|
||||
"RAM 1500 SPORT",
|
||||
"RAM 1500 SPORT CLUB CAB",
|
||||
"RAM 1500 SPORT CREW CAB",
|
||||
"RAM 1500 SPORT CREW CAB (AMAL)",
|
||||
"RAM 1500 SPORT QUAD CAB",
|
||||
"RAM 1500 ST",
|
||||
"RAM 1500 ST CREW CAB",
|
||||
"RAM 1500 ST QUAD CAB",
|
||||
"RAM 1500 SXT CREW CAB",
|
||||
"RAM 1500 SXT QUAD CAB",
|
||||
"RAM 1500 TRADESMAN CREW CAB",
|
||||
"RAM 1500 TRADESMAN QUAD CAB",
|
||||
"RAM 1500 TRX QUAD CAB",
|
||||
"RAM 2500",
|
||||
"RAM 2500 BIG HORN CREW CAB",
|
||||
"RAM 2500 BIG HORN MEGA CAB",
|
||||
"RAM 2500 CLUB CAB",
|
||||
"RAM 2500 LARAMIE CREW CAB",
|
||||
"RAM 2500 LARAMIE LONGHORN CREW",
|
||||
"RAM 2500 LARAMIE LONGHORN MEGA",
|
||||
"RAM 2500 LARAMIE MEGA CAB",
|
||||
"RAM 2500 LARAMIE QUAD CAB",
|
||||
"RAM 2500 LARAMIE SLT",
|
||||
"RAM 2500 LARAMIE SLT QUAD CAB",
|
||||
"RAM 2500 LIMITED CREW CAB",
|
||||
"RAM 2500 OUTDOORSMAN CREW CAB",
|
||||
"RAM 2500 POWER WAGON CREW CAB",
|
||||
"RAM 2500 QUAD CAB",
|
||||
"RAM 2500 SLT",
|
||||
"RAM 2500 SLT CREW CAB",
|
||||
"RAM 2500 SLT MEGA CAB",
|
||||
"RAM 2500 SLT QUAD CAB",
|
||||
"RAM 2500 SLT QUAD CAB (AMALGA)",
|
||||
"RAM 2500 SPORT QUAD CAB",
|
||||
"RAM 2500 ST",
|
||||
"RAM 2500 ST CREW CAB",
|
||||
"RAM 2500 ST QUAD CAB",
|
||||
"RAM 2500 SXT QUAD CAB",
|
||||
"RAM 2500 TRADESMAN",
|
||||
"RAM 2500 TRADESMAN CREW CAB",
|
||||
"RAM 2500 TRX CREW CAB",
|
||||
"RAM 2500 TRX QUAD CAB",
|
||||
"RAM 3500",
|
||||
"RAM 3500 4WD",
|
||||
"RAM 3500 BIG HORN CREW CAB",
|
||||
"RAM 3500 CREW CAB",
|
||||
"RAM 3500 CREW CAB (AMALGAMATE)",
|
||||
"RAM 3500 LARAMIE CREW CAB",
|
||||
"RAM 3500 LARAMIE LONGHORN CREW",
|
||||
"RAM 3500 LARAMIE LONGHORN MEGA",
|
||||
"RAM 3500 LARAMIE MEGA CAB",
|
||||
"RAM 3500 LARAMIE QUAD CAB",
|
||||
"RAM 3500 LARAMIE SLT",
|
||||
"RAM 3500 LARAMIE SLT QUAD CAB",
|
||||
"RAM 3500 LIMITED MEGA CAB",
|
||||
"RAM 3500 LONGHORN CREW CAB",
|
||||
"RAM 3500 QUAD CAB",
|
||||
"RAM 3500 SLT",
|
||||
"RAM 3500 SLT CREW CAB",
|
||||
"RAM 3500 SLT MEGA CAB",
|
||||
"RAM 3500 SLT QUAD CAB",
|
||||
"RAM 3500 SPORT QUAD CAB",
|
||||
"RAM 3500 ST",
|
||||
"RAM 3500 ST CREW CAB",
|
||||
"RAM 3500 ST QUAD CAB",
|
||||
"RAM 3500 TRX QUAD CAB",
|
||||
"RAM 4500",
|
||||
"RAM 4500 CREW CAB",
|
||||
"RAM 5500",
|
||||
"RAM 5500 CREW CAB",
|
||||
"W250 TURBO DIESEL",
|
||||
|
||||
"C Series 5500",
|
||||
"C/R 1500 4+CAB",
|
||||
"C/R 1500 PICKUP",
|
||||
"C/R 1500 SIERRA SL EXT CAB",
|
||||
"C/R 3500",
|
||||
"C/R 3500 PICKUP",
|
||||
"CANYON ALL TERRAIN CREW CAB",
|
||||
"CANYON CREW CAB",
|
||||
"CANYON DENALI CREW CAB",
|
||||
"CANYON EXT CAB",
|
||||
"CANYON SL",
|
||||
"CANYON SL EXT CAB",
|
||||
"CANYON SLE",
|
||||
"CANYON SLE CREW CAB",
|
||||
"CANYON SLE EXT CAB",
|
||||
"CANYON SLT CREW CAB",
|
||||
"CANYON SLT CREW CAB (AMALGAMA)",
|
||||
"K/V 1500 4+CAB",
|
||||
"K/V 1500 PICKUP",
|
||||
"K/V 2500 4+CAB",
|
||||
"K/V 2500 PICKUP",
|
||||
"K/V 3500 SIERRA SL CREW CAB",
|
||||
"K/V 3500 SIERRA SLE CREW CAB",
|
||||
"SIERRA 1500 AT4 CREW CAB",
|
||||
"SIERRA 1500 AT4 DOUBLE CAB",
|
||||
"SIERRA 1500 CREW CAB",
|
||||
"SIERRA 1500 CREW CAB (AMALGAM)",
|
||||
"SIERRA 1500 DENALI CREW CAB",
|
||||
"SIERRA 1500 DENALI EXT CAB",
|
||||
"SIERRA 1500 DOUBLE CAB",
|
||||
"SIERRA 1500 ELEVATION CREW CAB",
|
||||
"SIERRA 1500 ELEVATION DC",
|
||||
"SIERRA 1500 EXT CAB",
|
||||
"SIERRA 1500 HD CREW CAB",
|
||||
"SIERRA 1500 HD SLE CREW CAB",
|
||||
"SIERRA 1500 HD SLT CREW CAB",
|
||||
"SIERRA 1500 NEVADA EDITION",
|
||||
"SIERRA 1500 PICKUP",
|
||||
"SIERRA 1500 SL CREW CAB",
|
||||
"SIERRA 1500 SL EXT CAB",
|
||||
"SIERRA 1500 SL PICKUP",
|
||||
"SIERRA 1500 SLE CREW CAB",
|
||||
"SIERRA 1500 SLE DC (AMALGAMAT)",
|
||||
"SIERRA 1500 SLE DOUBLE CAB",
|
||||
"SIERRA 1500 SLE EXT CAB",
|
||||
"SIERRA 1500 SLE EXT CAB (AMAL)",
|
||||
"SIERRA 1500 SLE PICKUP",
|
||||
"SIERRA 1500 SLT CREW (AMALGAM)",
|
||||
"SIERRA 1500 SLT CREW CAB",
|
||||
"SIERRA 1500 SLT DOUBLE CAB",
|
||||
"SIERRA 1500 SLT EXT CAB",
|
||||
"SIERRA 1500 WT CREW CAB",
|
||||
"SIERRA 1500 WT EXT CAB",
|
||||
"SIERRA 1500 WT PICKUP",
|
||||
"SIERRA 2500 EXT CAB",
|
||||
"SIERRA 2500 HD AT4 CREW CAB",
|
||||
"SIERRA 2500 HD CREW CAB",
|
||||
"SIERRA 2500 HD DENALI CREW CAB",
|
||||
"SIERRA 2500 HD DOUBLE CAB",
|
||||
"SIERRA 2500 HD EXT CAB",
|
||||
"SIERRA 2500 HD PICKUP",
|
||||
"SIERRA 2500 HD SL EXT CAB",
|
||||
"SIERRA 2500 HD SL PICKUP",
|
||||
"SIERRA 2500 HD SLE CREW CAB",
|
||||
"SIERRA 2500 HD SLE DOUBLE CAB",
|
||||
"SIERRA 2500 HD SLE EXT CAB",
|
||||
"SIERRA 2500 HD SLE PICKUP",
|
||||
"SIERRA 2500 HD SLT CREW CAB",
|
||||
"SIERRA 2500 HD SLT DOUBLE CAB",
|
||||
"SIERRA 2500 HD SLT EXT CAB",
|
||||
"SIERRA 2500 HD WT CREW CAB",
|
||||
"SIERRA 2500 HD WT DOUBLE CAB",
|
||||
"SIERRA 2500 HD WT EXT CAB",
|
||||
"SIERRA 2500 HD WT PICKUP",
|
||||
"SIERRA 2500 SLE EXT CAB",
|
||||
"SIERRA 3500 AT4 CREW CAB",
|
||||
"SIERRA 3500 CREW CAB",
|
||||
"SIERRA 3500 DENALI CREW CAB",
|
||||
"SIERRA 3500 EXT CAB",
|
||||
"SIERRA 3500 PICKUP",
|
||||
"SIERRA 3500 SL CREW CAB",
|
||||
"SIERRA 3500 SLE",
|
||||
"SIERRA 3500 SLE CREW CAB",
|
||||
"SIERRA 3500 SLE EXT CAB",
|
||||
"SIERRA 3500 SLT CREW CAB",
|
||||
"SIERRA 3500 WT CREW CAB",
|
||||
"SONOMA",
|
||||
"SONOMA CREW CAB",
|
||||
"SONOMA EXT CAB",
|
||||
|
||||
"1500",
|
||||
"1500 Classic",
|
||||
"Pickup 1500",
|
||||
"Pickup 3500",
|
||||
"ProMaster 1500",
|
||||
|
||||
"RIDGELINE",
|
||||
"RIDGELINE BLACK EDITION",
|
||||
"RIDGELINE DX",
|
||||
"RIDGELINE EX-L",
|
||||
"RIDGELINE LX",
|
||||
"RIDGELINE RT",
|
||||
"RIDGELINE RTL",
|
||||
"RIDGELINE RTS",
|
||||
"RIDGELINE RTX",
|
||||
"RIDGELINE SE",
|
||||
"RIDGELINE SPORT",
|
||||
"RIDGELINE TOURING",
|
||||
"RIDGELINE VP",
|
||||
|
||||
"TITAN",
|
||||
"TACOMA",
|
||||
"TUNDRA",
|
||||
"AVALANCE",
|
||||
"COLORADO",
|
||||
"SILVERADO",
|
||||
"SILVERADO 1500",
|
||||
"SILVERADO 2500",
|
||||
"SILVERADO 3500",
|
||||
"DAKOTA",
|
||||
"RAM 1500",
|
||||
"RAM 2500",
|
||||
"RAM 3500",
|
||||
"RAM 4500",
|
||||
"RAM 5500",
|
||||
"CANYON",
|
||||
"SIERRA 1500",
|
||||
"SIERRA 2500",
|
||||
"SIERRA 3500",
|
||||
"SONOMA",
|
||||
"1500"
|
||||
]
|
||||
@@ -1,39 +0,0 @@
|
||||
const logger = require("../../utils/logger");
|
||||
const TrucksList = require("./trucks.json");
|
||||
const CargoVanList = require("./cargovans.json");
|
||||
const PassengerVanList = require("./passengervans.json");
|
||||
const SuvList = require("./suvs.json");
|
||||
|
||||
|
||||
/**
 * Express handler: classify a vehicle model string into a vehicle type.
 *
 * Expects `req.body.model` (string). Responds:
 * - 400 when `model` is missing or blank,
 * - 200 with `{ success: true, type, match }` from getVehicleType,
 * - 500 with the error message/stack on unexpected failure.
 */
const vehicletype = async (req, res) => {
  try {
    const { model } = req.body;
    if (!model || model.trim() === "") {
      res.status(400).json({ success: false, error: "Please provide a model" });
    } else {
      // BUGFIX: a stray bare `vehicle` identifier previously sat here and
      // threw a ReferenceError on every valid request; it has been removed.
      const type = getVehicleType(model.trim());
      res.status(200).json({ success: true, ...type });
    }
  } catch (error) {
    logger.log("vehicletype-error", "ERROR", req?.user?.email, null, {
      error: error.message,
      stack: error.stack
    });
    res.status(500).json({ error: error.message, stack: error.stack });
  }
};
|
||||
|
||||
/**
 * Look up a model name (case-insensitive) against the known vehicle lists.
 * Returns `{ type, match: true }` for the first list containing the model,
 * or `{ type: "PC", match: false }` when no list matches.
 * Check order matters and mirrors the lists' priority: trucks, passenger
 * vans, SUVs, then cargo vans.
 */
function getVehicleType(model) {
  const normalized = model.toUpperCase();

  // Ordered category table: first hit wins, same precedence as before.
  const categories = [
    { list: TrucksList, type: "TK" },
    { list: PassengerVanList, type: "PC" },
    { list: SuvList, type: "SUV" },
    { list: CargoVanList, type: "VN" }
  ];

  const hit = categories.find(({ list }) => list.includes(normalized));

  // Unknown models default to passenger car with match flagged false.
  return hit ? { type: hit.type, match: true } : { type: "PC", match: false };
}
|
||||
exports.default = vehicletype;
|
||||
@@ -86,6 +86,9 @@ async function FetchSubscriptions({ redisHelpers, socket, jobid, SubscriptionObj
|
||||
logRequest: false
|
||||
});
|
||||
const SubscriptionMeta = subscriptions.data.subscriptions.find((s) => s.subscriptionId === SubscriptionID);
|
||||
if (!SubscriptionMeta) {
|
||||
throw new Error(`Subscription metadata not found for SubscriptionID: ${SubscriptionID}`);
|
||||
}
|
||||
if (setSessionTransactionData) {
|
||||
await setSessionTransactionData(
|
||||
socket.id,
|
||||
@@ -102,11 +105,15 @@ async function FetchSubscriptions({ redisHelpers, socket, jobid, SubscriptionObj
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function GetDepartmentId({ apiName, debug = false, SubscriptionMeta, overrideDepartmentId }) {
|
||||
if (!apiName) throw new Error("apiName not provided. Unable to get department without apiName.");
|
||||
if (!SubscriptionMeta || !Array.isArray(SubscriptionMeta.apiDmsInfo)) {
|
||||
throw new Error("Subscription metadata missing apiDmsInfo.");
|
||||
}
|
||||
if (debug) {
|
||||
console.log("API Names & Departments ");
|
||||
console.log("===========");
|
||||
@@ -118,9 +125,8 @@ async function GetDepartmentId({ apiName, debug = false, SubscriptionMeta, overr
|
||||
.find((info) => info.name === apiName)?.departments; //Departments are categorized by API name and have an array of departments.
|
||||
|
||||
if (overrideDepartmentId) {
|
||||
return departmentIds && departmentIds.find(d => d.id === overrideDepartmentId)?.id
|
||||
return departmentIds && departmentIds.find((d) => d.id === overrideDepartmentId)?.id;
|
||||
} else {
|
||||
|
||||
return departmentIds && departmentIds[0] && departmentIds[0].id; //TODO: This makes the assumption that there is only 1 department.
|
||||
}
|
||||
}
|
||||
|
||||
@@ -180,22 +180,52 @@ async function FortellisSelectedCustomer({ socket, redisHelpers, selectedCustome
|
||||
getTransactionType(jobid),
|
||||
FortellisCacheEnums.txEnvelope
|
||||
);
|
||||
const DMSVid = await redisHelpers.getSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(JobData.id),
|
||||
FortellisCacheEnums.DMSVid
|
||||
);
|
||||
if (!JobData || !txEnvelope) {
|
||||
const friendlyMessage =
|
||||
"Fortellis export context was lost after reconnect. Click Post again to restart the Fortellis flow.";
|
||||
CreateFortellisLogEvent(socket, "WARN", friendlyMessage, {
|
||||
jobid,
|
||||
hasJobData: !!JobData,
|
||||
hasTxEnvelope: !!txEnvelope
|
||||
});
|
||||
socket.emit("export-failed", {
|
||||
title: "Fortellis",
|
||||
severity: "warning",
|
||||
errorCode: "FORTELLIS_CONTEXT_MISSING",
|
||||
friendlyMessage
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const DMSVid = await redisHelpers.getSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(JobData.id),
|
||||
FortellisCacheEnums.DMSVid
|
||||
);
|
||||
if (!DMSVid) {
|
||||
const friendlyMessage =
|
||||
"Fortellis vehicle context is missing after reconnect. Click Post again to restart the Fortellis flow.";
|
||||
CreateFortellisLogEvent(socket, "WARN", friendlyMessage, {
|
||||
jobid,
|
||||
hasDMSVid: !!DMSVid
|
||||
});
|
||||
socket.emit("export-failed", {
|
||||
title: "Fortellis",
|
||||
severity: "warning",
|
||||
errorCode: "FORTELLIS_CONTEXT_MISSING",
|
||||
friendlyMessage
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
let DMSCust;
|
||||
if (selectedCustomerId) {
|
||||
CreateFortellisLogEvent(socket, "DEBUG", `{3.1} Querying the Customer using Customer ID: ${selectedCustomerId}`);
|
||||
|
||||
//Get cust list from Redis. Return the item
|
||||
const DMSCustList = await getSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(jobid),
|
||||
FortellisCacheEnums.DMSCustList
|
||||
);
|
||||
const DMSCustList =
|
||||
(await getSessionTransactionData(socket.id, getTransactionType(jobid), FortellisCacheEnums.DMSCustList)) || [];
|
||||
const existingCustomerInDMSCustList = DMSCustList.find((c) => c.customerId === selectedCustomerId);
|
||||
DMSCust = existingCustomerInDMSCustList || {
|
||||
customerId: selectedCustomerId //This is the fall back in case it is the generic customer.
|
||||
@@ -306,7 +336,7 @@ async function FortellisSelectedCustomer({ socket, redisHelpers, selectedCustome
|
||||
//There was something wrong. Throw an error to trigger clean up.
|
||||
//throw new Error("Error posting DMS Batch Transaction");
|
||||
}
|
||||
} catch (error) {
|
||||
} catch {
|
||||
//Clean up the transaction and insert a faild error code
|
||||
// //Get the error code
|
||||
CreateFortellisLogEvent(socket, "DEBUG", `{6.1} Getting errors for Transaction ID ${DMSTransHeader.transID}`);
|
||||
@@ -336,6 +366,12 @@ async function FortellisSelectedCustomer({ socket, redisHelpers, selectedCustome
|
||||
stack: error.stack,
|
||||
data: error.errorData
|
||||
});
|
||||
socket.emit("export-failed", {
|
||||
title: "Fortellis",
|
||||
severity: "error",
|
||||
error: error.message,
|
||||
friendlyMessage: "Fortellis export failed. Please click Post again to retry."
|
||||
});
|
||||
await InsertFailedExportLog({
|
||||
socket,
|
||||
JobData,
|
||||
@@ -757,7 +793,13 @@ async function InsertDmsVehicle({ socket, redisHelpers, JobData, txEnvelope, DMS
|
||||
modelAbrev: txEnvelope.dms_model,
|
||||
// "modelDescription": "SILVERADO 1500 2WD EXT CAB LT",
|
||||
// "modelType": "T",
|
||||
modelYear: JobData.v_model_yr,
|
||||
modelYear:
|
||||
JobData.v_model_yr &&
|
||||
(JobData.v_model_yr < 100
|
||||
? JobData.v_model_yr >= (moment().year() + 1) % 100
|
||||
? 1900 + parseInt(JobData.v_model_yr, 10)
|
||||
: 2000 + parseInt(JobData.v_model_yr, 10)
|
||||
: JobData.v_model_yr),
|
||||
// "numberOfEngineCylinders": 4,
|
||||
odometerStatus: txEnvelope.kmout,
|
||||
// "options": [
|
||||
@@ -916,6 +958,10 @@ async function UpdateDmsVehicle({ socket, redisHelpers, JobData, DMSVeh, DMSCust
|
||||
delete DMSVehToSend.invoice;
|
||||
delete DMSVehToSend.inventoryAccount;
|
||||
|
||||
!DMSVehToSend.vehicle.engineNumber && delete DMSVehToSend.vehicle.engineNumber;
|
||||
!DMSVehToSend.vehicle.saleClassValue && DMSVehToSend.vehicle.saleClassValue === "MISC";
|
||||
!DMSVehToSend.vehicle.exteriorColor && delete DMSVehToSend.vehicle.exteriorColor;
|
||||
|
||||
const result = await MakeFortellisCall({
|
||||
...FortellisActions.UpdateVehicle,
|
||||
requestSearchParams: {},
|
||||
|
||||
@@ -1285,6 +1285,7 @@ exports.KAIZEN_QUERY = `query KAIZEN_EXPORT($start: timestamptz, $bodyshopid: uu
|
||||
date_repairstarted
|
||||
date_void
|
||||
dms_allocation
|
||||
ded_amt
|
||||
employee_body_rel {
|
||||
first_name
|
||||
last_name
|
||||
@@ -1380,6 +1381,7 @@ exports.KAIZEN_QUERY = `query KAIZEN_EXPORT($start: timestamptz, $bodyshopid: uu
|
||||
}
|
||||
parts_tax_rates
|
||||
plate_no
|
||||
policy_no
|
||||
rate_la1
|
||||
rate_la2
|
||||
rate_la3
|
||||
|
||||
17
server/routes/aiRoutes.js
Normal file
17
server/routes/aiRoutes.js
Normal file
@@ -0,0 +1,17 @@
|
||||
const express = require("express");
const multer = require("multer");

const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { handleBillOcr, handleBillOcrStatus } = require("../ai/bill-ocr/bill-ocr");

// Multer instance used to parse multipart/form-data uploads for bill scans.
const formDataParser = multer();

const router = express.Router();

// Every AI route requires an authenticated Firebase user and a user-scoped
// GraphQL client on the request.
router.use(validateFirebaseIdTokenMiddleware);
router.use(withUserGraphQLClientMiddleware);

// Kick off OCR for an uploaded bill scan, then poll its Textract job status.
router.post("/bill-ocr", formDataParser.single('billScan'), handleBillOcr);
router.get("/bill-ocr/status/:textractJobId", handleBillOcrStatus);

module.exports = router;
|
||||
@@ -144,20 +144,5 @@ router.post("/emsupload", validateFirebaseIdTokenMiddleware, data.emsUpload);
|
||||
// Redis Cache Routes
|
||||
router.post("/bodyshop-cache", eventAuthorizationMiddleware, updateBodyshopCache);
|
||||
|
||||
// Estimate Scrubber Vehicle Type
|
||||
router.post("/es/vehicletype", data.vehicletype);
|
||||
router.post("/analytics/documents", data.documentAnalytics);
|
||||
// Health Check for docker-compose-cluster load balancer, only available in development
|
||||
if (process.env.NODE_ENV === "development") {
|
||||
router.get("/health", (req, res) => {
|
||||
const healthStatus = {
|
||||
status: "healthy",
|
||||
timestamp: new Date().toISOString(),
|
||||
environment: process.env.NODE_ENV || "unknown",
|
||||
uptime: process.uptime()
|
||||
};
|
||||
res.status(200).json(healthStatus);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = router;
|
||||
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,6 +1,7 @@
|
||||
const { RRClient } = require("./lib/index.cjs");
|
||||
const { getRRConfigFromBodyshop } = require("./rr-config");
|
||||
const CreateRRLogEvent = require("./rr-logger-event");
|
||||
const { withRRRequestXml } = require("./rr-log-xml");
|
||||
const InstanceManager = require("../utils/instanceMgr").default;
|
||||
|
||||
/**
|
||||
@@ -217,14 +218,24 @@ const createRRCustomer = async ({ bodyshop, job, overrides = {}, socket }) => {
|
||||
try {
|
||||
response = await client.insertCustomer(safePayload, opts);
|
||||
// Very noisy; only show when log level is cranked to SILLY
|
||||
CreateRRLogEvent(socket, "SILLY", "{CU} insertCustomer: raw response", { response });
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"SILLY",
|
||||
"{CU} insertCustomer: raw response",
|
||||
withRRRequestXml(response, { response })
|
||||
);
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "ERROR", "RR insertCustomer transport error", {
|
||||
message: e?.message,
|
||||
code: e?.code,
|
||||
status: e?.meta?.status || e?.status,
|
||||
payload: safePayload
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
"RR insertCustomer transport error",
|
||||
withRRRequestXml(e, {
|
||||
message: e?.message,
|
||||
code: e?.code,
|
||||
status: e?.meta?.status || e?.status,
|
||||
payload: safePayload
|
||||
})
|
||||
);
|
||||
throw e;
|
||||
}
|
||||
|
||||
@@ -233,12 +244,17 @@ const createRRCustomer = async ({ bodyshop, job, overrides = {}, socket }) => {
|
||||
|
||||
let customerNo = data?.dmsRecKey;
|
||||
if (!customerNo) {
|
||||
CreateRRLogEvent(socket, "ERROR", "RR insertCustomer returned no dmsRecKey/custNo", {
|
||||
status: trx?.status,
|
||||
statusCode: trx?.statusCode,
|
||||
message: trx?.message,
|
||||
data
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
"RR insertCustomer returned no dmsRecKey/custNo",
|
||||
withRRRequestXml(response, {
|
||||
status: trx?.status,
|
||||
statusCode: trx?.statusCode,
|
||||
message: trx?.message,
|
||||
data
|
||||
})
|
||||
);
|
||||
|
||||
throw new Error(
|
||||
`RR insertCustomer returned no dmsRecKey (status=${trx?.status ?? "?"} code=${trx?.statusCode ?? "?"}${
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* @returns {number|null}
|
||||
*/
|
||||
const parseVendorStatusCode = (err) => {
|
||||
// Prefer explicit numeric props when available
|
||||
// Prefer explicit numeric props when available.
|
||||
const codeProp = err?.code ?? err?.statusCode ?? err?.meta?.status?.StatusCode ?? err?.status?.StatusCode;
|
||||
const num = Number(codeProp);
|
||||
if (!Number.isNaN(num) && num > 0) return num;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const { GraphQLClient } = require("graphql-request");
|
||||
const queries = require("../graphql-client/queries");
|
||||
const CreateRRLogEvent = require("./rr-logger-event");
|
||||
const { extractRRXmlPair } = require("./rr-log-xml");
|
||||
|
||||
/** Get bearer token from the socket (same approach used elsewhere) */
|
||||
const getAuthToken = (socket) =>
|
||||
@@ -178,11 +179,23 @@ const insertRRFailedExportLog = async ({ socket, jobId, job, bodyshop, error, cl
|
||||
const client = new GraphQLClient(endpoint, {});
|
||||
client.setHeaders({ Authorization: `Bearer ${token}` });
|
||||
|
||||
const { requestXml, responseXml } = extractRRXmlPair(error);
|
||||
const xmlFromError =
|
||||
requestXml || responseXml
|
||||
? {
|
||||
...(requestXml ? { request: requestXml } : {}),
|
||||
...(responseXml ? { response: responseXml } : {})
|
||||
}
|
||||
: undefined;
|
||||
|
||||
const meta = buildRRExportMeta({
|
||||
result,
|
||||
extra: {
|
||||
error: error?.message || String(error),
|
||||
classification: classification || undefined
|
||||
classification: classification || undefined,
|
||||
...(requestXml ? { requestXml } : {}),
|
||||
...(responseXml ? { responseXml } : {}),
|
||||
...(xmlFromError && !result?.xml ? { xml: xmlFromError } : {})
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const { buildRRRepairOrderPayload } = require("./rr-job-helpers");
|
||||
const { buildClientAndOpts } = require("./rr-lookup");
|
||||
const CreateRRLogEvent = require("./rr-logger-event");
|
||||
const { withRRRequestXml } = require("./rr-log-xml");
|
||||
const { extractRrResponsibilityCenters } = require("./rr-responsibility-centers");
|
||||
const CdkCalculateAllocations = require("./rr-calculate-allocations").default;
|
||||
const { resolveRROpCodeFromBodyshop } = require("./rr-utils");
|
||||
@@ -147,10 +148,7 @@ const createMinimalRRRepairOrder = async (args) => {
|
||||
|
||||
const response = await client.createRepairOrder(payload, finalOpts);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "RR minimal Repair Order created", {
|
||||
payload,
|
||||
response
|
||||
});
|
||||
CreateRRLogEvent(socket, "INFO", "RR minimal Repair Order created", withRRRequestXml(response, { payload, response }));
|
||||
|
||||
const data = response?.data || null;
|
||||
const statusBlocks = response?.statusBlocks || {};
|
||||
@@ -262,7 +260,7 @@ const updateRRRepairOrderWithFullData = async (args) => {
|
||||
CreateRRLogEvent(socket, "INFO", "RR allocations resolved for update", {
|
||||
hasAllocations: allocations.length > 0,
|
||||
count: allocations.length,
|
||||
allocationsPreview: allocations.slice(0, 2).map(a => ({
|
||||
allocationsPreview: allocations.slice(0, 2).map((a) => ({
|
||||
type: a?.type,
|
||||
code: a?.code,
|
||||
laborSale: a?.laborSale,
|
||||
@@ -322,12 +320,12 @@ const updateRRRepairOrderWithFullData = async (args) => {
|
||||
// Add roNo for linking to existing RO
|
||||
payload.roNo = String(roNo);
|
||||
payload.outsdRoNo = job?.ro_number || job?.id || undefined;
|
||||
|
||||
|
||||
// Keep rolabor - it's needed to register the job/OpCode accounts in Reynolds
|
||||
// Without this, Reynolds won't recognize the OpCode when we send rogg operations
|
||||
// The rolabor section tells Reynolds "these jobs exist" even with minimal data
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "Sending full data for early RO (using create with roNo)", {
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "Preparing full data for early RO (using create with roNo)", {
|
||||
roNo: String(roNo),
|
||||
hasRolabor: !!payload.rolabor,
|
||||
hasRogg: !!payload.rogg,
|
||||
@@ -338,10 +336,18 @@ const updateRRRepairOrderWithFullData = async (args) => {
|
||||
// Reynolds will merge this with the existing RO header
|
||||
const response = await client.createRepairOrder(payload, finalOpts);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "RR Repair Order full data sent", {
|
||||
payload,
|
||||
response
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"INFO",
|
||||
"Sending full data for early RO (using create with roNo)",
|
||||
withRRRequestXml(response, {
|
||||
roNo: String(roNo),
|
||||
hasRolabor: !!payload.rolabor,
|
||||
hasRogg: !!payload.rogg,
|
||||
payload,
|
||||
response
|
||||
})
|
||||
);
|
||||
|
||||
const data = response?.data || null;
|
||||
const statusBlocks = response?.statusBlocks || {};
|
||||
@@ -501,10 +507,7 @@ const exportJobToRR = async (args) => {
|
||||
|
||||
const response = await client.createRepairOrder(payload, finalOpts);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "RR raw Repair Order created", {
|
||||
payload,
|
||||
response
|
||||
});
|
||||
CreateRRLogEvent(socket, "INFO", "RR raw Repair Order created", withRRRequestXml(response, { payload, response }));
|
||||
|
||||
const data = response?.data || null;
|
||||
const statusBlocks = response?.statusBlocks || {};
|
||||
@@ -603,10 +606,15 @@ const finalizeRRRepairOrder = async (args) => {
|
||||
|
||||
const rrRes = await client.updateRepairOrder(payload, finalOpts);
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", "RR Repair Order finalized", {
|
||||
payload,
|
||||
response: rrRes
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"SILLY",
|
||||
"RR Repair Order finalized",
|
||||
withRRRequestXml(rrRes, {
|
||||
payload,
|
||||
response: rrRes
|
||||
})
|
||||
);
|
||||
|
||||
const data = rrRes?.data || null;
|
||||
const statusBlocks = rrRes?.statusBlocks || {};
|
||||
|
||||
@@ -52,6 +52,122 @@ const asN2 = (dineroLike) => {
|
||||
return amount.toFixed(2);
|
||||
};
|
||||
|
||||
/**
 * Normalize various "money-like" shapes to integer cents.
 * Accepted shapes, checked in priority order:
 * - Dinero instances (`getAmount()` — already integer cents, or `toUnit()` — units)
 * - `{ cents }` objects
 * - `{ amount, precision }` objects (amount scaled by 10^precision)
 * - plain numbers (treated as units, e.g. dollars)
 * - numeric strings (treated as units, e.g. "123.45")
 * Anything else (including null/undefined/"") yields 0.
 * @param value
 * @returns {number} integer cents
 */
const toMoneyCents = (value) => {
  if (value == null || value === "") return 0;

  // Dinero instance: getAmount() already yields integer cents.
  if (typeof value.getAmount === "function") return value.getAmount();

  // Dinero-like with toUnit(): convert units to cents, guarding non-finite.
  if (typeof value.toUnit === "function") {
    const units = value.toUnit();
    return Number.isFinite(units) ? Math.round(units * 100) : 0;
  }

  if (typeof value.cents === "number") return value.cents;

  // { amount, precision }: rescale to precision 2 (cents).
  if (typeof value.amount === "number") {
    const precision = typeof value.precision === "number" ? value.precision : 2;
    if (precision === 2) return value.amount;
    return Math.round(value.amount * Math.pow(10, 2 - precision));
  }

  if (typeof value === "number") return Math.round(value * 100);

  if (typeof value === "string") {
    const units = Number.parseFloat(value);
    return Number.isFinite(units) ? Math.round(units * 100) : 0;
  }

  return 0;
};
|
||||
|
||||
// Format integer cents as a 2-decimal string via the shared asN2 helper;
// non-finite inputs render as "0.00".
const asN2FromCents = (cents) => {
  const safeCents = Number.isFinite(cents) ? cents : 0;
  return asN2({ amount: safeCents, precision: 2 });
};
|
||||
|
||||
/**
 * Build RR estimate block from allocation totals.
 * Sums partsSale into parts, taxable + non-taxable labor into labor, and
 * totalSale into total; when no allocation carried totalSale, total falls
 * back to parts + labor so the block stays internally coherent.
 * @param {Array} allocations
 * @returns {{parts: string, labor: string, total: string}|null} null when
 *   allocations is missing or empty.
 */
const buildEstimateFromAllocations = (allocations) => {
  if (!Array.isArray(allocations) || allocations.length === 0) return null;

  let partsCents = 0;
  let laborCents = 0;
  let totalCents = 0;

  for (const alloc of allocations) {
    partsCents += toMoneyCents(alloc?.partsSale);
    laborCents += toMoneyCents(alloc?.laborTaxableSale) + toMoneyCents(alloc?.laborNonTaxableSale);
    totalCents += toMoneyCents(alloc?.totalSale);
  }

  // If totalSale wasn't provided, keep total coherent with parts + labor.
  if (!totalCents) {
    totalCents = partsCents + laborCents;
  }

  return {
    parts: asN2FromCents(partsCents),
    labor: asN2FromCents(laborCents),
    total: asN2FromCents(totalCents)
  };
};
|
||||
|
||||
/**
 * Build RR estimate block from precomputed job totals (`job.job_totals`).
 * parts = parts + sublets totals; labor = rates subtotal (either key
 * spelling); total = overall subtotal, falling back to parts + labor.
 * @param job
 * @returns {{parts: string, labor: string, total: string}|null} null when
 *   job_totals is absent or every derived figure is zero.
 */
const buildEstimateFromJobTotals = (job) => {
  const totals = job?.job_totals;
  if (!totals) return null;

  const partsCents = toMoneyCents(totals?.parts?.parts?.total) + toMoneyCents(totals?.parts?.sublets?.total);
  const laborCents = toMoneyCents(totals?.rates?.rates_subtotal ?? totals?.rates?.subtotal);

  // Prefer the explicit subtotal; otherwise keep total = parts + labor.
  const totalCents = toMoneyCents(totals?.totals?.subtotal) || partsCents + laborCents;

  // If we truly have no numbers from totals, omit the estimate entirely.
  if (!partsCents && !laborCents && !totalCents) return null;

  return {
    parts: asN2FromCents(partsCents),
    labor: asN2FromCents(laborCents),
    total: asN2FromCents(totalCents)
  };
};
|
||||
|
||||
/**
 * Build RR estimate block from the best available source: allocations
 * first, falling back to precomputed job totals.
 * @param job
 * @param allocations
 * @returns {{parts: string, labor: string, total: string}|null}
 */
const buildRREstimate = ({ job, allocations } = {}) => {
  const fromAllocations = buildEstimateFromAllocations(allocations);
  if (fromAllocations) return fromAllocations;
  return buildEstimateFromJobTotals(job);
};
|
||||
|
||||
/**
|
||||
* Build RO.GOG structure for the reynolds-rome-client `createRepairOrder` payload
|
||||
* from allocations.
|
||||
@@ -103,44 +219,6 @@ const buildRogogFromAllocations = (allocations, { opCode, payType = "Cust", roNo
|
||||
|
||||
const ops = [];
|
||||
|
||||
/**
 * Normalize various "money-like" shapes to integer cents.
 * Supports, in priority order:
 * - Dinero instances (getAmount — integer cents — or toUnit — units)
 * - { cents }
 * - { amount, precision } (amount scaled by 10^precision)
 * - plain numbers (treated as units, e.g. dollars)
 * Any falsy or unrecognized value (including strings) yields 0.
 */
const toCents = (value) => {
  if (!value) return 0;

  // Dinero instance: getAmount() is already integer cents.
  if (typeof value.getAmount === "function") return value.getAmount();

  // Dinero-like with toUnit(): units -> cents, guarding non-finite values.
  if (typeof value.toUnit === "function") {
    const units = value.toUnit();
    return Number.isFinite(units) ? Math.round(units * 100) : 0;
  }

  if (typeof value.cents === "number") return value.cents;

  // { amount, precision }: rescale to precision 2 (cents).
  if (typeof value.amount === "number") {
    const precision = typeof value.precision === "number" ? value.precision : 2;
    if (precision === 2) return value.amount;
    return Math.round(value.amount * Math.pow(10, 2 - precision));
  }

  return typeof value === "number" ? Math.round(value * 100) : 0;
};
|
||||
|
||||
const asMoneyLike = (amountCents) => ({
|
||||
amount: amountCents || 0,
|
||||
precision: 2
|
||||
@@ -154,13 +232,13 @@ const buildRogogFromAllocations = (allocations, { opCode, payType = "Cust", roNo
|
||||
// Only centers configured for RR GOG are included
|
||||
if (!breakOut || !itemType) continue;
|
||||
|
||||
const partsTaxableCents = toCents(alloc.partsTaxableSale);
|
||||
const partsNonTaxableCents = toCents(alloc.partsNonTaxableSale);
|
||||
const extrasTaxableCents = toCents(alloc.extrasTaxableSale);
|
||||
const extrasNonTaxableCents = toCents(alloc.extrasNonTaxableSale);
|
||||
const laborTaxableCents = toCents(alloc.laborTaxableSale);
|
||||
const laborNonTaxableCents = toCents(alloc.laborNonTaxableSale);
|
||||
const costCents = toCents(alloc.cost);
|
||||
const partsTaxableCents = toMoneyCents(alloc.partsTaxableSale);
|
||||
const partsNonTaxableCents = toMoneyCents(alloc.partsNonTaxableSale);
|
||||
const extrasTaxableCents = toMoneyCents(alloc.extrasTaxableSale);
|
||||
const extrasNonTaxableCents = toMoneyCents(alloc.extrasNonTaxableSale);
|
||||
const laborTaxableCents = toMoneyCents(alloc.laborTaxableSale);
|
||||
const laborNonTaxableCents = toMoneyCents(alloc.laborNonTaxableSale);
|
||||
const costCents = toMoneyCents(alloc.cost);
|
||||
|
||||
const segments = [];
|
||||
|
||||
@@ -418,6 +496,11 @@ const buildRRRepairOrderPayload = ({
|
||||
mileageIn: job.kmin
|
||||
};
|
||||
|
||||
const estimate = buildRREstimate({ job, allocations });
|
||||
if (estimate) {
|
||||
payload.estimate = estimate;
|
||||
}
|
||||
|
||||
if (story) {
|
||||
payload.roComment = String(story).trim();
|
||||
}
|
||||
|
||||
63
server/rr/rr-log-xml.js
Normal file
63
server/rr/rr-log-xml.js
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
 * Extract request/response XML from RR response/result shapes.
 * Looks at `xml` / `meta.xml` first (string or object keys), then a chain of
 * flat fallbacks, then a wrapped HTTP body, then one level of `cause`.
 * Empty-string candidates are treated as "not found yet", matching the
 * original falsy-chaining behavior.
 * @param rrObj
 * @returns {{requestXml: string|null, responseXml: string|null}}
 */
const extractRRXmlPair = (rrObj) => {
  const xml = rrObj?.xml ?? rrObj?.meta?.xml;

  let requestXml = null;
  let responseXml = null;

  if (typeof xml === "string") {
    // A bare string xml payload is the request XML.
    requestXml = xml;
  } else if (xml) {
    if (typeof xml.request === "string") requestXml = xml.request;
    else if (typeof xml.req === "string") requestXml = xml.req;
    else if (typeof xml.starXml === "string") requestXml = xml.starXml;
    if (typeof xml.response === "string") responseXml = xml.response;
  }

  // Flat fallbacks, first truthy string wins; an empty string keeps scanning.
  const requestCandidates = [rrObj?.requestXml, rrObj?.meta?.requestXml, rrObj?.meta?.reqXml, rrObj?.meta?.request];
  for (const candidate of requestCandidates) {
    if (requestXml) break;
    if (typeof candidate === "string") requestXml = candidate;
  }

  const responseCandidates = [rrObj?.responseXml, rrObj?.meta?.responseXml, rrObj?.meta?.resXml, rrObj?.meta?.response];
  for (const candidate of responseCandidates) {
    if (responseXml) break;
    if (typeof candidate === "string") responseXml = candidate;
  }

  // If wrapped HTTP response data contains raw XML, surface it.
  if (!responseXml && typeof rrObj?.response?.data === "string") {
    const body = rrObj.response.data.trim();
    if (body.startsWith("<")) responseXml = body;
  }

  // Try one level down when errors are wrapped (guard direct self-reference).
  if ((!requestXml || !responseXml) && rrObj?.cause && rrObj.cause !== rrObj) {
    const nested = extractRRXmlPair(rrObj.cause);
    if (!requestXml) requestXml = nested.requestXml;
    if (!responseXml) responseXml = nested.responseXml;
  }

  return { requestXml, responseXml };
};
|
||||
|
||||
/**
 * Add Reynolds request/response XML to RR log metadata when available.
 * Returns the original `meta` object untouched when neither XML is present.
 * @param rrObj
 * @param meta
 * @returns {*}
 */
const withRRRequestXml = (rrObj, meta = {}) => {
  const { requestXml, responseXml } = extractRRXmlPair(rrObj);

  // Nothing extracted: hand back meta as-is (same reference).
  if (!requestXml && !responseXml) return meta;

  return {
    ...meta,
    ...(requestXml ? { requestXml } : {}),
    ...(responseXml ? { responseXml } : {})
  };
};
|
||||
|
||||
module.exports = {
|
||||
extractRRXmlPair,
|
||||
withRRRequestXml
|
||||
};
|
||||
@@ -12,6 +12,7 @@ const { createRRCustomer } = require("./rr-customers");
|
||||
const { ensureRRServiceVehicle } = require("./rr-service-vehicles");
|
||||
const { classifyRRVendorError } = require("./rr-errors");
|
||||
const { markRRExportSuccess, insertRRFailedExportLog } = require("./rr-export-logs");
|
||||
const { withRRRequestXml, extractRRXmlPair } = require("./rr-log-xml");
|
||||
const {
|
||||
makeVehicleSearchPayloadFromJob,
|
||||
ownersFromVinBlocks,
|
||||
@@ -48,6 +49,21 @@ const resolveJobId = (explicit, payload, job) => explicit || payload?.jobId || j
|
||||
*/
|
||||
// Resolve the VIN from the transaction envelope first, then the job record;
// null when neither carries a truthy VIN.
const resolveVin = ({ tx, job }) => {
  const vinFromTx = tx?.jobData?.vin;
  if (vinFromTx) return vinFromTx;
  return job?.v_vin || null;
};
|
||||
|
||||
/**
 * Add request/response XML to socket event payloads when available.
 * Always returns a fresh shallow copy of `payload`; XML keys are added only
 * when extraction produced truthy strings.
 * @param rrObj
 * @param payload
 * @returns {*}
 */
const withRRXmlSocketPayload = (rrObj, payload = {}) => {
  const { requestXml, responseXml } = extractRRXmlPair(rrObj);
  const enriched = { ...payload };
  if (requestXml) enriched.requestXml = requestXml;
  if (responseXml) enriched.responseXml = responseXml;
  return enriched;
};
|
||||
|
||||
/**
|
||||
* Sort vehicle owners first in the list, preserving original order otherwise.
|
||||
* @param list
|
||||
@@ -154,15 +170,13 @@ const setJobDmsIdForSocket = async ({ socket, jobId, dmsId, dmsCustomerId, dmsAd
|
||||
if (!token) throw new Error("Missing auth token for setJobDmsIdForSocket");
|
||||
|
||||
const client = new GraphQLClient(endpoint, {});
|
||||
await client
|
||||
.setHeaders({ Authorization: `Bearer ${token}` })
|
||||
.request(queries.SET_JOB_DMS_ID, {
|
||||
id: jobId,
|
||||
dms_id: String(dmsId),
|
||||
dms_customer_id: dmsCustomerId ? String(dmsCustomerId) : null,
|
||||
dms_advisor_id: dmsAdvisorId ? String(dmsAdvisorId) : null,
|
||||
kmin: mileageIn != null && mileageIn > 0 ? parseInt(mileageIn, 10) : null
|
||||
});
|
||||
await client.setHeaders({ Authorization: `Bearer ${token}` }).request(queries.SET_JOB_DMS_ID, {
|
||||
id: jobId,
|
||||
dms_id: String(dmsId),
|
||||
dms_customer_id: dmsCustomerId ? String(dmsCustomerId) : null,
|
||||
dms_advisor_id: dmsAdvisorId ? String(dmsAdvisorId) : null,
|
||||
kmin: mileageIn != null && mileageIn > 0 ? parseInt(mileageIn, 10) : null
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "Linked job.dms_id to RR RO", {
|
||||
jobId,
|
||||
@@ -241,7 +255,12 @@ const rrMultiCustomerSearch = async ({ bodyshop, job, socket, redisHelpers }) =>
|
||||
|
||||
const multiResponse = await rrCombinedSearch(bodyshop, q);
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", "Multi Customer Search - raw combined search", { response: multiResponse });
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"SILLY",
|
||||
"Multi Customer Search - raw combined search",
|
||||
withRRRequestXml(multiResponse, { response: multiResponse })
|
||||
);
|
||||
|
||||
if (fromVin) {
|
||||
const multiBlocks = Array.isArray(multiResponse?.data) ? multiResponse.data : [];
|
||||
@@ -262,7 +281,7 @@ const rrMultiCustomerSearch = async ({ bodyshop, job, socket, redisHelpers }) =>
|
||||
const norm = normalizeCustomerCandidates(multiResponse, { ownersSet });
|
||||
merged.push(...norm);
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "WARN", "Multi-search subquery failed", { kind: q.kind, error: e.message });
|
||||
CreateRRLogEvent(socket, "WARN", "Multi-search subquery failed", withRRRequestXml(e, { kind: q.kind, error: e.message }));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -310,7 +329,7 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
count: decorated.length
|
||||
});
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "ERROR", "RR combined lookup error", { error: e.message, jobid });
|
||||
CreateRRLogEvent(socket, "ERROR", "RR combined lookup error", withRRRequestXml(e, { error: e.message, jobid }));
|
||||
cb?.({ jobid, error: e.message });
|
||||
}
|
||||
});
|
||||
@@ -387,7 +406,7 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
fromCache
|
||||
});
|
||||
} catch (err) {
|
||||
CreateRRLogEvent(socket, "ERROR", "rr-get-advisors: failed", { error: err?.message });
|
||||
CreateRRLogEvent(socket, "ERROR", "rr-get-advisors: failed", withRRRequestXml(err, { error: err?.message }));
|
||||
ack?.({ ok: false, error: err?.message || "get advisors failed" });
|
||||
}
|
||||
});
|
||||
@@ -458,14 +477,26 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
anyOwner: decorated.some((c) => c.vinOwner || c.isVehicleOwner)
|
||||
});
|
||||
} catch (error) {
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR early RO creation (prepare)`, {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Error during RR early RO creation (prepare)`,
|
||||
withRRRequestXml(error, {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
})
|
||||
);
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", { vendor: "rr", jobId: rid, error: error.message });
|
||||
socket.emit(
|
||||
"export-failed",
|
||||
withRRXmlSocketPayload(error, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message
|
||||
})
|
||||
);
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
@@ -511,7 +542,11 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
});
|
||||
|
||||
// Filter out invalid values
|
||||
if (selectedCustNo === "undefined" || selectedCustNo === "null" || (selectedCustNo && selectedCustNo.trim() === "")) {
|
||||
if (
|
||||
selectedCustNo === "undefined" ||
|
||||
selectedCustNo === "null" ||
|
||||
(selectedCustNo && selectedCustNo.trim() === "")
|
||||
) {
|
||||
selectedCustNo = null;
|
||||
}
|
||||
|
||||
@@ -555,7 +590,12 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
if (vehQ && vehQ.kind === "vin" && job?.v_vin) {
|
||||
const vinResponse = await rrCombinedSearch(bodyshop, vehQ);
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", `VIN owner pre-check response (early RO)`, { response: vinResponse });
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"SILLY",
|
||||
`VIN owner pre-check response (early RO)`,
|
||||
withRRRequestXml(vinResponse, { response: vinResponse })
|
||||
);
|
||||
|
||||
const vinBlocks = Array.isArray(vinResponse?.data) ? vinResponse.data : [];
|
||||
|
||||
@@ -588,9 +628,14 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "WARN", `VIN owner pre-check failed; continuing with selected customer (early RO)`, {
|
||||
error: e?.message
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"WARN",
|
||||
`VIN owner pre-check failed; continuing with selected customer (early RO)`,
|
||||
withRRRequestXml(e, {
|
||||
error: e?.message
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Cache final/effective customer selection
|
||||
@@ -705,42 +750,52 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
|
||||
const outsdRoNo = data?.outsdRoNo ?? job?.ro_number ?? job?.id ?? null;
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", "Early RO created - checking dmsRoNo", {
|
||||
dmsRoNo,
|
||||
resultRoNo: result?.roNo,
|
||||
dataRoNo: data?.dmsRoNo,
|
||||
jobId: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"DEBUG",
|
||||
"Early RO created - checking dmsRoNo",
|
||||
withRRRequestXml(result, {
|
||||
dmsRoNo,
|
||||
resultRoNo: result?.roNo,
|
||||
dataRoNo: data?.dmsRoNo,
|
||||
jobId: rid
|
||||
})
|
||||
);
|
||||
|
||||
// ✅ Persist DMS RO number, customer ID, advisor ID, and mileage on the job
|
||||
if (dmsRoNo) {
|
||||
const mileageIn = txEnvelope?.kmin ?? null;
|
||||
CreateRRLogEvent(socket, "DEBUG", "Calling setJobDmsIdForSocket", {
|
||||
jobId: rid,
|
||||
CreateRRLogEvent(socket, "DEBUG", "Calling setJobDmsIdForSocket", {
|
||||
jobId: rid,
|
||||
dmsId: dmsRoNo,
|
||||
customerId: effectiveCustNo,
|
||||
advisorId: String(advisorNo),
|
||||
mileageIn
|
||||
});
|
||||
await setJobDmsIdForSocket({
|
||||
socket,
|
||||
jobId: rid,
|
||||
await setJobDmsIdForSocket({
|
||||
socket,
|
||||
jobId: rid,
|
||||
dmsId: dmsRoNo,
|
||||
dmsCustomerId: effectiveCustNo,
|
||||
dmsAdvisorId: String(advisorNo),
|
||||
mileageIn
|
||||
});
|
||||
} else {
|
||||
CreateRRLogEvent(socket, "WARN", "RR early RO creation succeeded but no DMS RO number was returned", {
|
||||
jobId: rid,
|
||||
resultPreview: {
|
||||
roNo: result?.roNo,
|
||||
data: {
|
||||
dmsRoNo: data?.dmsRoNo,
|
||||
outsdRoNo: data?.outsdRoNo
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"WARN",
|
||||
"RR early RO creation succeeded but no DMS RO number was returned",
|
||||
withRRRequestXml(result, {
|
||||
jobId: rid,
|
||||
resultPreview: {
|
||||
roNo: result?.roNo,
|
||||
data: {
|
||||
dmsRoNo: data?.dmsRoNo,
|
||||
outsdRoNo: data?.outsdRoNo
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
@@ -758,10 +813,15 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", `{EARLY-5} Minimal RO created successfully`, {
|
||||
dmsRoNo: dmsRoNo || null,
|
||||
outsdRoNo: outsdRoNo || null
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"INFO",
|
||||
`{EARLY-5} Minimal RO created successfully`,
|
||||
withRRRequestXml(result, {
|
||||
dmsRoNo: dmsRoNo || null,
|
||||
outsdRoNo: outsdRoNo || null
|
||||
})
|
||||
);
|
||||
|
||||
// Mark success in export logs
|
||||
await markRRExportSuccess({
|
||||
@@ -810,11 +870,16 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
message: vendorMessage
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "ERROR", `Early RO creation failed`, {
|
||||
roStatus: result?.roStatus,
|
||||
statusBlocks: result?.statusBlocks,
|
||||
classification: cls
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Early RO creation failed`,
|
||||
withRRRequestXml(result, {
|
||||
roStatus: result?.roStatus,
|
||||
statusBlocks: result?.statusBlocks,
|
||||
classification: cls
|
||||
})
|
||||
);
|
||||
|
||||
await insertRRFailedExportLog({
|
||||
socket,
|
||||
@@ -827,9 +892,11 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
});
|
||||
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || result?.error || "RR early RO creation failed",
|
||||
...withRRXmlSocketPayload(result, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || result?.error || "RR early RO creation failed"
|
||||
}),
|
||||
...cls
|
||||
});
|
||||
|
||||
@@ -843,14 +910,19 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
} catch (error) {
|
||||
const cls = classifyRRVendorError(error);
|
||||
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR early RO creation (customer-selected)`, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Error during RR early RO creation (customer-selected)`,
|
||||
withRRRequestXml(error, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
})
|
||||
);
|
||||
|
||||
try {
|
||||
if (!bodyshop || !job) {
|
||||
@@ -875,9 +947,11 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message,
|
||||
...withRRXmlSocketPayload(error, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message
|
||||
}),
|
||||
...cls
|
||||
});
|
||||
socket.emit("rr-user-notice", { jobId: rid, ...cls });
|
||||
@@ -940,14 +1014,14 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
|
||||
// Check if this job already has an early RO - if so, use stored IDs and skip customer search
|
||||
const hasEarlyRO = !!job?.dms_id;
|
||||
|
||||
|
||||
if (hasEarlyRO) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `{2} Early RO exists - using stored customer/advisor`, {
|
||||
dms_id: job.dms_id,
|
||||
dms_customer_id: job.dms_customer_id,
|
||||
dms_advisor_id: job.dms_advisor_id
|
||||
});
|
||||
|
||||
|
||||
// Cache the stored customer/advisor IDs for the next step
|
||||
if (job.dms_customer_id) {
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
@@ -967,18 +1041,18 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
defaultRRTTL
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
// Emit empty customer list to frontend (won't show modal)
|
||||
socket.emit("rr-select-customer", []);
|
||||
|
||||
|
||||
// Continue directly with the export by calling the selected customer handler logic inline
|
||||
// This is essentially the same as if user selected the stored customer
|
||||
const selectedCustNo = job.dms_customer_id;
|
||||
|
||||
|
||||
if (!selectedCustNo) {
|
||||
throw new Error("Early RO exists but no customer ID stored");
|
||||
}
|
||||
|
||||
|
||||
// Continue with ensureRRServiceVehicle and export (same as rr-selected-customer handler)
|
||||
const { client, opts } = await buildClientAndOpts(bodyshop);
|
||||
const routing = opts?.routing || client?.opts?.routing || null;
|
||||
@@ -1011,7 +1085,12 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
redisHelpers
|
||||
});
|
||||
|
||||
const advisorNo = job.dms_advisor_id || readAdvisorNo({ txEnvelope }, await redisHelpers.getSessionTransactionData(socket.id, getTransactionType(rid), RRCacheEnums.AdvisorNo));
|
||||
const advisorNo =
|
||||
job.dms_advisor_id ||
|
||||
readAdvisorNo(
|
||||
{ txEnvelope },
|
||||
await redisHelpers.getSessionTransactionData(socket.id, getTransactionType(rid), RRCacheEnums.AdvisorNo)
|
||||
);
|
||||
|
||||
if (!advisorNo) {
|
||||
throw new Error("Advisor is required (advisorNo).");
|
||||
@@ -1030,7 +1109,28 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
roNo: job.dms_id
|
||||
});
|
||||
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"SILLY",
|
||||
"{4.1} RR RO update response received",
|
||||
withRRRequestXml(result, {
|
||||
dmsRoNo: job.dms_id,
|
||||
success: !!result?.success
|
||||
})
|
||||
);
|
||||
|
||||
if (!result?.success) {
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
"RR Repair Order update failed",
|
||||
withRRRequestXml(result, {
|
||||
jobId: rid,
|
||||
dmsRoNo: job.dms_id,
|
||||
roStatus: result?.roStatus,
|
||||
statusBlocks: result?.statusBlocks
|
||||
})
|
||||
);
|
||||
throw new Error(result?.roStatus?.message || "Failed to update RR Repair Order");
|
||||
}
|
||||
|
||||
@@ -1059,15 +1159,20 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", `RR Repair Order updated successfully`, {
|
||||
dmsRoNo,
|
||||
jobId: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"INFO",
|
||||
`RR Repair Order updated successfully`,
|
||||
withRRRequestXml(result, {
|
||||
dmsRoNo,
|
||||
jobId: rid
|
||||
})
|
||||
);
|
||||
|
||||
// For early RO flow, only emit validation-required (not export-job:result)
|
||||
// since the export is not complete yet - we're just waiting for validation
|
||||
socket.emit("rr-validation-required", { dmsRoNo, jobId: rid });
|
||||
|
||||
|
||||
return ack?.({ ok: true, skipCustomerSelection: true, dmsRoNo });
|
||||
}
|
||||
|
||||
@@ -1082,14 +1187,26 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
anyOwner: decorated.some((c) => c.vinOwner || c.isVehicleOwner)
|
||||
});
|
||||
} catch (error) {
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR export (prepare)`, {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Error during RR export (prepare)`,
|
||||
withRRRequestXml(error, {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
})
|
||||
);
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", { vendor: "rr", jobId: rid, error: error.message });
|
||||
socket.emit(
|
||||
"export-failed",
|
||||
withRRXmlSocketPayload(error, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message
|
||||
})
|
||||
);
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
@@ -1148,7 +1265,12 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
if (vehQ && vehQ.kind === "vin" && job?.v_vin) {
|
||||
const vinResponse = await rrCombinedSearch(bodyshop, vehQ);
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", `VIN owner pre-check response`, { response: vinResponse });
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"SILLY",
|
||||
`VIN owner pre-check response`,
|
||||
withRRRequestXml(vinResponse, { response: vinResponse })
|
||||
);
|
||||
|
||||
const vinBlocks = Array.isArray(vinResponse?.data) ? vinResponse.data : [];
|
||||
|
||||
@@ -1181,9 +1303,14 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "WARN", `VIN owner pre-check failed; continuing with selected customer`, {
|
||||
error: e?.message
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"WARN",
|
||||
`VIN owner pre-check failed; continuing with selected customer`,
|
||||
withRRRequestXml(e, {
|
||||
error: e?.message
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Cache final/effective customer selection
|
||||
@@ -1277,25 +1404,25 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
// When updating an early RO, use stored customer/advisor IDs
|
||||
let finalEffectiveCustNo = effectiveCustNo;
|
||||
let finalAdvisorNo = advisorNo;
|
||||
|
||||
|
||||
if (shouldUpdate && job?.dms_customer_id) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `Using stored customer ID from early RO`, {
|
||||
CreateRRLogEvent(socket, "DEBUG", `Using stored customer ID from early RO`, {
|
||||
storedCustomerId: job.dms_customer_id,
|
||||
originalCustomerId: effectiveCustNo
|
||||
originalCustomerId: effectiveCustNo
|
||||
});
|
||||
finalEffectiveCustNo = String(job.dms_customer_id);
|
||||
}
|
||||
|
||||
|
||||
if (shouldUpdate && job?.dms_advisor_id) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `Using stored advisor ID from early RO`, {
|
||||
CreateRRLogEvent(socket, "DEBUG", `Using stored advisor ID from early RO`, {
|
||||
storedAdvisorId: job.dms_advisor_id,
|
||||
originalAdvisorId: advisorNo
|
||||
originalAdvisorId: advisorNo
|
||||
});
|
||||
finalAdvisorNo = String(job.dms_advisor_id);
|
||||
}
|
||||
|
||||
let result;
|
||||
|
||||
|
||||
if (shouldUpdate) {
|
||||
// UPDATE existing RO with full data
|
||||
CreateRRLogEvent(socket, "DEBUG", `{4} Updating existing RR RO with full data`, { dmsRoNo: existingDmsId });
|
||||
@@ -1344,16 +1471,21 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
if (dmsRoNo) {
|
||||
await setJobDmsIdForSocket({ socket, jobId: rid, dmsId: dmsRoNo });
|
||||
} else {
|
||||
CreateRRLogEvent(socket, "WARN", "RR export succeeded but no DMS RO number was returned", {
|
||||
jobId: rid,
|
||||
resultPreview: {
|
||||
roNo: result?.roNo,
|
||||
data: {
|
||||
dmsRoNo: data?.dmsRoNo,
|
||||
outsdRoNo: data?.outsdRoNo
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"WARN",
|
||||
"RR export succeeded but no DMS RO number was returned",
|
||||
withRRRequestXml(result, {
|
||||
jobId: rid,
|
||||
resultPreview: {
|
||||
roNo: result?.roNo,
|
||||
data: {
|
||||
dmsRoNo: data?.dmsRoNo,
|
||||
outsdRoNo: data?.outsdRoNo
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
@@ -1370,10 +1502,15 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", `{5} RO created. Waiting for validation.`, {
|
||||
dmsRoNo: dmsRoNo || null,
|
||||
outsdRoNo: outsdRoNo || null
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"INFO",
|
||||
`{5} RO created. Waiting for validation.`,
|
||||
withRRRequestXml(result, {
|
||||
dmsRoNo: dmsRoNo || null,
|
||||
outsdRoNo: outsdRoNo || null
|
||||
})
|
||||
);
|
||||
|
||||
// Tell FE to prompt for "Finished/Close"
|
||||
socket.emit("rr-validation-required", { jobId: rid, dmsRoNo, outsdRoNo });
|
||||
@@ -1412,11 +1549,16 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
message: vendorMessage
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "ERROR", `Export failed (step 1)`, {
|
||||
roStatus: result?.roStatus,
|
||||
statusBlocks: result?.statusBlocks,
|
||||
classification: cls
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Export failed (step 1)`,
|
||||
withRRRequestXml(result, {
|
||||
roStatus: result?.roStatus,
|
||||
statusBlocks: result?.statusBlocks,
|
||||
classification: cls
|
||||
})
|
||||
);
|
||||
|
||||
await insertRRFailedExportLog({
|
||||
socket,
|
||||
@@ -1429,9 +1571,11 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
});
|
||||
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || result?.error || "RR export failed",
|
||||
...withRRXmlSocketPayload(result, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || result?.error || "RR export failed"
|
||||
}),
|
||||
...cls
|
||||
});
|
||||
|
||||
@@ -1445,14 +1589,19 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
} catch (error) {
|
||||
const cls = classifyRRVendorError(error);
|
||||
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR export (selected-customer)`, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Error during RR export (selected-customer)`,
|
||||
withRRRequestXml(error, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
})
|
||||
);
|
||||
|
||||
try {
|
||||
if (!bodyshop || !job) {
|
||||
@@ -1477,9 +1626,11 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message,
|
||||
...withRRXmlSocketPayload(error, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message
|
||||
}),
|
||||
...cls
|
||||
});
|
||||
socket.emit("rr-user-notice", { jobId: rid, ...cls });
|
||||
@@ -1541,7 +1692,12 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
});
|
||||
|
||||
if (finalizeResult?.success) {
|
||||
CreateRRLogEvent(socket, "INFO", `{7} Finalize success; marking exported`, { dmsRoNo, outsdRoNo });
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"INFO",
|
||||
`{7} Finalize success; marking exported`,
|
||||
withRRRequestXml(finalizeResult, { dmsRoNo, outsdRoNo })
|
||||
);
|
||||
|
||||
// ✅ Mark exported + success log
|
||||
await markRRExportSuccess({
|
||||
@@ -1584,6 +1740,17 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
message: vendorMessage
|
||||
});
|
||||
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
"Finalize failed",
|
||||
withRRRequestXml(finalizeResult, {
|
||||
roStatus: finalizeResult?.roStatus,
|
||||
statusBlocks: finalizeResult?.statusBlocks,
|
||||
classification: cls
|
||||
})
|
||||
);
|
||||
|
||||
await insertRRFailedExportLog({
|
||||
socket,
|
||||
jobId: rid,
|
||||
@@ -1595,23 +1762,30 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
});
|
||||
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || finalizeResult?.error || "RR finalize failed",
|
||||
...withRRXmlSocketPayload(finalizeResult, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || finalizeResult?.error || "RR finalize failed"
|
||||
}),
|
||||
...cls
|
||||
});
|
||||
ack?.({ ok: false, error: cls.friendlyMessage || "RR finalize failed", classification: cls });
|
||||
}
|
||||
} catch (error) {
|
||||
const cls = classifyRRVendorError(error);
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR finalize`, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"ERROR",
|
||||
`Error during RR finalize`,
|
||||
withRRRequestXml(error, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
})
|
||||
);
|
||||
|
||||
try {
|
||||
if (!bodyshop || !job) {
|
||||
@@ -1635,7 +1809,17 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
});
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", { vendor: "rr", jobId: rid, error: error.message, ...cls });
|
||||
socket.emit(
|
||||
"export-failed",
|
||||
{
|
||||
...withRRXmlSocketPayload(error, {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message
|
||||
}),
|
||||
...cls
|
||||
}
|
||||
);
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const { buildClientAndOpts, rrCombinedSearch } = require("./rr-lookup");
|
||||
const CreateRRLogEvent = require("./rr-logger-event");
|
||||
const { withRRRequestXml } = require("./rr-log-xml");
|
||||
/**
|
||||
* Pick and normalize VIN from inputs
|
||||
* @param vin
|
||||
@@ -168,9 +169,12 @@ const ensureRRServiceVehicle = async (args = {}) => {
|
||||
if (bodyshop) {
|
||||
const combinedSearchResponse = await rrCombinedSearch(bodyshop, { kind: "vin", vin: vinStr, maxResults: 50 });
|
||||
|
||||
CreateRRLogEvent(socket, "silly", "{SV} Preflight combined search by VIN: raw response", {
|
||||
response: combinedSearchResponse
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"silly",
|
||||
"{SV} Preflight combined search by VIN: raw response",
|
||||
withRRRequestXml(combinedSearchResponse, { response: combinedSearchResponse })
|
||||
);
|
||||
|
||||
owners = ownersFromCombined(combinedSearchResponse, vinStr);
|
||||
}
|
||||
@@ -194,10 +198,15 @@ const ensureRRServiceVehicle = async (args = {}) => {
|
||||
}
|
||||
} catch (e) {
|
||||
// Preflight shouldn't be fatal; log and continue to insert (idempotency will still be handled)
|
||||
CreateRRLogEvent(socket, "warn", "{SV} VIN preflight lookup failed; continuing to insert", {
|
||||
vin: vinStr,
|
||||
error: e?.message
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"warn",
|
||||
"{SV} VIN preflight lookup failed; continuing to insert",
|
||||
withRRRequestXml(e, {
|
||||
vin: vinStr,
|
||||
error: e?.message
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Vendor says: MODEL DESCRIPTION HAS MAXIMUM LENGTH OF 20
|
||||
@@ -271,7 +280,7 @@ const ensureRRServiceVehicle = async (args = {}) => {
|
||||
try {
|
||||
const res = await client.insertServiceVehicle(insertPayload, insertOpts);
|
||||
|
||||
CreateRRLogEvent(socket, "silly", "{SV} insertServiceVehicle: raw response", { res });
|
||||
CreateRRLogEvent(socket, "silly", "{SV} insertServiceVehicle: raw response", withRRRequestXml(res, { res }));
|
||||
|
||||
const data = res?.data ?? {};
|
||||
const svId = data?.dmsRecKey || data?.svId || undefined;
|
||||
@@ -309,11 +318,16 @@ const ensureRRServiceVehicle = async (args = {}) => {
|
||||
};
|
||||
}
|
||||
|
||||
CreateRRLogEvent(socket, "error", "{SV} insertServiceVehicle: failure", {
|
||||
message: e?.message,
|
||||
code: e?.code,
|
||||
status: e?.meta?.status || e?.status
|
||||
});
|
||||
CreateRRLogEvent(
|
||||
socket,
|
||||
"error",
|
||||
"{SV} insertServiceVehicle: failure",
|
||||
withRRRequestXml(e, {
|
||||
message: e?.message,
|
||||
code: e?.code,
|
||||
status: e?.meta?.status || e?.status
|
||||
})
|
||||
);
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
@@ -68,12 +68,33 @@ const fetchBodyshopFromDB = async (bodyshopId, logger) => {
|
||||
* @param logger
|
||||
*/
|
||||
const applyRedisHelpers = ({ pubClient, app, logger }) => {
|
||||
const toRedisJson = (value) => JSON.stringify(value === undefined ? null : value);
|
||||
|
||||
// Store session data in Redis
|
||||
const setSessionData = async (socketId, key, value, ttl) => {
|
||||
try {
|
||||
await pubClient.hset(`socket:${socketId}`, key, JSON.stringify(value)); // Use Redis pubClient
|
||||
const sessionKey = `socket:${socketId}`;
|
||||
|
||||
// Supports both forms:
|
||||
// 1) setSessionData(socketId, "field", value, ttl)
|
||||
// 2) setSessionData(socketId, { fieldA: valueA, fieldB: valueB }, ttl)
|
||||
if (key && typeof key === "object" && !Array.isArray(key)) {
|
||||
const entries = Object.entries(key).flatMap(([field, fieldValue]) => [field, toRedisJson(fieldValue)]);
|
||||
|
||||
if (entries.length > 0) {
|
||||
await pubClient.hset(sessionKey, ...entries);
|
||||
}
|
||||
|
||||
const objectTtl = typeof value === "number" ? value : typeof ttl === "number" ? ttl : null;
|
||||
if (objectTtl) {
|
||||
await pubClient.expire(sessionKey, objectTtl);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
await pubClient.hset(sessionKey, key, toRedisJson(value)); // Use Redis pubClient
|
||||
if (ttl && typeof ttl === "number") {
|
||||
await pubClient.expire(`socket:${socketId}`, ttl);
|
||||
await pubClient.expire(sessionKey, ttl);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.log(`Error Setting Session Data for socket ${socketId}: ${error}`, "ERROR", "redis");
|
||||
@@ -88,7 +109,26 @@ const applyRedisHelpers = ({ pubClient, app, logger }) => {
|
||||
*/
|
||||
const getSessionData = async (socketId, key) => {
|
||||
try {
|
||||
const data = await pubClient.hget(`socket:${socketId}`, key);
|
||||
const sessionKey = `socket:${socketId}`;
|
||||
|
||||
// Supports:
|
||||
// 1) getSessionData(socketId, "field") -> parsed field value
|
||||
// 2) getSessionData(socketId) -> parsed object of all fields
|
||||
if (typeof key === "undefined") {
|
||||
const raw = await pubClient.hgetall(sessionKey);
|
||||
if (!raw || Object.keys(raw).length === 0) return null;
|
||||
|
||||
return Object.entries(raw).reduce((acc, [field, rawValue]) => {
|
||||
try {
|
||||
acc[field] = JSON.parse(rawValue);
|
||||
} catch {
|
||||
acc[field] = rawValue;
|
||||
}
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
|
||||
const data = await pubClient.hget(sessionKey, key);
|
||||
return data ? JSON.parse(data) : null;
|
||||
} catch (error) {
|
||||
logger.log(`Error Getting Session Data for socket ${socketId}: ${error}`, "ERROR", "redis");
|
||||
@@ -106,7 +146,7 @@ const applyRedisHelpers = ({ pubClient, app, logger }) => {
|
||||
*/
|
||||
const setSessionTransactionData = async (socketId, transactionType, key, value, ttl) => {
|
||||
try {
|
||||
await pubClient.hset(getSocketTransactionkey({ socketId, transactionType }), key, JSON.stringify(value)); // Use Redis pubClient
|
||||
await pubClient.hset(getSocketTransactionkey({ socketId, transactionType }), key, toRedisJson(value)); // Use Redis pubClient
|
||||
if (ttl && typeof ttl === "number") {
|
||||
await pubClient.expire(getSocketTransactionkey({ socketId, transactionType }), ttl);
|
||||
}
|
||||
@@ -160,7 +200,17 @@ const applyRedisHelpers = ({ pubClient, app, logger }) => {
|
||||
*/
|
||||
const clearSessionTransactionData = async (socketId, transactionType) => {
|
||||
try {
|
||||
await pubClient.del(getSocketTransactionkey({ socketId, transactionType }));
|
||||
if (transactionType) {
|
||||
await pubClient.del(getSocketTransactionkey({ socketId, transactionType }));
|
||||
return;
|
||||
}
|
||||
|
||||
// If no transactionType is provided, clear all transaction namespaces for this socket.
|
||||
const pattern = getSocketTransactionkey({ socketId, transactionType: "*" });
|
||||
const keys = await pubClient.keys(pattern);
|
||||
if (Array.isArray(keys) && keys.length > 0) {
|
||||
await pubClient.del(...keys);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.log(
|
||||
`Error Clearing Session Transaction Data for socket ${socketId}:${transactionType}: ${error}`,
|
||||
|
||||
@@ -1,114 +1,113 @@
|
||||
const { isArray } = require("lodash");
|
||||
const logger = require("../utils/logger");
|
||||
|
||||
|
||||
function createLogEvent(socket, level, message) {
|
||||
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
|
||||
// console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`);
|
||||
socket.emit("log-event", {
|
||||
timestamp: new Date(),
|
||||
level,
|
||||
message
|
||||
});
|
||||
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
|
||||
// console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`);
|
||||
socket.emit("log-event", {
|
||||
timestamp: new Date(),
|
||||
level,
|
||||
message
|
||||
});
|
||||
|
||||
logger.log("ws-log-event", level, socket.user.email, socket.recordid, {
|
||||
wsmessage: message
|
||||
});
|
||||
|
||||
if (socket.logEvents && isArray(socket.logEvents)) {
|
||||
socket.logEvents.push({
|
||||
timestamp: new Date(),
|
||||
level,
|
||||
message
|
||||
});
|
||||
}
|
||||
// if (level === "ERROR") {
|
||||
// throw new Error(message);
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
function createJsonEvent(socket, level, message, json) {
|
||||
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
|
||||
//console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`);
|
||||
socket.emit("log-event", {
|
||||
timestamp: new Date(),
|
||||
level,
|
||||
message
|
||||
});
|
||||
}
|
||||
logger.log(
|
||||
"ws-log-event-json",
|
||||
level,
|
||||
socket.user.email,
|
||||
socket.recordid,
|
||||
{
|
||||
wsmessage: message,
|
||||
json
|
||||
},
|
||||
true
|
||||
);
|
||||
logger.log("ws-log-event", level, socket.user.email, socket.recordid, {
|
||||
wsmessage: message
|
||||
});
|
||||
|
||||
if (socket.logEvents && isArray(socket.logEvents)) {
|
||||
socket.logEvents.push({
|
||||
timestamp: new Date(),
|
||||
level,
|
||||
message
|
||||
});
|
||||
socket.logEvents.push({
|
||||
timestamp: new Date(),
|
||||
level,
|
||||
message
|
||||
});
|
||||
}
|
||||
// if (level === "ERROR") {
|
||||
// throw new Error(message);
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Forward a log event (with an attached JSON payload) to the client,
 * record it through the central logger, and buffer it on the socket.
 *
 * @param {object} socket  - client socket (carries log_level, user, recordid, logEvents)
 * @param {string} level   - log level name (e.g. "INFO", "ERROR")
 * @param {string} message - human-readable event text
 * @param {object} json    - structured payload persisted alongside the message
 */
function createJsonEvent(socket, level, message, json) {
  // Build a fresh timestamped entry each time it is needed.
  const makeEntry = () => ({ timestamp: new Date(), level, message });

  // Surface the event to the client only when the socket's verbosity allows it.
  const clientWantsIt =
    LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level);
  if (clientWantsIt) {
    //console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`);
    socket.emit("log-event", makeEntry());
  }

  // Always persist the JSON-bearing record centrally.
  logger.log(
    "ws-log-event-json",
    level,
    socket.user.email,
    socket.recordid,
    { wsmessage: message, json },
    true
  );

  // Keep a copy on the socket when a buffer has been attached.
  if (socket.logEvents && isArray(socket.logEvents)) {
    socket.logEvents.push(makeEntry());
  }
  // if (level === "ERROR") {
  //   throw new Error(message);
  // }
}
|
||||
|
||||
/**
 * Forward an XML trace event to the client socket (at SILLY verbosity),
 * persist it via the central logger, and buffer it on the socket.
 *
 * Fix: the original executed every step twice — duplicate socket.emit,
 * duplicate logger.log, and duplicate logEvents.push — double-logging
 * each XML event.
 *
 * @param {object}  socket  - client socket (carries log_level, user, recordid, logEvents)
 * @param {string}  xml     - raw XML payload being traced
 * @param {string}  message - human-readable label for the payload
 * @param {boolean} [isError=false] - marks the event as ERROR instead of SILLY
 */
function createXmlEvent(socket, xml, message, isError = false) {
  const level = isError ? "ERROR" : "SILLY";

  // XML traces are only pushed to clients running at full (SILLY) verbosity.
  if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy("SILLY")) {
    socket.emit("log-event", {
      timestamp: new Date(),
      level: isError ? "ERROR" : "SILLY",
      message: `${message}: ${xml}`
    });
  }

  // Persist the trace centrally, tagged as error or plain XML.
  logger.log(
    isError ? "ws-log-event-xml-error" : "ws-log-event-xml",
    isError ? "ERROR" : "SILLY",
    socket.user.email,
    socket.recordid,
    {
      wsmessage: message,
      xml
    },
    true
  );

  // Keep a copy on the socket when a buffer has been attached.
  if (socket.logEvents && isArray(socket.logEvents)) {
    socket.logEvents.push({
      timestamp: new Date(),
      level: isError ? "ERROR" : "SILLY",
      message,
      xml
    });
  }
}
|
||||
|
||||
/**
 * Map a log-level name to its numeric verbosity rank (higher = chattier).
 * Unknown or missing levels default to INFO's rank (3).
 *
 * Fix: removed a second, byte-identical switch that followed the first —
 * every case of the first switch returns, so the duplicate was unreachable
 * dead code.
 *
 * @param {string} level - level name ("XML", "SILLY", "DEBUG", "INFO", "WARN", "ERROR")
 * @returns {number} verbosity rank from 1 (ERROR) to 5 (XML/SILLY)
 */
function LogLevelHierarchy(level) {
  switch (level) {
    case "XML":
    case "SILLY":
      return 5;
    case "DEBUG":
      return 4;
    case "INFO":
      return 3;
    case "WARN":
      return 2;
    case "ERROR":
      return 1;
    default:
      return 3;
  }
}
|
||||
|
||||
// Public API: expose the websocket log-event helper to other server modules.
exports.createLogEvent = createLogEvent;
|
||||
|
||||
@@ -4,11 +4,14 @@ const { FortellisJobExport, FortellisSelectedCustomer } = require("../fortellis/
|
||||
const CdkCalculateAllocations = require("../cdk/cdk-calculate-allocations").default;
|
||||
const registerRREvents = require("../rr/rr-register-socket-events");
|
||||
|
||||
const SOCKET_SESSION_TTL_SECONDS = 60 * 60 * 24;
|
||||
|
||||
const redisSocketEvents = ({ io, redisHelpers, ioHelpers, logger }) => {
|
||||
// Destructure helpers locally, but keep full objects available for downstream modules
|
||||
const {
|
||||
setSessionData,
|
||||
getSessionData,
|
||||
clearSessionData,
|
||||
addUserSocketMapping,
|
||||
removeUserSocketMapping,
|
||||
refreshUserSocketTTL,
|
||||
@@ -51,12 +54,16 @@ const redisSocketEvents = ({ io, redisHelpers, ioHelpers, logger }) => {
|
||||
}
|
||||
|
||||
// NEW: seed a base session for this socket so downstream handlers can read it
|
||||
await setSessionData(socket.id, {
|
||||
bodyshopId,
|
||||
email: user.email,
|
||||
uid: user.user_id || user.uid,
|
||||
seededAt: Date.now()
|
||||
});
|
||||
await setSessionData(
|
||||
socket.id,
|
||||
{
|
||||
bodyshopId,
|
||||
email: user.email,
|
||||
uid: user.user_id || user.uid,
|
||||
seededAt: Date.now()
|
||||
},
|
||||
SOCKET_SESSION_TTL_SECONDS
|
||||
);
|
||||
|
||||
await addUserSocketMapping(user.email, socket.id, bodyshopId);
|
||||
next();
|
||||
@@ -126,14 +133,18 @@ const redisSocketEvents = ({ io, redisHelpers, ioHelpers, logger }) => {
|
||||
}
|
||||
|
||||
// NEW: refresh (or create) the base session with the latest info
|
||||
await setSessionData(socket.id, {
|
||||
bodyshopId,
|
||||
email: user.email,
|
||||
uid: user.user_id || user.uid,
|
||||
refreshedAt: Date.now()
|
||||
});
|
||||
await setSessionData(
|
||||
socket.id,
|
||||
{
|
||||
bodyshopId,
|
||||
email: user.email,
|
||||
uid: user.user_id || user.uid,
|
||||
refreshedAt: Date.now()
|
||||
},
|
||||
SOCKET_SESSION_TTL_SECONDS
|
||||
);
|
||||
|
||||
await refreshUserSocketTTL(user.email, bodyshopId);
|
||||
await refreshUserSocketTTL(user.email);
|
||||
socket.emit("token-updated", { success: true });
|
||||
} catch (error) {
|
||||
if (error.code === "auth/id-token-expired") {
|
||||
@@ -189,6 +200,11 @@ const redisSocketEvents = ({ io, redisHelpers, ioHelpers, logger }) => {
|
||||
if (socket.user?.email) {
|
||||
await removeUserSocketMapping(socket.user.email, socket.id);
|
||||
}
|
||||
try {
|
||||
await clearSessionData(socket.id);
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
// Optional: clear transactional session
|
||||
try {
|
||||
await clearSessionTransactionData(socket.id);
|
||||
|
||||
Reference in New Issue
Block a user