Merge remote-tracking branch 'origin/master-AIO' into feature/IO-2433-esignature

This commit is contained in:
Patrick Fic
2026-03-23 12:57:38 -07:00
44 changed files with 1555 additions and 317 deletions

View File

@@ -130,12 +130,13 @@ exports.default = async (req, res) => {
async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
try {
const url = urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(bill.vendor.name)}'`
);
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(bill.vendor.name)}'`
),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -150,6 +151,11 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
bodyshopid: bill.job.shopid,
email: req.user.email
});
logger.log("qbo-payables-query", "DEBUG", req.user.email, null, {
method: "QueryVendorRecord",
call: url,
result: result.json
});
return result.json?.QueryResponse?.Vendor?.[0];
} catch (error) {
@@ -167,8 +173,9 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
DisplayName: StandardizeName(bill.vendor.name)
};
try {
const url = urlBuilder(qbo_realmId, "vendor");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "vendor"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -184,6 +191,12 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
bodyshopid: bill.job.shopid,
email: req.user.email
});
logger.log("qbo-payments-insert", "DEBUG", req.user.email, null, {
method: "InsertVendorRecord",
call: url,
Vendor: Vendor,
result: result.json
});
if (result.status >= 400) {
throw new Error(JSON.stringify(result.json.Fault));
@@ -274,11 +287,12 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
VendorRef: {
value: vendor.Id
},
...(vendor.TermRef && !bill.is_credit_memo && {
SalesTermRef: {
value: vendor.TermRef.value
}
}),
...(vendor.TermRef &&
!bill.is_credit_memo && {
SalesTermRef: {
value: vendor.TermRef.value
}
}),
TxnDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.format("YYYY-MM-DD"),
@@ -318,8 +332,9 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
[logKey]: logValue
});
try {
const url = urlBuilder(qbo_realmId, bill.is_credit_memo ? "vendorcredit" : "bill");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, bill.is_credit_memo ? "vendorcredit" : "bill"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -335,6 +350,12 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
bodyshopid: bill.job.shopid,
email: req.user.email
});
logger.log("qbo-payables-insert", "DEBUG", req.user.email, null, {
method: "InsertBill",
call: url,
postingObj: bill.is_credit_memo ? VendorCredit : billQbo,
result: result.json
});
if (result.status >= 400) {
throw new Error(JSON.stringify(result.json.Fault));

View File

@@ -82,14 +82,7 @@ exports.default = async (req, res) => {
if (isThreeTier || (!isThreeTier && twoTierPref === "name")) {
//Insert the name/owner and account for whether the source should be the ins co in 3 tier..
ownerCustomerTier = await QueryOwner(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier,
insCoCustomerTier
);
ownerCustomerTier = await QueryOwner(oauthClient, qbo_realmId, req, payment.job, insCoCustomerTier);
//Query for the owner itself.
if (!ownerCustomerTier) {
ownerCustomerTier = await InsertOwner(
@@ -229,8 +222,9 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef)
paymentQbo
});
try {
const url = urlBuilder(qbo_realmId, "payment");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "payment"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -246,6 +240,12 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef)
bodyshopid: payment.job.shopid,
email: req.user.email
});
logger.log("qbo-payments-insert", "DEBUG", req.user.email, null, {
method: "InsertPayment",
call: url,
paymentQbo: paymentQbo,
result: result.json
});
if (result.status >= 400) {
throw new Error(JSON.stringify(result.json.Fault));
@@ -428,8 +428,9 @@ async function InsertCreditMemo(oauthClient, qbo_realmId, req, payment, parentRe
paymentQbo
});
try {
const url = urlBuilder(qbo_realmId, "creditmemo");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "creditmemo"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -445,6 +446,12 @@ async function InsertCreditMemo(oauthClient, qbo_realmId, req, payment, parentRe
bodyshopid: req.user.bodyshopid,
email: req.user.email
});
logger.log("qbo-metadata-query", "DEBUG", req.user.email, null, {
method: "InsertCreditMemo",
call: url,
paymentQbo: paymentQbo,
result: result.json
});
if (result.status >= 400) {
throw new Error(JSON.stringify(result.json.Fault));

View File

@@ -213,12 +213,13 @@ exports.default = async (req, res) => {
async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
try {
const url = urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}' and Active = true`
);
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}' and Active = true`
),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -233,6 +234,11 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
jobid: job.id,
email: req.user.email
});
logger.log("qbo-receivables-query", "DEBUG", req.user.email, job.id, {
method: "QueryInsuranceCo",
call: url,
result: result.json
});
return result.json?.QueryResponse?.Customer?.[0];
} catch (error) {
@@ -266,8 +272,9 @@ async function InsertInsuranceCo(oauthClient, qbo_realmId, req, job, bodyshop) {
}
};
try {
const url = urlBuilder(qbo_realmId, "customer");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "customer"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -283,6 +290,12 @@ async function InsertInsuranceCo(oauthClient, qbo_realmId, req, job, bodyshop) {
jobid: job.id,
email: req.user.email
});
logger.log("qbo-receivables-insert", "DEBUG", req.user.email, job.id, {
method: "InsertInsuranceCo",
call: url,
customerObj: Customer,
result: result.json
});
return result.json?.Customer;
} catch (error) {
@@ -298,12 +311,13 @@ exports.InsertInsuranceCo = InsertInsuranceCo;
async function QueryOwner(oauthClient, qbo_realmId, req, job, parentTierRef) {
const ownerName = generateOwnerTier(job, true, null);
const url = urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(ownerName)}' and Active = true`
);
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(ownerName)}' and Active = true`
),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -318,6 +332,11 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, parentTierRef) {
jobid: job.id,
email: req.user.email
});
logger.log("qbo-receivables-query", "DEBUG", req.user.email, job.id, {
method: "QueryOwner",
call: url,
result: result.json
});
return result.json?.QueryResponse?.Customer?.find((x) => x.ParentRef?.value === parentTierRef?.Id);
}
@@ -347,8 +366,9 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
: {})
};
try {
const url = urlBuilder(qbo_realmId, "customer");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "customer"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -364,6 +384,12 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
jobid: job.id,
email: req.user.email
});
logger.log("qbo-receivables-insert", "DEBUG", req.user.email, job.id, {
method: "InsertOwner",
call: url,
customerObj: Customer,
result: result.json
});
return result.json?.Customer;
} catch (error) {
@@ -378,12 +404,13 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
exports.InsertOwner = InsertOwner;
async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
const url = urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${job.ro_number}' and Active = true`
);
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${job.ro_number}' and Active = true`
),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -398,6 +425,11 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
jobid: job.id,
email: req.user.email
});
logger.log("qbo-receivables-query", "DEBUG", req.user.email, job.id, {
method: "QueryJob",
call: url,
result: result.json
});
const customers = result.json?.QueryResponse?.Customer;
return customers && (parentTierRef ? customers.find((x) => x.ParentRef.value === parentTierRef.Id) : customers[0]);
@@ -423,8 +455,9 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
}
};
try {
const url = urlBuilder(qbo_realmId, "customer");
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "customer"),
url: url,
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -440,6 +473,12 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
jobid: job.id,
email: req.user.email
});
logger.log("qbo-receivables-insert", "DEBUG", req.user.email, job.id, {
method: "InsertJob",
call: url,
customerObj: Customer,
result: result.json
});
if (result.status >= 400) {
throw new Error(JSON.stringify(result.json.Fault));

View File

@@ -0,0 +1,73 @@
const { isString } = require("lodash");
const { sendServerEmail } = require("../email/sendemail");
const logger = require("../utils/logger");
const { raw } = require("express");
const SUPPORT_EMAIL = "patrick@imexsystems.ca";
/**
 * Parse a JSON string without throwing.
 * Non-string inputs (undefined, numbers, objects, …) and malformed JSON
 * both yield null instead of an exception.
 * @param {*} maybeJson - candidate JSON text (typically a multipart form field)
 * @returns {*} the parsed value, or null when input is not parseable JSON text
 */
const safeJsonParse = (maybeJson) => {
  if (isString(maybeJson)) {
    try {
      return JSON.parse(maybeJson);
    } catch {
      // malformed JSON — fall through to the null return below
    }
  }
  return null;
};
/**
 * Express handler: emails user feedback about AI-extracted bill data to support.
 *
 * Expects a multipart form (parsed by multer upstream) with:
 *   - rating: "up" | "down" (anything other than "up" renders as "-")
 *   - comments: optional free text (trimmed; non-strings treated as empty)
 *   - billFormValues / rawAIData: JSON-encoded strings (unparseable → null)
 *   - shopname: optional shop label
 *   - req.file: optional uploaded bill PDF, attached to the email when present
 *
 * Responds 200 {success:true} on send, 500 on any failure (logged).
 */
const handleBillAiFeedback = async (req, res) => {
  try {
    const rating = req.body?.rating;
    const comments = isString(req.body?.comments) ? req.body?.comments?.trim() : "";
    const billFormValues = safeJsonParse(req.body?.billFormValues);
    const rawAIData = safeJsonParse(req.body?.rawAIData);
    // tolerate either casing of the job id field; "unknown" keeps the subject readable
    const jobid = billFormValues?.jobid || billFormValues?.jobId || "unknown";
    const shopname = req.body?.shopname || "unknown";
    const subject = `Bill AI Feedback (${rating === "up" ? "+" : "-"}) Shop=${shopname} jobid=${jobid}`;
    // BUGFIX: the previous `.filter(Boolean)` on this array silently removed the
    // empty-string entries, which are intentional blank separator lines between
    // the email sections — keep every entry so the body is formatted as intended.
    const text = [
      `User: ${req?.user?.email || "unknown"}`,
      `Rating: ${rating}`,
      comments ? `Comments: ${comments}` : "Comments: (none)",
      "",
      "Form Values (User):",
      JSON.stringify(billFormValues, null, 4),
      "",
      "Raw AI Data:",
      JSON.stringify(rawAIData, null, 4)
    ].join("\n");
    const attachments = [];
    if (req.file?.buffer) {
      attachments.push({
        filename: req.file.originalname || `bill-${jobid}.pdf`,
        content: req.file.buffer,
        contentType: req.file.mimetype || "application/pdf"
      });
    }
    await sendServerEmail({
      to: [SUPPORT_EMAIL],
      subject,
      type: "text",
      text,
      attachments
    });
    return res.json({ success: true });
  } catch (error) {
    // log the failure with enough detail to debug, but return a generic message
    logger.log("bill-ai-feedback-error", "ERROR", req?.user?.email, null, {
      message: error?.message,
      stack: error?.stack
    });
    return res.status(500).json({ message: "Failed to submit feedback" });
  }
};
module.exports = {
handleBillAiFeedback
};

View File

@@ -212,7 +212,8 @@ async function processSinglePageDocument(pdfBuffer) {
return {
...processedData,
originalTextractResponse: result
//Removed as this is a large object that provides minimal value to send to client.
// originalTextractResponse: result
};
}
@@ -392,7 +393,8 @@ async function handleTextractNotification(message) {
status: 'COMPLETED',
data: {
...processedData,
originalTextractResponse: originalResponse
//Removed as this is a large object that provides minimal value to send to client.
// originalTextractResponse: originalResponse
},
completedAt: new Date().toISOString()
}

View File

@@ -66,7 +66,12 @@ exports.default = async function ReloadCdkMakes(req, res) {
} catch (error) {
logger.log("cdk-replace-makes-models-error", "ERROR", req.user.email, null, {
cdk_dealerid,
error
error: {
message: error?.message,
stack: error?.stack,
name: error?.name,
code: error?.code
}
});
res.status(500).json(error);
}
@@ -105,7 +110,12 @@ async function GetCdkMakes(req, cdk_dealerid) {
} catch (error) {
logger.log("cdk-replace-makes-models-error", "ERROR", req.user.email, null, {
cdk_dealerid,
error
error: {
message: error?.message,
stack: error?.stack,
name: error?.name,
code: error?.code
}
});
throw new Error(error);
@@ -141,7 +151,12 @@ async function GetFortellisMakes(req, cdk_dealerid) {
} catch (error) {
logger.log("fortellis-replace-makes-models-error", "ERROR", req.user.email, null, {
cdk_dealerid,
error
error: {
message: error?.message,
stack: error?.stack,
name: error?.name,
code: error?.code
}
});
throw new Error(error);

View File

@@ -44,7 +44,7 @@ const logEmail = async (req, email) => {
}
};
const sendServerEmail = async ({ subject, text, to = [] }) => {
const sendServerEmail = async ({ subject, text, to = [], attachments }) => {
if (process.env.NODE_ENV === undefined) return;
try {
@@ -57,6 +57,7 @@ const sendServerEmail = async ({ subject, text, to = [] }) => {
to: ["support@imexsystems.ca", ...to],
subject: subject,
text: text,
attachments: attachments,
ses: {
// optional extra arguments for SendRawEmail
Tags: [

View File

@@ -959,7 +959,7 @@ async function UpdateDmsVehicle({ socket, redisHelpers, JobData, DMSVeh, DMSCust
delete DMSVehToSend.inventoryAccount;
!DMSVehToSend.vehicle.engineNumber && delete DMSVehToSend.vehicle.engineNumber;
!DMSVehToSend.vehicle.saleClassValue && DMSVehToSend.vehicle.saleClassValue === "MISC";
!DMSVehToSend.vehicle.saleClassValue && (DMSVehToSend.vehicle.saleClassValue = "MISC");
!DMSVehToSend.vehicle.exteriorColor && delete DMSVehToSend.vehicle.exteriorColor;
const result = await MakeFortellisCall({

View File

@@ -315,7 +315,12 @@ function CalculateRatesTotals(ratesList) {
if (item.mod_lbr_ty) {
//Check to see if it has 0 hours and a price instead.
//Extend for when there are hours and a price.
if (item.lbr_op === "OP14" && item.act_price > 0 && (!item.part_type || item.mod_lb_hrs === 0) && !IsAdditionalCost(item)) {
if (
item.lbr_op === "OP14" &&
item.act_price > 0 &&
(!item.part_type || item.mod_lb_hrs === 0) &&
!IsAdditionalCost(item)
) {
//Scenario where SGI may pay out hours using a part price.
if (!ret[item.mod_lbr_ty.toLowerCase()].total) {
ret[item.mod_lbr_ty.toLowerCase()].total = Dinero();
@@ -339,38 +344,30 @@ function CalculateRatesTotals(ratesList) {
let subtotal = Dinero({ amount: 0 });
let rates_subtotal = Dinero({ amount: 0 });
for (const property in ret) {
for (const [property, values] of Object.entries(ret)) {
//Skip calculating mapa and mash if we got the amounts.
if (!((property === "mapa" && hasMapaLine) || (property === "mash" && hasMashLine))) {
if (!ret[property].total) {
ret[property].total = Dinero();
}
let threshold;
//Check if there is a max for this type.
if (ratesList.materials && ratesList.materials[property]) {
//
if (ratesList.materials[property].cal_maxdlr && ratesList.materials[property].cal_maxdlr > 0) {
//It has an upper threshhold.
threshold = Dinero({
amount: Math.round(ratesList.materials[property].cal_maxdlr * 100)
});
}
}
const shouldSkipCalculation = (property === "mapa" && hasMapaLine) || (property === "mash" && hasMashLine);
if (!shouldSkipCalculation) {
values.total ??= Dinero();
//Check if there is a max for this type and apply it.
const maxDollar =
ratesList.materials?.[property]?.cal_maxdlr || ratesList.materials?.[property.toUpperCase()]?.cal_maxdlr;
const threshold = maxDollar > 0 ? Dinero({ amount: Math.round(maxDollar * 100) }) : null;
const total = Dinero({
amount: Math.round((ret[property].rate || 0) * 100)
}).multiply(ret[property].hours);
amount: Math.round((values.rate || 0) * 100)
}).multiply(values.hours);
if (threshold && total.greaterThanOrEqual(threshold)) {
ret[property].total = ret[property].total.add(threshold);
} else {
ret[property].total = ret[property].total.add(total);
}
values.total = values.total.add(threshold && total.greaterThanOrEqual(threshold) ? threshold : total);
}
subtotal = subtotal.add(ret[property].total);
subtotal = subtotal.add(values.total);
if (property !== "mapa" && property !== "mash") rates_subtotal = rates_subtotal.add(ret[property].total);
if (property !== "mapa" && property !== "mash") {
rates_subtotal = rates_subtotal.add(values.total);
}
}
ret.subtotal = subtotal;

View File

@@ -4,9 +4,14 @@ const multer = require("multer");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { handleBillOcr, handleBillOcrStatus } = require("../ai/bill-ocr/bill-ocr");
const { handleBillAiFeedback } = require("../ai/bill-ai-feedback");
// Configure multer for form data parsing
const upload = multer();
// Configure multer for form data parsing (memory storage)
const upload = multer({
limits: {
fileSize: 5 * 1024 * 1024 // 5MB
}
});
router.use(validateFirebaseIdTokenMiddleware);
router.use(withUserGraphQLClientMiddleware);
@@ -14,4 +19,6 @@ router.use(withUserGraphQLClientMiddleware);
router.post("/bill-ocr", upload.single('billScan'), handleBillOcr);
router.get("/bill-ocr/status/:textractJobId", handleBillOcrStatus);
router.post("/bill-feedback", upload.single("billPdf"), handleBillAiFeedback);
module.exports = router;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -205,8 +205,7 @@ const updateRRRepairOrderWithFullData = async (args) => {
const { client, opts } = buildClientAndOpts(bodyshop);
// For full data update after early RO, we still use "Insert" referenceId
// because we're inserting the job operations for the first time
// For full data update after early RO, use the RR update route.
const finalOpts = {
...opts,
envelope: {
@@ -214,7 +213,7 @@ const updateRRRepairOrderWithFullData = async (args) => {
sender: {
...(opts?.envelope?.sender || {}),
task: "BSMRO",
referenceId: "Insert"
referenceId: "Update"
}
}
};
@@ -317,32 +316,45 @@ const updateRRRepairOrderWithFullData = async (args) => {
opCode
});
// Add roNo for linking to existing RO
// Update the existing RO created during the early RO step.
payload.finalUpdate = "N";
payload.roNo = String(roNo);
payload.outsdRoNo = job?.ro_number || job?.id || undefined;
// Keep rolabor - it's needed to register the job/OpCode accounts in Reynolds
// Without this, Reynolds won't recognize the OpCode when we send rogg operations
// The rolabor section tells Reynolds "these jobs exist" even with minimal data
// RR update rejects placeholder non-labor ROLABOR rows with zero labor prices.
// Keep only the actual labor jobs in ROLABOR and let ROGOG carry parts/extras.
if (payload.rolabor?.ops?.length && payload.rogg?.ops?.length) {
const laborJobNos = new Set(
payload.rogg.ops
.filter((op) => op?.segmentKind === "laborTaxable" || op?.segmentKind === "laborNonTaxable")
.map((op) => String(op.jobNo))
);
CreateRRLogEvent(socket, "INFO", "Preparing full data for early RO (using create with roNo)", {
payload.rolabor.ops = payload.rolabor.ops.filter((op) => laborJobNos.has(String(op?.jobNo)));
if (!payload.rolabor.ops.length) {
delete payload.rolabor;
}
}
CreateRRLogEvent(socket, "INFO", "Preparing full data update for existing RR RO", {
roNo: String(roNo),
hasRolabor: !!payload.rolabor,
rolaborCount: payload.rolabor?.ops?.length || 0,
hasRogg: !!payload.rogg,
payload
});
// Use createRepairOrder (not update) with the roNo to link to the existing early RO
// Reynolds will merge this with the existing RO header
const response = await client.createRepairOrder(payload, finalOpts);
const response = await client.updateRepairOrder(payload, finalOpts);
CreateRRLogEvent(
socket,
"INFO",
"Sending full data for early RO (using create with roNo)",
"RR full data update sent for existing RO",
withRRRequestXml(response, {
roNo: String(roNo),
hasRolabor: !!payload.rolabor,
rolaborCount: payload.rolabor?.ops?.length || 0,
hasRogg: !!payload.rogg,
payload,
response

View File

@@ -368,8 +368,9 @@ const buildRogogFromAllocations = (allocations, { opCode, payType = "Cust", roNo
*
* We still keep a 1:1 mapping with GOG ops: each op gets a corresponding
* OpCodeLaborInfo entry using the same JobNo and the same tax flag as its
* GOG line. Labor-specific details (hrs/rate) remain zeroed out, and the
* DMS can ignore non-labor ops by virtue of the zero hours/amounts.
* GOG line. Labor-specific hours/rate remain zeroed out, but actual labor
* sale amounts are mirrored into ROLABOR for labor segments so RR receives
* the expected labor pricing on updates. Non-labor ops remain zeroed.
*
* @param {Object} rogg - result of buildRogogFromAllocations
* @param {Object} opts
@@ -388,6 +389,8 @@ const buildRolaborFromRogog = (rogg, { payType = "Cust" } = {}) => {
const txFlag = firstLine.custTxblNtxblFlag ?? "N";
const linePayType = firstLine.custPayTypeFlag || "C";
const isLaborSegment = op.segmentKind === "laborTaxable" || op.segmentKind === "laborNonTaxable";
const laborAmount = isLaborSegment ? String(firstLine?.amount?.custPrice ?? "0") : "0";
return {
opCode: op.opCode,
@@ -403,8 +406,8 @@ const buildRolaborFromRogog = (rogg, { payType = "Cust" } = {}) => {
amount: {
payType,
amtType: "Job",
custPrice: "0",
totalAmt: "0"
custPrice: laborAmount,
totalAmt: laborAmount
}
};
});