Merge remote-tracking branch 'origin/master-AIO' into feature/IO-3587-Commision-Cut-clean

This commit is contained in:
Dave
2026-03-20 15:05:23 -04:00
16 changed files with 1177 additions and 116 deletions

View File

@@ -0,0 +1,73 @@
// Server-side handler module for "Bill AI" feedback emails.
const { isString } = require("lodash");
const { sendServerEmail } = require("../email/sendemail");
const logger = require("../utils/logger");
// NOTE(review): `raw` is not referenced anywhere in this file — presumably a
// leftover import; confirm and remove in a follow-up.
const { raw } = require("express");
// Destination inbox for feedback notification emails.
const SUPPORT_EMAIL = "patrick@imexsystems.ca";
/**
 * Parse a JSON string without throwing.
 *
 * @param {*} maybeJson - Candidate value; anything that is not a string yields null.
 * @returns {*} The parsed value, or null when the input is not a string or is malformed JSON.
 */
const safeJsonParse = (maybeJson) => {
  if (isString(maybeJson)) {
    try {
      return JSON.parse(maybeJson);
    } catch {
      // Malformed JSON is an expected, non-fatal case — fall through to null.
    }
  }
  return null;
};
/**
 * POST handler for thumbs-up/down feedback on AI bill extraction.
 *
 * Expected multipart form fields (parsed upstream by multer):
 *  - rating: "up" | "down"
 *  - comments: optional free text
 *  - billFormValues / rawAIData: JSON strings (parsed leniently; null on bad JSON)
 *  - shopname: optional shop identifier
 *  - req.file: optional uploaded bill PDF, forwarded as an email attachment
 *
 * Sends a notification email to SUPPORT_EMAIL and responds with
 * { success: true }, or 500 + generic message if sending fails.
 *
 * @param {import("express").Request} req
 * @param {import("express").Response} res
 * @returns {Promise<import("express").Response>} the JSON response
 */
const handleBillAiFeedback = async (req, res) => {
  try {
    const rating = req.body?.rating;
    const comments = isString(req.body?.comments) ? req.body?.comments?.trim() : "";
    const billFormValues = safeJsonParse(req.body?.billFormValues);
    const rawAIData = safeJsonParse(req.body?.rawAIData);
    // Tolerate either key casing coming from the client form payload.
    const jobid = billFormValues?.jobid || billFormValues?.jobId || "unknown";
    const shopname = req.body?.shopname || "unknown";
    const subject = `Bill AI Feedback (${rating === "up" ? "+" : "-"}) Shop=${shopname} jobid=${jobid}`;
    // BUGFIX: the previous `.filter(Boolean)` stripped the intentional ""
    // separator entries (every other entry is always truthy — the comments
    // ternary always yields text), collapsing the blank lines between email
    // sections. A plain join preserves the intended formatting.
    const text = [
      `User: ${req?.user?.email || "unknown"}`,
      `Rating: ${rating}`,
      comments ? `Comments: ${comments}` : "Comments: (none)",
      "",
      "Form Values (User):",
      JSON.stringify(billFormValues, null, 4),
      "",
      "Raw AI Data:",
      JSON.stringify(rawAIData, null, 4)
    ].join("\n");
    // Optional bill PDF from multer memory storage.
    const attachments = [];
    if (req.file?.buffer) {
      attachments.push({
        filename: req.file.originalname || `bill-${jobid}.pdf`,
        content: req.file.buffer,
        contentType: req.file.mimetype || "application/pdf"
      });
    }
    await sendServerEmail({
      to: [SUPPORT_EMAIL],
      subject,
      type: "text",
      text,
      attachments
    });
    return res.json({ success: true });
  } catch (error) {
    // Log server-side; return a generic 500 so the client can surface a retry.
    logger.log("bill-ai-feedback-error", "ERROR", req?.user?.email, null, {
      message: error?.message,
      stack: error?.stack
    });
    return res.status(500).json({ message: "Failed to submit feedback" });
  }
};
// Public surface: the feedback handler, wired to POST /bill-feedback by the ai router.
module.exports = {
handleBillAiFeedback
};

View File

@@ -212,7 +212,8 @@ async function processSinglePageDocument(pdfBuffer) {
return {
...processedData,
originalTextractResponse: result
//Removed as this is a large object that provides minimal value to send to client.
// originalTextractResponse: result
};
}
@@ -392,7 +393,8 @@ async function handleTextractNotification(message) {
status: 'COMPLETED',
data: {
...processedData,
originalTextractResponse: originalResponse
//Removed as this is a large object that provides minimal value to send to client.
// originalTextractResponse: originalResponse
},
completedAt: new Date().toISOString()
}

View File

@@ -44,7 +44,7 @@ const logEmail = async (req, email) => {
}
};
const sendServerEmail = async ({ subject, text, to = [] }) => {
const sendServerEmail = async ({ subject, text, to = [], attachments }) => {
if (process.env.NODE_ENV === undefined) return;
try {
@@ -57,6 +57,7 @@ const sendServerEmail = async ({ subject, text, to = [] }) => {
to: ["support@imexsystems.ca", ...to],
subject: subject,
text: text,
attachments: attachments,
ses: {
// optional extra arguments for SendRawEmail
Tags: [

View File

@@ -4,9 +4,14 @@ const multer = require("multer");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { handleBillOcr, handleBillOcrStatus } = require("../ai/bill-ocr/bill-ocr");
const { handleBillAiFeedback } = require("../ai/bill-ai-feedback");
// Configure multer for form data parsing
const upload = multer();
// Configure multer for form data parsing (memory storage)
const upload = multer({
limits: {
fileSize: 5 * 1024 * 1024 // 5MB
}
});
router.use(validateFirebaseIdTokenMiddleware);
router.use(withUserGraphQLClientMiddleware);
@@ -14,4 +19,6 @@ router.use(withUserGraphQLClientMiddleware);
router.post("/bill-ocr", upload.single('billScan'), handleBillOcr);
router.get("/bill-ocr/status/:textractJobId", handleBillOcrStatus);
router.post("/bill-feedback", upload.single("billPdf"), handleBillAiFeedback);
module.exports = router;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -205,8 +205,7 @@ const updateRRRepairOrderWithFullData = async (args) => {
const { client, opts } = buildClientAndOpts(bodyshop);
// For full data update after early RO, we still use "Insert" referenceId
// because we're inserting the job operations for the first time
// For full data update after early RO, use the RR update route.
const finalOpts = {
...opts,
envelope: {
@@ -214,7 +213,7 @@ const updateRRRepairOrderWithFullData = async (args) => {
sender: {
...(opts?.envelope?.sender || {}),
task: "BSMRO",
referenceId: "Insert"
referenceId: "Update"
}
}
};
@@ -317,32 +316,45 @@ const updateRRRepairOrderWithFullData = async (args) => {
opCode
});
// Add roNo for linking to existing RO
// Update the existing RO created during the early RO step.
payload.finalUpdate = "N";
payload.roNo = String(roNo);
payload.outsdRoNo = job?.ro_number || job?.id || undefined;
// Keep rolabor - it's needed to register the job/OpCode accounts in Reynolds
// Without this, Reynolds won't recognize the OpCode when we send rogg operations
// The rolabor section tells Reynolds "these jobs exist" even with minimal data
// RR update rejects placeholder non-labor ROLABOR rows with zero labor prices.
// Keep only the actual labor jobs in ROLABOR and let ROGOG carry parts/extras.
if (payload.rolabor?.ops?.length && payload.rogg?.ops?.length) {
const laborJobNos = new Set(
payload.rogg.ops
.filter((op) => op?.segmentKind === "laborTaxable" || op?.segmentKind === "laborNonTaxable")
.map((op) => String(op.jobNo))
);
CreateRRLogEvent(socket, "INFO", "Preparing full data for early RO (using create with roNo)", {
payload.rolabor.ops = payload.rolabor.ops.filter((op) => laborJobNos.has(String(op?.jobNo)));
if (!payload.rolabor.ops.length) {
delete payload.rolabor;
}
}
CreateRRLogEvent(socket, "INFO", "Preparing full data update for existing RR RO", {
roNo: String(roNo),
hasRolabor: !!payload.rolabor,
rolaborCount: payload.rolabor?.ops?.length || 0,
hasRogg: !!payload.rogg,
payload
});
// Use createRepairOrder (not update) with the roNo to link to the existing early RO
// Reynolds will merge this with the existing RO header
const response = await client.createRepairOrder(payload, finalOpts);
const response = await client.updateRepairOrder(payload, finalOpts);
CreateRRLogEvent(
socket,
"INFO",
"Sending full data for early RO (using create with roNo)",
"RR full data update sent for existing RO",
withRRRequestXml(response, {
roNo: String(roNo),
hasRolabor: !!payload.rolabor,
rolaborCount: payload.rolabor?.ops?.length || 0,
hasRogg: !!payload.rogg,
payload,
response

View File

@@ -368,8 +368,9 @@ const buildRogogFromAllocations = (allocations, { opCode, payType = "Cust", roNo
*
* We still keep a 1:1 mapping with GOG ops: each op gets a corresponding
* OpCodeLaborInfo entry using the same JobNo and the same tax flag as its
* GOG line. Labor-specific details (hrs/rate) remain zeroed out, and the
* DMS can ignore non-labor ops by virtue of the zero hours/amounts.
* GOG line. Labor-specific hours/rate remain zeroed out, but actual labor
* sale amounts are mirrored into ROLABOR for labor segments so RR receives
* the expected labor pricing on updates. Non-labor ops remain zeroed.
*
* @param {Object} rogg - result of buildRogogFromAllocations
* @param {Object} opts
@@ -388,6 +389,8 @@ const buildRolaborFromRogog = (rogg, { payType = "Cust" } = {}) => {
const txFlag = firstLine.custTxblNtxblFlag ?? "N";
const linePayType = firstLine.custPayTypeFlag || "C";
const isLaborSegment = op.segmentKind === "laborTaxable" || op.segmentKind === "laborNonTaxable";
const laborAmount = isLaborSegment ? String(firstLine?.amount?.custPrice ?? "0") : "0";
return {
opCode: op.opCode,
@@ -403,8 +406,8 @@ const buildRolaborFromRogog = (rogg, { payType = "Cust" } = {}) => {
amount: {
payType,
amtType: "Job",
custPrice: "0",
totalAmt: "0"
custPrice: laborAmount,
totalAmt: laborAmount
}
};
});