Merged in release/2025-06-02 (pull request #2330)

Release/2025 06 02
This commit is contained in:
Patrick Fic
2025-05-22 16:46:27 +00:00
54 changed files with 1259 additions and 1332 deletions

View File

@@ -6,7 +6,7 @@ require("dotenv").config({
function urlBuilder(realmId, object, query = null) {
return `https://${
process.env.NODE_ENV === "production" ? "" : "sandbox-"
}quickbooks.api.intuit.com/v3/company/${realmId}/${object}${query ? `?query=${encodeURIComponent(query)}` : ""}`;
}quickbooks.api.intuit.com/v3/company/${realmId}/${object}?minorversion=75${query ? `&query=${encodeURIComponent(query)}` : ""}`;
}
function StandardizeName(str) {

View File

@@ -4,4 +4,5 @@ exports.chatter = require("./chatter").default;
exports.claimscorp = require("./claimscorp").default;
exports.kaizen = require("./kaizen").default;
exports.usageReport = require("./usageReport").default;
exports.podium = require("./podium").default;
exports.podium = require("./podium").default;
exports.emsUpload = require("./emsUpload").default;

22
server/data/emsUpload.js Normal file
View File

@@ -0,0 +1,22 @@
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const s3Client = require("../utils/s3"); // Using the S3 client utilities with LocalStack support
/**
 * Issue a presigned S3 PUT URL for an EMS archive upload.
 *
 * Expects `bodyshopid`, `ciecaid`, `clm_no` and `ownr_ln` in the request
 * body; these are combined with a server-side timestamp to build the S3
 * object key `<bodyshopid>/<ciecaid>-<clm_no>-<ownr_ln>-<timestamp>.zip`.
 *
 * @param req - Express request (Firebase-authenticated; `req.user.email` is used for error logging)
 * @param res - Express response; 200 with `{ presignedUrl }` on success,
 *              400 on missing fields, 500 on presigner failure
 * @returns {Promise<void>}
 */
const emsUpload = async (req, res) => {
  try {
    const { bodyshopid, ciecaid, clm_no, ownr_ln } = req.body;
    // Reject requests missing any key component so we never mint a URL for a
    // malformed object key such as "undefined/undefined-undefined-...".
    if (!bodyshopid || !ciecaid || !clm_no || !ownr_ln) {
      res.status(400).json({ error: "Missing required field(s)." });
      return;
    }
    // NOTE(review): these values flow directly into the S3 key — confirm
    // upstream validation prevents separators like "/" or "..".
    const presignedUrl = await s3Client.getPresignedUrl({
      bucketName: process.env.S3_EMS_UPLOAD_BUCKET,
      key: `${bodyshopid}/${ciecaid}-${clm_no}-${ownr_ln}-${moment().format("YYYY-MM-DD--HH-mm-ss")}.zip`
    });
    res.status(200).json({ presignedUrl });
  } catch (error) {
    logger.log("ems-upload-presign-error", "ERROR", req?.user?.email, null, {
      error: error.message,
      stack: error.stack
    });
    // The stack trace is logged above for operators; returning it to the
    // HTTP client would leak server implementation details.
    res.status(500).json({ error: error.message });
  }
};
exports.default = emsUpload;

View File

@@ -185,7 +185,7 @@ async function uploadViaSFTP(csvObj) {
await sftp.connect(ftpSetup);
try {
csvObj.result = await sftp.put(Buffer.from(csvObj.xml), `${csvObj.filename}`);
csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
logger.log("podium-sftp-upload", "DEBUG", "api", csvObj.bodyshopid, {
imexshopid: csvObj.imexshopid,
filename: csvObj.filename,

View File

@@ -1596,6 +1596,7 @@ query QUERY_JOB_COSTING_DETAILS($id: uuid!) {
ca_customer_gst
dms_allocation
cieca_pfl
cieca_stl
materials
joblines(where: { removed: { _eq: false } }) {
id
@@ -1712,6 +1713,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
ca_customer_gst
dms_allocation
cieca_pfl
cieca_stl
materials
joblines(where: {removed: {_eq: false}}) {
id

View File

@@ -567,6 +567,29 @@ function GenerateCostingData(job) {
);
}
if (InstanceManager({ imex: false, rome: true })) {
const stlTowing = job.cieca_stl?.data.find((c) => c.ttl_type === "OTTW");
const stlStorage = job.cieca_stl?.data.find((c) => c.ttl_type === "OTST");
if (!jobLineTotalsByProfitCenter.additional[defaultProfits["TOW"]])
jobLineTotalsByProfitCenter.additional[defaultProfits["TOW"]] = Dinero();
jobLineTotalsByProfitCenter.additional[defaultProfits["TOW"]] = stlTowing
? Dinero({ amount: Math.round(stlTowing.ttl_amt * 100) })
: Dinero({
amount: Math.round((job.towing_payable || 0) * 100)
});
if (!jobLineTotalsByProfitCenter.additional[defaultProfits["STO"]])
jobLineTotalsByProfitCenter.additional[defaultProfits["STO"]] = Dinero();
jobLineTotalsByProfitCenter.additional[defaultProfits["STO"]] = stlStorage
? Dinero({ amount: Math.round(stlStorage.ttl_amt * 100) })
: Dinero({
amount: Math.round((job.storage_payable || 0) * 100)
});
}
//Is it a DMS Setup?
const selectedDmsAllocationConfig =
(job.bodyshop.md_responsibility_centers.dms_defaults &&

View File

@@ -138,6 +138,9 @@ router.post("/canvastest", validateFirebaseIdTokenMiddleware, canvastest);
// Alert Check
router.post("/alertcheck", eventAuthorizationMiddleware, alertCheck);
//EMS Upload
router.post("/emsupload", validateFirebaseIdTokenMiddleware, data.emsUpload);
// Redis Cache Routes
router.post("/bodyshop-cache", eventAuthorizationMiddleware, updateBodyshopCache);

View File

@@ -7,7 +7,7 @@ const { status, markConversationRead } = require("../sms/status");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
// Twilio Webhook Middleware for production
// TODO: Look into this because it technically is never validating anything
// TODO: This is never actually doing anything, we should probably verify
const twilioWebhookMiddleware = twilio.webhook({ validate: process.env.NODE_ENV === "PRODUCTION" });
router.post("/receive", twilioWebhookMiddleware, receive);

View File

@@ -1,17 +1,23 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const {
FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID,
UNARCHIVE_CONVERSATION,
CREATE_CONVERSATION,
INSERT_MESSAGE
} = require("../graphql-client/queries");
const { phone } = require("phone");
const { admin } = require("../firebase/firebase-handler");
const logger = require("../utils/logger");
const InstanceManager = require("../utils/instanceMgr").default;
exports.receive = async (req, res) => {
/**
* Receive SMS messages from Twilio and process them
* @param req
* @param res
* @returns {Promise<*>}
*/
const receive = async (req, res) => {
const {
logger,
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
@@ -20,7 +26,7 @@ exports.receive = async (req, res) => {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body)
image_path: generateMediaArray(req.body, logger)
};
logger.log("sms-inbound", "DEBUG", "api", null, loggerData);
@@ -35,7 +41,7 @@ exports.receive = async (req, res) => {
try {
// Step 1: Find the bodyshop and existing conversation
const response = await client.request(queries.FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
const response = await client.request(FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
mssid: req.body.MessagingServiceSid,
phone: phone(req.body.From).phoneNumber
});
@@ -46,7 +52,7 @@ exports.receive = async (req, res) => {
const bodyshop = response.bodyshops[0];
// Sort conversations by `updated_at` (or `created_at`) and pick the last one
// Step 4: Process conversation
const sortedConversations = bodyshop.conversations.sort((a, b) => new Date(a.created_at) - new Date(b.created_at));
const existingConversation = sortedConversations.length
? sortedConversations[sortedConversations.length - 1]
@@ -57,25 +63,21 @@ exports.receive = async (req, res) => {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
image_path: generateMediaArray(req.body, logger),
isoutbound: false,
userid: null // Add additional fields as necessary
userid: null
};
if (existingConversation) {
// Use the existing conversation
conversationid = existingConversation.id;
// Unarchive the conversation if necessary
if (existingConversation.archived) {
await client.request(queries.UNARCHIVE_CONVERSATION, {
await client.request(UNARCHIVE_CONVERSATION, {
id: conversationid,
archived: false
});
}
} else {
// Create a new conversation
const newConversationResponse = await client.request(queries.CREATE_CONVERSATION, {
const newConversationResponse = await client.request(CREATE_CONVERSATION, {
conversation: {
bodyshopid: bodyshop.id,
phone_num: phone(req.body.From).phoneNumber,
@@ -86,13 +88,12 @@ exports.receive = async (req, res) => {
conversationid = createdConversation.id;
}
// Ensure `conversationid` is added to the message
newMessage.conversationid = conversationid;
// Step 3: Insert the message into the conversation
const insertresp = await client.request(queries.INSERT_MESSAGE, {
// Step 5: Insert the message
const insertresp = await client.request(INSERT_MESSAGE, {
msg: newMessage,
conversationid: conversationid
conversationid
});
const message = insertresp?.insert_messages?.returning?.[0];
@@ -102,8 +103,7 @@ exports.receive = async (req, res) => {
throw new Error("Conversation data is missing from the response.");
}
// Step 4: Notify clients through Redis
const broadcastRoom = getBodyshopRoom(conversation.bodyshop.id);
// Step 6: Notify clients
const conversationRoom = getBodyshopConversationRoom({
bodyshopId: conversation.bodyshop.id,
conversationId: conversation.id
@@ -116,6 +116,8 @@ exports.receive = async (req, res) => {
msid: message.sid
};
const broadcastRoom = getBodyshopRoom(conversation.bodyshop.id);
ioRedis.to(broadcastRoom).emit("new-message-summary", {
...commonPayload,
existingConversation: !!existingConversation,
@@ -131,13 +133,13 @@ exports.receive = async (req, res) => {
summary: false
});
// Step 5: Send FCM notification
// Step 7: Send FCM notification
const fcmresp = await admin.messaging().send({
topic: `${message.conversation.bodyshop.imexshopid}-messaging`,
notification: {
title: InstanceManager({
imex: `ImEX Online Message - ${message.conversation.phone_num}`,
rome: `Rome Online Message - ${message.conversation.phone_num}`,
rome: `Rome Online Message - ${message.conversation.phone_num}`
}),
body: message.image_path ? `Image ${message.text}` : message.text
},
@@ -157,11 +159,17 @@ exports.receive = async (req, res) => {
res.status(200).send("");
} catch (e) {
handleError(req, e, res, "RECEIVE_MESSAGE");
handleError(req, e, res, "RECEIVE_MESSAGE", logger);
}
};
const generateMediaArray = (body) => {
/**
* Generate media array from the request body
* @param body
* @param logger
* @returns {null|*[]}
*/
const generateMediaArray = (body, logger) => {
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
const ret = [];
@@ -174,12 +182,20 @@ const generateMediaArray = (body) => {
}
};
const handleError = (req, error, res, context) => {
/**
* Handle error logging and response
* @param req
* @param error
* @param res
* @param context
* @param logger
*/
const handleError = (req, error, res, context, logger) => {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
image_path: generateMediaArray(req.body, logger),
messagingServiceSid: req.body.MessagingServiceSid,
context,
error
@@ -187,3 +203,7 @@ const handleError = (req, error, res, context) => {
res.status(500).json({ error: error.message || "Internal Server Error" });
};
module.exports = {
receive
};

View File

@@ -1,19 +1,20 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const twilio = require("twilio");
const { phone } = require("phone");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const { INSERT_MESSAGE } = require("../graphql-client/queries");
const client = twilio(process.env.TWILIO_AUTH_TOKEN, process.env.TWILIO_AUTH_KEY);
const gqlClient = require("../graphql-client/graphql-client").client;
exports.send = async (req, res) => {
/**
* Send an outbound SMS message
* @param req
* @param res
* @returns {Promise<void>}
*/
const send = async (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
const {
ioRedis,
logger,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
@@ -25,8 +26,8 @@ exports.send = async (req, res) => {
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
image: selectedMedia.length > 0,
image_path: selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
if (!to || !messagingServiceSid || (!body && selectedMedia.length === 0) || !conversationid) {
@@ -38,8 +39,8 @@ exports.send = async (req, res) => {
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
image: selectedMedia.length > 0,
image_path: selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
res.status(400).json({ success: false, message: "Missing required parameter(s)." });
return;
@@ -59,12 +60,15 @@ exports.send = async (req, res) => {
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
image: selectedMedia.length > 0,
image_path: selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
};
try {
const gqlResponse = await gqlClient.request(queries.INSERT_MESSAGE, { msg: newMessage, conversationid });
const gqlResponse = await gqlClient.request(INSERT_MESSAGE, {
msg: newMessage,
conversationid
});
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
@@ -111,3 +115,7 @@ exports.send = async (req, res) => {
res.status(500).json({ success: false, message: "Failed to send message through Twilio." });
}
};
module.exports = {
send
};

View File

@@ -1,13 +1,14 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const { UPDATE_MESSAGE_STATUS, MARK_MESSAGES_AS_READ } = require("../graphql-client/queries");
const logger = require("../utils/logger");
exports.status = async (req, res) => {
/**
* Handle the status of an SMS message
* @param req
* @param res
* @returns {Promise<*>}
*/
const status = async (req, res) => {
const { SmsSid, SmsStatus } = req.body;
const {
ioRedis,
@@ -21,7 +22,7 @@ exports.status = async (req, res) => {
}
// Update message status in the database
const response = await client.request(queries.UPDATE_MESSAGE_STATUS, {
const response = await client.request(UPDATE_MESSAGE_STATUS, {
msid: SmsSid,
fields: { status: SmsStatus }
});
@@ -65,7 +66,13 @@ exports.status = async (req, res) => {
}
};
exports.markConversationRead = async (req, res) => {
/**
* Mark a conversation as read
* @param req
* @param res
* @returns {Promise<*>}
*/
const markConversationRead = async (req, res) => {
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
@@ -80,7 +87,7 @@ exports.markConversationRead = async (req, res) => {
}
try {
const response = await client.request(queries.MARK_MESSAGES_AS_READ, {
const response = await client.request(MARK_MESSAGES_AS_READ, {
conversationId
});
@@ -104,3 +111,8 @@ exports.markConversationRead = async (req, res) => {
res.status(500).json({ error: "Failed to mark conversation as read." });
}
};
module.exports = {
status,
markConversationRead
};

View File

@@ -1,3 +1,11 @@
/**
* @module ioHelpers
* @param app
* @param api
* @param io
* @param logger
* @returns {{getBodyshopRoom: (function(*): string), getBodyshopConversationRoom: (function({bodyshopId: *, conversationId: *}): string)}}
*/
const applyIOHelpers = ({ app, api, io, logger }) => {
// Global Bodyshop Room
const getBodyshopRoom = (bodyshopId) => `bodyshop-broadcast-room:${bodyshopId}`;

View File

@@ -9,6 +9,7 @@ const {
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { InstanceRegion } = require("./instanceMgr");
const { isString, isEmpty } = require("lodash");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const createS3Client = () => {
const S3Options = {
@@ -95,6 +96,17 @@ const createS3Client = () => {
throw error;
}
};
/**
 * Create a time-limited presigned URL for uploading an object via HTTP PUT.
 * @param {Object} params
 * @param {string} params.bucketName - Target S3 bucket
 * @param {string} params.key - Object key the caller will write
 * @returns {Promise<string>} Presigned URL valid for 360 seconds
 */
const getPresignedUrl = async ({ bucketName, key }) => {
  // INTELLIGENT_TIERING lets S3 migrate rarely-read uploads to cheaper storage.
  const putCommand = new PutObjectCommand({
    Bucket: bucketName,
    Key: key,
    StorageClass: "INTELLIGENT_TIERING"
  });
  return getSignedUrl(s3Client, putCommand, { expiresIn: 360 });
};
return {
uploadFileToS3,
downloadFileFromS3,
@@ -102,8 +114,12 @@ const createS3Client = () => {
deleteFileFromS3,
copyFileInS3,
fileExistsInS3,
getPresignedUrl,
...s3Client
};
};
module.exports = createS3Client();