Merge branch 'master-AIO' into feature/IO-2776-cdk-fortellis

This commit is contained in:
Patrick Fic
2025-04-14 12:51:43 -07:00
170 changed files with 20783 additions and 19308 deletions

View File

@@ -12,7 +12,7 @@ const AxiosLib = require("axios").default;
const axios = AxiosLib.create();
const { PBS_ENDPOINTS, PBS_CREDENTIALS } = require("./pbs-constants");
const { CheckForErrors } = require("./pbs-job-export");
const uuid = require("uuid").v4;
axios.interceptors.request.use((x) => {
const socket = x.socket;
@@ -21,6 +21,7 @@ axios.interceptors.request.use((x) => {
...x.headers[x.method],
...x.headers
};
const printable = `${new Date()} | Request: ${x.method.toUpperCase()} | ${
x.url
} | ${JSON.stringify(x.data)} | ${JSON.stringify(headers)}`;

View File

@@ -39,12 +39,14 @@ exports.createShop = async (req, res) => {
try {
const result = await client.request(
`mutation INSERT_BODYSHOPS($bs: bodyshops_insert_input!){
insert_bodyshops_one(object:$bs){
id
}
}`,
`mutation INSERT_BODYSHOPS($bs: bodyshops_insert_input!) {
insert_bodyshops_one(object: $bs) {
id
vendors {
id
}
}
}`,
{
bs: {
...bodyshop,
@@ -54,12 +56,39 @@ exports.createShop = async (req, res) => {
{ countertype: "ihbnum", count: 1 },
{ countertype: "paymentnum", count: 1 }
]
},
vendors: {
data: [{ name: "In-House" }]
}
}
}
);
res.json(result);
const bodyshopId = result.insert_bodyshops_one.id;
const vendorId = result.insert_bodyshops_one.vendors[0].id;
if (!bodyshopId || !vendorId) {
throw new Error("Failed to create bodyshop or vendor");
}
const updateBodyshop = await client.request(
`mutation UPDATE_BODYSHOP($id: uuid!, $inhousevendorid: uuid!) {
update_bodyshops_by_pk(pk_columns: { id: $id }, _set: { inhousevendorid: $inhousevendorid }) {
id
}
}`,
{
id: bodyshopId,
inhousevendorid: vendorId
}
);
res.status(200).json(updateBodyshop);
} catch (error) {
logger.log("admin-create-shop-error", "error", req.user.email, null, {
message: error.message,
stack: error.stack,
request: req.body,
ioadmin: true
});
res.status(500).json(error);
}
};

View File

@@ -18,7 +18,7 @@ const entegralEndpoint =
: "https://uat-ws.armsbusinesssolutions.net/RepairOrderFolderService/RepairOrderFolderService.asmx?WSDL";
const client = require("../graphql-client/graphql-client").client;
const uuid = require("uuid").v4;
const { v4 } = require("uuid");
const momentFormat = "yyyy-MM-DDTHH:mm:ss.SSS";
@@ -79,7 +79,7 @@ exports.default = async (req, res) => {
}
try {
const transId = uuid(); // Can this actually be the job id?
const transId = v4(); // Can this actually be the job id?
let obj = {
RqUID: transId,
DocumentInfo: {

View File

@@ -55,7 +55,12 @@ exports.default = async (req, res) => {
const csv = converter.json2csv(shopList, { emptyFieldValue: "" });
emailer
.sendTaskEmail({
to: ["patrick.fic@convenient-brands.com", "bradley.rhoades@convenient-brands.com", "jrome@rometech.com"],
to: [
"patrick.fic@convenient-brands.com",
"bradley.rhoades@convenient-brands.com",
"jrome@rometech.com",
"ivana@imexsystems.ca"
],
subject: `RO Usage Report - ${moment().format("MM/DD/YYYY")}`,
text: `
Usage Report for ${moment().format("MM/DD/YYYY")} for Rome Online Customers.

View File

@@ -1485,6 +1485,8 @@ exports.GET_JOB_BY_PK = `query GET_JOB_BY_PK($id: uuid!) {
materials
auto_add_ats
rate_ats
flat_rate_ats
rate_ats_flat
joblines(where: { removed: { _eq: false } }){
id
line_no
@@ -2766,6 +2768,88 @@ exports.GET_BODYSHOP_BY_ID = `
id
md_order_statuses
shopname
imexshopid
intellipay_config
state
}
}
`;
exports.GET_DOCUMENTS_BY_JOB = `
query GET_DOCUMENTS_BY_JOB($jobId: uuid!) {
jobs_by_pk(id: $jobId) {
id
ro_number
}
documents_aggregate(where: { jobid: { _eq: $jobId } }) {
aggregate {
sum {
size
}
}
}
documents(order_by: { takenat: desc }, where: { jobid: { _eq: $jobId } }) {
id
name
key
type
size
takenat
extension
bill {
id
invoice_number
date
vendor {
id
name
}
}
}
}`;
exports.QUERY_TEMPORARY_DOCS = ` query QUERY_TEMPORARY_DOCS {
documents(where: { jobid: { _is_null: true } }, order_by: { takenat: desc }) {
id
name
key
type
extension
size
takenat
}
}`;
exports.GET_DOCUMENTS_BY_IDS = `
query GET_DOCUMENTS_BY_IDS($documentIds: [uuid!]!) {
documents(where: {id: {_in: $documentIds}}, order_by: {takenat: desc}) {
id
name
key
type
extension
size
takenat
}
}`;
exports.GET_JOBID_BY_MERCHANTID_RONUMBER = `
query GET_JOBID_BY_MERCHANTID_RONUMBER($merchantID: String!, $roNumber: String!) {
jobs(where: {ro_number: {_eq: $roNumber}, bodyshop: {intellipay_merchant_id: {_eq: $merchantID}}}) {
id
shopid
bodyshop {
id
intellipay_config
email
}
}
}`;
exports.GET_BODYSHOP_BY_MERCHANT_ID = `
query GET_BODYSHOP_BY_MERCHANTID($merchantID: String!) {
bodyshops(where: {intellipay_merchant_id: {_eq: $merchantID}}) {
id
email
}
}`;

View File

@@ -1,63 +1,22 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const qs = require("query-string");
const axios = require("axios");
const moment = require("moment");
const logger = require("../utils/logger");
const { sendTaskEmail } = require("../email/sendemail");
const generateEmailTemplate = require("../email/generateTemplate");
const { isEmpty, isNumber } = require("lodash");
const handleCommentBasedPayment = require("./lib/handleCommentBasedPayment");
const handleInvoiceBasedPayment = require("./lib/handleInvoiceBasedPayment");
const logValidationError = require("./lib/handlePaymentValidationError");
const getCptellerUrl = require("./lib/getCptellerUrl");
const getShopCredentials = require("./lib/getShopCredentials");
const decodeComment = require("./lib/decodeComment");
const domain = process.env.NODE_ENV ? "secure" : "test";
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
const { InstanceRegion, InstanceEndpoints } = require("../utils/instanceMgr");
const client = new SecretsManagerClient({
region: InstanceRegion()
});
const gqlClient = require("../graphql-client/graphql-client").client;
const getShopCredentials = async (bodyshop) => {
// Development only
if (process.env.NODE_ENV === undefined) {
return {
merchantkey: process.env.INTELLIPAY_MERCHANTKEY,
apikey: process.env.INTELLIPAY_APIKEY
};
}
// Production code
if (bodyshop?.imexshopid) {
try {
const secret = await client.send(
new GetSecretValueCommand({
SecretId: `intellipay-credentials-${bodyshop.imexshopid}`,
VersionStage: "AWSCURRENT" // VersionStage defaults to AWSCURRENT if unspecified
})
);
return JSON.parse(secret.SecretString);
} catch (error) {
return {
error: error.message
};
}
}
};
const decodeComment = (comment) => {
try {
return comment ? JSON.parse(Buffer.from(comment, "base64").toString()) : null;
} catch (error) {
return null; // Handle malformed base64 string gracefully
}
};
exports.lightbox_credentials = async (req, res) => {
/**
* @description Get lightbox credentials for the shop
* @param req
* @param res
* @returns {Promise<void>}
*/
const lightboxCredentials = async (req, res) => {
const decodedComment = decodeComment(req.body?.comment);
const logMeta = {
iPayData: req.body?.iPayData,
@@ -73,17 +32,17 @@ exports.lightbox_credentials = async (req, res) => {
const shopCredentials = await getShopCredentials(req.body.bodyshop);
if (shopCredentials.error) {
if (shopCredentials?.error) {
logger.log("intellipay-credentials-error", "ERROR", req.user?.email, null, {
message: shopCredentials.error?.message,
...logMeta
});
res.json({
return res.json({
message: shopCredentials.error?.message,
type: "intellipay-credentials-error",
...logMeta
});
return;
}
try {
@@ -94,7 +53,10 @@ exports.lightbox_credentials = async (req, res) => {
...shopCredentials,
operatingenv: "businessattended"
}),
url: `https://${domain}.cpteller.com/api/custapi.cfc?method=autoterminal${req.body.refresh ? "_refresh" : ""}` //autoterminal_refresh
url: getCptellerUrl({
apiType: "custapi",
params: { method: `autoterminal${req.body.refresh ? "_refresh" : ""}` }
})
};
const response = await axios(options);
@@ -104,13 +66,14 @@ exports.lightbox_credentials = async (req, res) => {
...logMeta
});
res.send(response.data);
return res.send(response.data);
} catch (error) {
logger.log("intellipay-lightbox-error", "ERROR", req.user?.email, null, {
message: error?.message,
...logMeta
});
res.json({
return res.json({
message: error?.message,
type: "intellipay-lightbox-error",
...logMeta
@@ -118,7 +81,13 @@ exports.lightbox_credentials = async (req, res) => {
}
};
exports.payment_refund = async (req, res) => {
/**
* @description Process payment refund
* @param req
* @param res
* @returns {Promise<void>}
*/
const paymentRefund = async (req, res) => {
const decodedComment = decodeComment(req.body.iPayData?.comment);
const logResponseMeta = {
iPayData: req.body?.iPayData,
@@ -136,18 +105,17 @@ exports.payment_refund = async (req, res) => {
const shopCredentials = await getShopCredentials(req.body.bodyshop);
if (shopCredentials.error) {
if (shopCredentials?.error) {
logger.log("intellipay-refund-credentials-error", "ERROR", req.user?.email, null, {
credentialsError: shopCredentials.error,
...logResponseMeta
});
res.status(400).json({
return res.status(400).json({
credentialsError: shopCredentials.error,
type: "intellipay-refund-credentials-error",
...logResponseMeta
});
return;
}
try {
@@ -160,7 +128,11 @@ exports.payment_refund = async (req, res) => {
paymentid: req.body.paymentid,
amount: req.body.amount
}),
url: `https://${domain}.cpteller.com/api/26/webapi.cfc?method=payment_refund`
url: getCptellerUrl({
apiType: "webapi",
version: "26",
params: { method: "payment_refund" }
})
};
logger.log("intellipay-refund-options-prepared", "DEBUG", req.user?.email, null, {
@@ -175,13 +147,14 @@ exports.payment_refund = async (req, res) => {
...logResponseMeta
});
res.send(response.data);
return res.send(response.data);
} catch (error) {
logger.log("intellipay-refund-error", "ERROR", req.user?.email, null, {
message: error?.message,
...logResponseMeta
});
res.status(500).json({
return res.status(500).json({
message: error?.message,
type: "intellipay-refund-error",
...logResponseMeta
@@ -189,7 +162,13 @@ exports.payment_refund = async (req, res) => {
}
};
exports.generate_payment_url = async (req, res) => {
/**
* @description Generate payment URL for the shop
* @param req
* @param res
* @returns {Promise<void>}
*/
const generatePaymentUrl = async (req, res) => {
const decodedComment = decodeComment(req.body.comment);
const logResponseMeta = {
iPayData: req.body?.iPayData,
@@ -209,17 +188,17 @@ exports.generate_payment_url = async (req, res) => {
const shopCredentials = await getShopCredentials(req.body.bodyshop);
if (shopCredentials.error) {
if (shopCredentials?.error) {
logger.log("intellipay-generate-payment-url-credentials-error", "ERROR", req.user?.email, null, {
message: shopCredentials.error?.message,
...logResponseMeta
});
res.status(400).json({
return res.status(400).json({
message: shopCredentials.error?.message,
type: "intellipay-generate-payment-url-credentials-error",
...logResponseMeta
});
return;
}
try {
@@ -234,7 +213,10 @@ exports.generate_payment_url = async (req, res) => {
invoice: req.body.invoice,
createshorturl: true
}),
url: `https://${domain}.cpteller.com/api/custapi.cfc?method=generate_lightbox_url`
url: getCptellerUrl({
apiType: "custapi",
params: { method: "generate_lightbox_url" }
})
};
logger.log("intellipay-generate-payment-url-options-prepared", "DEBUG", req.user?.email, null, {
@@ -250,18 +232,25 @@ exports.generate_payment_url = async (req, res) => {
...logResponseMeta
});
res.send(response.data);
return res.send(response.data);
} catch (error) {
logger.log("intellipay-generate-payment-url-error", "ERROR", req.user?.email, null, {
message: error?.message,
...logResponseMeta
});
res.status(500).json({ message: error?.message, ...logResponseMeta });
return res.status(500).json({ message: error?.message, ...logResponseMeta });
}
};
//Reference: https://intellipay.com/dist/webapi26.html#operation/fee
exports.checkfee = async (req, res) => {
/**
* @description Check the fee for a given amount
* Reference: https://intellipay.com/dist/webapi26.html#operation/fee
* @param req
* @param res
* @returns {Promise<void>}
*/
const checkFee = async (req, res) => {
const logResponseMeta = {
bodyshop: {
id: req.body?.bodyshop?.id,
@@ -274,24 +263,24 @@ exports.checkfee = async (req, res) => {
logger.log("intellipay-checkfee-request-received", "DEBUG", req.user?.email, null, logResponseMeta);
if (!req.body.amount || req.body.amount <= 0) {
if (!isNumber(req.body?.amount) || req.body?.amount <= 0) {
logger.log("intellipay-checkfee-skip", "DEBUG", req.user?.email, null, {
message: "Amount is zero or undefined, skipping fee check.",
...logResponseMeta
});
res.json({ fee: 0 });
return;
return res.json({ fee: 0 });
}
const shopCredentials = await getShopCredentials(req.body.bodyshop);
if (shopCredentials.error) {
if (shopCredentials?.error) {
logger.log("intellipay-checkfee-credentials-error", "ERROR", req.user?.email, null, {
message: shopCredentials.error?.message,
...logResponseMeta
});
res.status(400).json({ error: shopCredentials.error?.message, ...logResponseMeta });
return;
return res.status(400).json({ error: shopCredentials.error?.message, ...logResponseMeta });
}
try {
@@ -312,7 +301,7 @@ exports.checkfee = async (req, res) => {
},
{ sort: false } // Ensure query string order is preserved
),
url: `https://${domain}.cpteller.com/api/26/webapi.cfc`
url: getCptellerUrl({ apiType: "webapi", version: "26" })
};
logger.log("intellipay-checkfee-options-prepared", "DEBUG", req.user?.email, null, {
@@ -327,195 +316,92 @@ exports.checkfee = async (req, res) => {
message: response.data?.error,
...logResponseMeta
});
res.status(400).json({
return res.status(400).json({
error: response.data?.error,
type: "intellipay-checkfee-api-error",
...logResponseMeta
});
} else if (response.data < 0) {
}
if (response.data < 0) {
logger.log("intellipay-checkfee-negative-fee", "ERROR", req.user?.email, null, {
message: "Fee amount returned is negative.",
...logResponseMeta
});
res.json({
return res.json({
error: "Fee amount negative. Check API credentials & account configuration.",
...logResponseMeta,
type: "intellipay-checkfee-negative-fee"
});
} else {
logger.log("intellipay-checkfee-success", "DEBUG", req.user?.email, null, {
fee: response.data,
...logResponseMeta
});
res.json({ fee: response.data, ...logResponseMeta });
}
logger.log("intellipay-checkfee-success", "DEBUG", req.user?.email, null, {
fee: response.data,
...logResponseMeta
});
return res.json({ fee: response.data, ...logResponseMeta });
} catch (error) {
logger.log("intellipay-checkfee-error", "ERROR", req.user?.email, null, {
message: error?.message,
...logResponseMeta
});
res.status(500).json({ error: error?.message, logResponseMeta });
return res.status(500).json({ error: error?.message, logResponseMeta });
}
};
exports.postback = async (req, res) => {
/**
* @description Handle the postback from Intellipay
* @param req
* @param res
* @returns {Promise<void>}
*/
/**
* Handle the postback from Intellipay payment system
*/
const postBack = async (req, res) => {
const { body: values } = req;
const decodedComment = decodeComment(values?.comment);
const logResponseMeta = {
bodyshop: {
id: req.body?.bodyshop?.id,
imexshopid: req.body?.bodyshop?.imexshopid,
name: req.body?.bodyshop?.shopname,
state: req.body?.bodyshop?.state
},
iprequest: values,
decodedComment
};
const logMeta = { iprequest: values, decodedComment };
logger.log("intellipay-postback-received", "DEBUG", req.user?.email, null, logResponseMeta);
logger.log("intellipay-postback-received", "DEBUG", "api", null, logMeta);
try {
if ((!values.invoice || values.invoice === "") && !decodedComment) {
//invoice is specified through the pay link. Comment by IO.
logger.log("intellipay-postback-ignored", "DEBUG", req.user?.email, null, {
// Handle empty/invalid requests
if (isEmpty(values?.invoice) && !decodedComment) {
logger.log("intellipay-postback-ignored", "DEBUG", "api", null, {
message: "No invoice or comment provided",
...logResponseMeta
...logMeta
});
res.sendStatus(200);
return;
return res.sendStatus(200);
}
// Process payment based on data type
if (decodedComment) {
//Shifted the order to have this first to retain backwards compatibility for the old style of short link.
//This has been triggered by IO and may have multiple jobs.
const parsedComment = decodedComment;
logger.log("intellipay-postback-parsed-comment", "DEBUG", req.user?.email, null, {
parsedComment,
...logResponseMeta
});
//Adding in the user email to the short pay email.
//Need to check this to ensure backwards compatibility for clients that don't update.
const partialPayments = Array.isArray(parsedComment) ? parsedComment : parsedComment.payments;
// Fetch jobs by job IDs
const jobs = await gqlClient.request(queries.GET_JOBS_BY_PKS, {
ids: partialPayments.map((p) => p.jobid)
});
logger.log("intellipay-postback-jobs-fetched", "DEBUG", req.user?.email, null, {
jobs,
parsedComment,
...logResponseMeta
});
// Insert new payments
const paymentResult = await gqlClient.request(queries.INSERT_NEW_PAYMENT, {
paymentInput: partialPayments.map((p) => ({
amount: p.amount,
transactionid: values.authcode,
payer: "Customer",
type: values.cardtype,
jobid: p.jobid,
date: moment(Date.now()),
payment_responses: {
data: {
amount: values.total,
bodyshopid: jobs.jobs[0].shopid,
jobid: p.jobid,
declinereason: "Approved",
ext_paymentid: values.paymentid,
successful: true,
response: values
}
}
}))
});
logger.log("intellipay-postback-payment-success", "DEBUG", req.user?.email, null, {
paymentResult,
jobs,
parsedComment,
...logResponseMeta
});
if (values.origin === "OneLink" && parsedComment.userEmail) {
sendTaskEmail({
to: parsedComment.userEmail,
subject: `New Payment(s) Received - RO ${jobs.jobs.map((j) => j.ro_number).join(", ")}`,
type: "html",
html: generateEmailTemplate({
header: "New Payment(s) Received",
subHeader: "",
body: jobs.jobs
.map(
(job) =>
`Reference: <a href="${InstanceEndpoints()}/manage/jobs/${job.id}">${job.ro_number || "N/A"}</a> | ${job.ownr_co_nm ? job.ownr_co_nm : `${job.ownr_fn || ""} ${job.ownr_ln || ""}`.trim()} | ${`${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""}`.trim()} | $${partialPayments.find((p) => p.jobid === job.id).amount}`
)
.join("<br/>")
})
}).catch((error) => {
logger.log("intellipay-postback-email-error", "ERROR", req.user?.email, null, {
message: error.message,
jobs,
paymentResult,
...logResponseMeta
});
});
}
res.sendStatus(200);
} else if (values.invoice) {
const job = await gqlClient.request(queries.GET_JOB_BY_PK, {
id: values.invoice
});
logger.log("intellipay-postback-invoice-job-fetched", "DEBUG", req.user?.email, null, {
job,
...logResponseMeta
});
const paymentResult = await gqlClient.request(queries.INSERT_NEW_PAYMENT, {
paymentInput: {
amount: values.total,
transactionid: values.authcode,
payer: "Customer",
type: values.cardtype,
jobid: values.invoice,
date: moment(Date.now())
}
});
logger.log("intellipay-postback-invoice-payment-success", "DEBUG", req.user?.email, null, {
paymentResult,
...logResponseMeta
});
const responseResults = await gqlClient.request(queries.INSERT_PAYMENT_RESPONSE, {
paymentResponse: {
amount: values.total,
bodyshopid: job.jobs_by_pk.shopid,
paymentid: paymentResult.id,
jobid: values.invoice,
declinereason: "Approved",
ext_paymentid: values.paymentid,
successful: true,
response: values
}
});
logger.log("intellipay-postback-invoice-response-success", "DEBUG", req.user?.email, null, {
responseResults,
...logResponseMeta
});
res.sendStatus(200);
return await handleCommentBasedPayment(values, decodedComment, logger, logMeta, res);
} else if (values?.invoice) {
return await handleInvoiceBasedPayment(values, logger, logMeta, res);
} else {
// This should be caught by first validation, but as a safeguard
logValidationError("intellipay-postback-invalid", "No valid invoice or comment provided", logMeta);
return res.status(400).send("Bad Request: No valid invoice or comment provided");
}
} catch (error) {
logger.log("intellipay-postback-error", "ERROR", req.user?.email, null, {
logger.log("intellipay-postback-error", "ERROR", "api", null, {
message: error?.message,
...logResponseMeta
...logMeta
});
res.status(400).json({ successful: false, error: error.message, ...logResponseMeta });
return res.status(400).json({ successful: false, error: error.message, ...logMeta });
}
};
module.exports = {
lightboxCredentials,
paymentRefund,
generatePaymentUrl,
checkFee,
postBack
};

View File

@@ -0,0 +1,14 @@
/**
 * Parse a base64-encoded JSON comment string.
 * @param comment - base64 string (may be null/undefined/empty)
 * @returns {any|null} the parsed value, or null when absent or unparseable
 */
const decodeComment = (comment) => {
  if (!comment) {
    return null;
  }
  try {
    const json = Buffer.from(comment, "base64").toString();
    return JSON.parse(json);
  } catch (parseError) {
    // Malformed base64 / invalid JSON — treat as "no comment".
    return null;
  }
};
module.exports = decodeComment;

View File

@@ -0,0 +1,34 @@
/**
 * Generates a properly formatted Cpteller API URL.
 * @param {Object} options - URL configuration options
 * @param {string} options.apiType - 'webapi' or 'custapi' (defaults to 'webapi')
 * @param {string} [options.version] - API version segment (e.g. '26'), only applied to webapi
 * @param {Object} [options.params] - URL query parameters
 * @returns {string} - The formatted Cpteller URL
 */
const getCptellerUrl = ({ apiType = "webapi", version, params = {} }) => {
  // "secure" host in production, "test" sandbox everywhere else.
  const domain = process.env?.NODE_ENV === "production" ? "secure" : "test";
  const segments = [`https://${domain}.cpteller.com/api/`];
  // Versioned path only exists for the webapi endpoint.
  if (apiType === "webapi" && version) {
    segments.push(`${version}/`);
  }
  segments.push(`${apiType}.cfc`);
  const base = segments.join("");
  const query = new URLSearchParams(params).toString();
  return query ? `${base}?${query}` : base;
};
module.exports = getCptellerUrl;

View File

@@ -0,0 +1,12 @@
/**
 * @description Map a raw Intellipay card type onto the shop's configured
 * payment type, falling back to the raw value when no mapping applies.
 * @param ipMapping - lowercase cardtype -> payment-type lookup (may be null/undefined)
 * @param cardType - raw card type reported by Intellipay
 * @returns {*} the mapped payment type, or the original cardType when unmapped
 */
const getPaymentType = (ipMapping, cardType) => {
  if (!ipMapping) {
    return cardType;
  }
  const key = (cardType || "").toLowerCase();
  return ipMapping[key] || cardType;
};
module.exports = getPaymentType;

View File

@@ -0,0 +1,40 @@
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
const { InstanceRegion } = require("../../utils/instanceMgr");
const client = new SecretsManagerClient({
region: InstanceRegion()
});
/**
 * @description Resolve Intellipay credentials for a bodyshop.
 * Outside production the credentials come from environment variables; in
 * production they are stored per-shop in AWS Secrets Manager under the
 * secret id `intellipay-credentials-<imexshopid>`.
 * @param bodyshop - bodyshop record; `imexshopid` selects the secret in production
 * @returns {Promise<{error}|{merchantkey: *, apikey: *}|any>} credentials,
 *          or `{ error: string }` when they cannot be resolved
 */
const getShopCredentials = async (bodyshop) => {
  // In Dev/Testing we will use the environment variables
  if (process.env?.NODE_ENV !== "production") {
    return {
      merchantkey: process.env.INTELLIPAY_MERCHANTKEY,
      apikey: process.env.INTELLIPAY_APIKEY
    };
  }
  // In Production, we will use the AWS Secrets Manager
  if (!bodyshop?.imexshopid) {
    // FIX: previously this fell through and resolved to `undefined`, which
    // callers' `credentials?.error` checks silently missed; fail explicitly.
    return {
      error: "Bodyshop is missing imexshopid; cannot resolve Intellipay credentials"
    };
  }
  try {
    const secret = await client.send(
      new GetSecretValueCommand({
        SecretId: `intellipay-credentials-${bodyshop.imexshopid}`,
        VersionStage: "AWSCURRENT" // VersionStage defaults to AWSCURRENT if unspecified
      })
    );
    return JSON.parse(secret.SecretString);
  } catch (error) {
    // Surface the Secrets Manager failure as a plain error string for callers.
    return {
      error: error.message
    };
  }
};
module.exports = getShopCredentials;

View File

@@ -0,0 +1,81 @@
const sendPaymentNotificationEmail = require("./sendPaymentNotificationEmail");
const { INSERT_NEW_PAYMENT, GET_BODYSHOP_BY_ID, GET_JOBS_BY_PKS } = require("../../graphql-client/queries");
const getPaymentType = require("./getPaymentType");
const moment = require("moment");
const gqlClient = require("../../graphql-client/graphql-client").client;
/**
 * @description Process an Intellipay postback whose payment details are
 * carried in the base64 comment (a single IO-initiated payment may span
 * several jobs).
 * @param values - raw postback body from Intellipay
 * @param decodedComment - decoded comment: payment array or { payments, userEmail }
 * @param logger
 * @param logMeta - shared metadata appended to every log entry
 * @param res
 * @returns {Promise<*>}
 */
const handleCommentBasedPayment = async (values, decodedComment, logger, logMeta, res) => {
  logger.log("intellipay-postback-parsed-comment", "DEBUG", "api", null, {
    parsedComment: decodedComment,
    ...logMeta
  });
  // Older short links encode a bare array; newer ones wrap it in { payments }.
  const partialPayments = Array.isArray(decodedComment) ? decodedComment : decodedComment.payments;
  const jobIds = partialPayments.map((payment) => payment.jobid);
  // Fetch job data
  const jobs = await gqlClient.request(GET_JOBS_BY_PKS, { ids: jobIds });
  // Fetch bodyshop data (all jobs in one comment belong to the same shop)
  const bodyshop = await gqlClient.request(GET_BODYSHOP_BY_ID, {
    id: jobs.jobs[0].shopid
  });
  const ipMapping = bodyshop.bodyshops_by_pk.intellipay_config?.payment_map;
  logger.log("intellipay-postback-jobs-fetched", "DEBUG", "api", null, {
    jobs,
    parsedComment: decodedComment,
    ...logMeta
  });
  // One payment row (with a nested payment_response) per partial payment.
  const paymentInput = partialPayments.map((payment) => ({
    amount: payment.amount,
    transactionid: values.authcode,
    payer: "Customer",
    type: getPaymentType(ipMapping, values.cardtype),
    jobid: payment.jobid,
    date: moment(Date.now()),
    payment_responses: {
      data: {
        amount: values.total,
        bodyshopid: bodyshop.bodyshops_by_pk.id,
        jobid: payment.jobid,
        declinereason: "Approved",
        ext_paymentid: values.paymentid,
        successful: true,
        response: values
      }
    }
  }));
  const paymentResult = await gqlClient.request(INSERT_NEW_PAYMENT, { paymentInput });
  logger.log("intellipay-postback-payment-success", "DEBUG", "api", null, {
    paymentResult,
    jobs,
    parsedComment: decodedComment,
    ...logMeta
  });
  // Send notification email if needed
  if (values?.origin === "OneLink" && decodedComment?.userEmail) {
    await sendPaymentNotificationEmail(decodedComment.userEmail, jobs, partialPayments, logger, logMeta);
  }
  return res.sendStatus(200);
};
module.exports = handleCommentBasedPayment;

View File

@@ -0,0 +1,131 @@
const handlePaymentValidationError = require("./handlePaymentValidationError");
const {
GET_JOBID_BY_MERCHANTID_RONUMBER,
INSERT_PAYMENT_RESPONSE,
INSERT_NEW_PAYMENT,
GET_BODYSHOP_BY_MERCHANT_ID
} = require("../../graphql-client/queries");
const { sendTaskEmail } = require("../../email/sendemail");
const getPaymentType = require("./getPaymentType");
const moment = require("moment");
const gqlClient = require("../../graphql-client/graphql-client").client;
/**
 * @description Handle invoice-based payment processing. The invoice number is
 * the shop's RO number, so the job is looked up by merchant id + RO number.
 * When no matching job exists, the shop is emailed the raw transaction
 * details so the payment can be entered manually.
 * @param values - raw postback body from Intellipay
 * @param logger
 * @param logMeta - shared metadata appended to every log entry
 * @param res
 * @returns {Promise<*>}
 */
const handleInvoiceBasedPayment = async (values, logger, logMeta, res) => {
  // Validate required fields
  if (!values.merchantid) {
    return handlePaymentValidationError(
      res,
      logger,
      "intellipay-postback-no-merchantid",
      "Merchant ID is missing",
      logMeta
    );
  }
  // Fetch job data
  const result = await gqlClient.request(GET_JOBID_BY_MERCHANTID_RONUMBER, {
    merchantID: values.merchantid,
    roNumber: values.invoice
  });
  if (!result?.jobs?.length) {
    // No job matches this invoice — notify the shop so the payment is not lost.
    const bodyshop = await gqlClient.request(GET_BODYSHOP_BY_MERCHANT_ID, {
      merchantID: values.merchantid
    });
    if (bodyshop?.bodyshops?.[0]) {
      const email = bodyshop.bodyshops[0].email;
      await sendTaskEmail({
        to: email,
        subject: `Failed to Insert Payment`,
        text: `The system has attempted to insert a payment that was generated by your merchant terminal but could not find an associated invoice. Transaction details are below. Please input this payment to your system manually.\n\n${Object.keys(
          values
        )
          .map((key) => `${key}: ${values[key]}`)
          .join("\n")}`
      });
    }
    // 200 so Intellipay does not keep retrying a postback we can never match.
    return handlePaymentValidationError(
      res,
      logger,
      "intellipay-postback-job-not-found",
      "Job not found",
      logMeta,
      200
    );
  }
  const job = result.jobs[0];
  const bodyshop = job?.bodyshop;
  if (!bodyshop) {
    return handlePaymentValidationError(
      res,
      logger,
      "intellipay-postback-bodyshop-not-found",
      "Bodyshop not found",
      logMeta
    );
  }
  const ipMapping = bodyshop.intellipay_config?.payment_map;
  logger.log("intellipay-postback-invoice-job-fetched", "DEBUG", "api", null, {
    job,
    ...logMeta
  });
  // Create payment record
  const paymentResult = await gqlClient.request(INSERT_NEW_PAYMENT, {
    paymentInput: {
      amount: values.total,
      transactionid: values.authcode,
      payer: "Customer",
      type: getPaymentType(ipMapping, values.cardtype),
      jobid: job.id,
      date: moment(Date.now())
    }
  });
  logger.log("intellipay-postback-invoice-payment-success", "DEBUG", "api", null, {
    paymentResult,
    ...logMeta
  });
  // FIX: graphql-request results are keyed by the mutation's root field, so the
  // inserted id lives at insert_payments.returning[0].id (see the unit-test
  // mock for INSERT_NEW_PAYMENT); `paymentResult.id` was always undefined and
  // left the payment_response row without a payment link. A fallback to
  // paymentResult.id is kept in case the mutation shape differs — TODO confirm.
  const paymentId = paymentResult?.insert_payments?.returning?.[0]?.id ?? paymentResult?.id;
  // Create payment response record
  const responseResults = await gqlClient.request(INSERT_PAYMENT_RESPONSE, {
    paymentResponse: {
      amount: values.total,
      bodyshopid: bodyshop.id,
      paymentid: paymentId,
      jobid: job.id,
      declinereason: "Approved",
      ext_paymentid: values.paymentid,
      successful: true,
      response: values
    }
  });
  logger.log("intellipay-postback-invoice-response-success", "DEBUG", "api", null, {
    responseResults,
    ...logMeta
  });
  return res.sendStatus(200);
};
module.exports = handleInvoiceBasedPayment;

View File

@@ -0,0 +1,19 @@
/**
 * @description Log a payment validation failure and answer with a plain-text
 * "Bad Request" style response.
 * @param res - Express response
 * @param logger
 * @param logCode - log event tag
 * @param message - human-readable failure reason (echoed to the caller)
 * @param logMeta - extra metadata merged into the log entry
 * @param returnCode - HTTP status to send (defaults to 400)
 * @returns {*}
 */
const handlePaymentValidationError = (res, logger, logCode, message, logMeta, returnCode) => {
  const status = returnCode || 400;
  logger.log(logCode, "ERROR", "api", null, { message, ...logMeta });
  return res.status(status).send(`Bad Request: ${message}`);
};
module.exports = handlePaymentValidationError;

View File

@@ -0,0 +1,41 @@
const { sendTaskEmail } = require("../../email/sendemail");
const generateEmailTemplate = require("../../email/generateTemplate");
/**
 * @description Email the initiating user a summary of the payment(s) received.
 * Failures are logged and swallowed so a bad email never fails the postback.
 * @param userEmail - recipient address
 * @param jobs - GET_JOBS_BY_PKS result ({ jobs: [...] })
 * @param partialPayments - [{ jobid, amount }] entries matching those jobs
 * @param logger
 * @param logMeta - shared metadata appended to the error log entry
 * @returns {Promise<void>}
 */
const sendPaymentNotificationEmail = async (userEmail, jobs, partialPayments, logger, logMeta) => {
  try {
    // FIX: InstanceEndpoints was used below but never imported, so every send
    // threw a ReferenceError that the catch silently logged as an email error.
    // Path mirrors getShopCredentials' require of ../../utils/instanceMgr.
    const { InstanceEndpoints } = require("../../utils/instanceMgr");
    await sendTaskEmail({
      to: userEmail,
      subject: `New Payment(s) Received - RO ${jobs.jobs.map((j) => j.ro_number).join(", ")}`,
      type: "html",
      html: generateEmailTemplate({
        header: "New Payment(s) Received",
        subHeader: "",
        body: jobs.jobs
          .map(
            (job) =>
              `Reference: <a href="${InstanceEndpoints()}/manage/jobs/${job.id}">${job.ro_number || "N/A"}</a> | ${
                job.ownr_co_nm ? job.ownr_co_nm : `${job.ownr_fn || ""} ${job.ownr_ln || ""}`.trim()
              } | ${`${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""}`.trim()} | $${partialPayments.find((p) => p.jobid === job.id).amount}`
          )
          .join("<br/>")
      })
    });
  } catch (error) {
    // Best-effort: notification failure must not abort payment processing.
    logger.log("intellipay-postback-email-error", "ERROR", "api", null, {
      message: error.message,
      jobs,
      ...logMeta
    });
  }
};
module.exports = sendPaymentNotificationEmail;

View File

@@ -0,0 +1,152 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import mockRequire from "mock-require";
const gqlRequestMock = { request: vi.fn() };
const getPaymentTypeMock = vi.fn(() => "American Express");
const sendPaymentNotificationEmailMock = vi.fn();
let handleCommentBasedPayment;
// Reset module registry and mocks before every test so each case requires a
// fresh copy of handleCommentBasedPayment with clean mock state.
beforeEach(() => {
  vi.resetModules();
  vi.clearAllMocks();
  // Mock dependencies using mock-require BEFORE requiring the target module
  mockRequire("../../../graphql-client/graphql-client", {
    client: gqlRequestMock
  });
  mockRequire("../getPaymentType", getPaymentTypeMock);
  mockRequire("../sendPaymentNotificationEmail", sendPaymentNotificationEmailMock);
  // Now require the module under test
  handleCommentBasedPayment = require("../handleCommentBasedPayment");
  // Chain your GraphQL mocks
  // (the handler issues three requests in order: jobs lookup, bodyshop lookup,
  // payment insert — each mockResolvedValueOnce below answers one of them)
  gqlRequestMock.request
    .mockResolvedValueOnce({
      jobs: [
        {
          id: "c1ffe09c-e7d4-46b3-aac5-f23e39563181",
          shopid: "bfec8c8c-b7f1-49e0-be4c-524455f4e582"
        }
      ]
    })
    .mockResolvedValueOnce({
      bodyshops_by_pk: {
        id: "bfec8c8c-b7f1-49e0-be4c-524455f4e582",
        intellipay_config: {
          payment_map: {
            amex: "American Express"
          }
        }
      }
    })
    .mockResolvedValueOnce({
      insert_payments: {
        returning: [{ id: "5dfda3c4-c0a6-4b09-a73d-176ed0ac6499" }]
      }
    });
});
describe("handleCommentBasedPayment", () => {
const mockLogger = { log: vi.fn() };
const mockRes = { sendStatus: vi.fn() };
const values = {
authcode: "5557301",
total: "0.01",
origin: "Dejavoo",
paymentid: "24294378",
cardtype: "Amex"
};
const decodedComment = {
payments: [{ jobid: "c1ffe09c-e7d4-46b3-aac5-f23e39563181", amount: 0.01 }],
userEmail: "test@example.com"
};
const logMeta = { op: "xyz123" };
it("processes comment-based payment and returns 200", async () => {
await handleCommentBasedPayment(values, decodedComment, mockLogger, logMeta, mockRes);
expect(gqlRequestMock.request).toHaveBeenCalledTimes(3);
expect(getPaymentTypeMock).toHaveBeenCalledWith({ amex: "American Express" }, "Amex");
expect(sendPaymentNotificationEmailMock).not.toHaveBeenCalled();
expect(mockRes.sendStatus).toHaveBeenCalledWith(200);
});
it("sends notification if origin is OneLink and userEmail exists", async () => {
const oneLinkValues = { ...values, origin: "OneLink" };
await handleCommentBasedPayment(oneLinkValues, decodedComment, mockLogger, logMeta, mockRes);
expect(sendPaymentNotificationEmailMock).toHaveBeenCalledWith(
"test@example.com",
expect.anything(),
expect.anything(),
mockLogger,
logMeta
);
expect(mockRes.sendStatus).toHaveBeenCalledWith(200);
});
it("handles decodedComment as a direct array", async () => {
const arrayComment = [{ jobid: "c1ffe09c-e7d4-46b3-aac5-f23e39563181", amount: 0.01 }];
await handleCommentBasedPayment(values, arrayComment, mockLogger, logMeta, mockRes);
expect(gqlRequestMock.request).toHaveBeenCalledTimes(3);
expect(mockRes.sendStatus).toHaveBeenCalledWith(200);
});
it("does not send email if origin is OneLink but userEmail is missing", async () => {
const commentWithoutEmail = {
payments: decodedComment.payments
// no userEmail
};
const oneLinkValues = { ...values, origin: "OneLink" };
await handleCommentBasedPayment(oneLinkValues, commentWithoutEmail, mockLogger, logMeta, mockRes);
expect(sendPaymentNotificationEmailMock).not.toHaveBeenCalled();
expect(mockRes.sendStatus).toHaveBeenCalledWith(200);
});
it("logs important stages of the process", async () => {
await handleCommentBasedPayment(values, decodedComment, mockLogger, logMeta, mockRes);
const logCalls = mockLogger.log.mock.calls.map(([tag]) => tag);
expect(logCalls).toContain("intellipay-postback-parsed-comment");
expect(logCalls).toContain("intellipay-postback-payment-success");
});
it("handles missing payment_map safely", async () => {
gqlRequestMock.request.mockReset(); // 🧹 Clear previous .mockResolvedValueOnce calls
gqlRequestMock.request
.mockResolvedValueOnce({
jobs: [{ id: "job1", shopid: "shop1" }]
})
.mockResolvedValueOnce({
bodyshops_by_pk: {
id: "shop1",
intellipay_config: null
}
})
.mockResolvedValueOnce({
insert_payments: {
returning: [{ id: "payment1" }]
}
});
await handleCommentBasedPayment(values, decodedComment, mockLogger, logMeta, mockRes);
expect(getPaymentTypeMock).toHaveBeenCalledWith(undefined, "Amex");
expect(mockRes.sendStatus).toHaveBeenCalledWith(200);
});
});

View File

@@ -0,0 +1,130 @@
// Unit tests for handleInvoiceBasedPayment (vitest + mock-require).
import { beforeEach, describe, expect, it, vi } from "vitest";
import mockRequire from "mock-require";

// Shared mock doubles; call history is cleared before every test.
const gqlRequestMock = { request: vi.fn() };
const getPaymentTypeMock = vi.fn(() => "Visa");
const handlePaymentValidationErrorMock = vi.fn();

// Re-required fresh in beforeEach so the module under test picks up the mocks.
let handleInvoiceBasedPayment;

beforeEach(() => {
  vi.resetModules();
  vi.clearAllMocks();
  mockRequire("../../../graphql-client/graphql-client", {
    client: gqlRequestMock
  });
  mockRequire("../getPaymentType", getPaymentTypeMock);
  mockRequire("../handlePaymentValidationError", handlePaymentValidationErrorMock);
  handleInvoiceBasedPayment = require("../handleInvoiceBasedPayment");
  // Response order matters: 1) job-by-invoice lookup, 2) payment insert, 3) payment-response insert.
  gqlRequestMock.request
    .mockResolvedValueOnce({
      jobs: [
        {
          id: "job123",
          bodyshop: {
            id: "shop123",
            intellipay_config: {
              payment_map: {
                visa: "Visa"
              }
            }
          }
        }
      ]
    })
    .mockResolvedValueOnce({
      id: "payment123"
    })
    .mockResolvedValueOnce({
      insert_payment_response: {
        returning: [{ id: "response123" }]
      }
    });
});

describe("handleInvoiceBasedPayment", () => {
  const mockLogger = { log: vi.fn() };
  const mockRes = { sendStatus: vi.fn() };
  // Postback payload keyed by merchant/invoice rather than an encoded comment.
  const values = {
    merchantid: "m123",
    invoice: "INV-001",
    total: 100.0,
    authcode: "AUTH123",
    cardtype: "visa",
    paymentid: "P789"
  };
  const logMeta = { op: "abc123" };

  it("processes a valid invoice-based payment", async () => {
    await handleInvoiceBasedPayment(values, mockLogger, logMeta, mockRes);
    expect(gqlRequestMock.request).toHaveBeenCalledTimes(3);
    expect(getPaymentTypeMock).toHaveBeenCalledWith({ visa: "Visa" }, "visa");
    expect(mockRes.sendStatus).toHaveBeenCalledWith(200);
    expect(handlePaymentValidationErrorMock).not.toHaveBeenCalled();
  });

  it("handles missing merchantid with validation error", async () => {
    const invalidValues = { ...values, merchantid: undefined };
    await handleInvoiceBasedPayment(invalidValues, mockLogger, logMeta, mockRes);
    expect(handlePaymentValidationErrorMock).toHaveBeenCalledWith(
      mockRes,
      mockLogger,
      "intellipay-postback-no-merchantid",
      "Merchant ID is missing",
      logMeta
    );
    // Validation short-circuits before any GraphQL traffic.
    expect(gqlRequestMock.request).not.toHaveBeenCalled();
  });

  it("handles job not found with validation error", async () => {
    gqlRequestMock.request.mockReset();
    gqlRequestMock.request.mockResolvedValueOnce({ jobs: [] });
    await handleInvoiceBasedPayment(values, mockLogger, logMeta, mockRes);
    // Note the trailing 200: missing job is acknowledged to the processor, not rejected.
    expect(handlePaymentValidationErrorMock).toHaveBeenCalledWith(
      mockRes,
      mockLogger,
      "intellipay-postback-job-not-found",
      "Job not found",
      logMeta,
      200
    );
  });

  it("handles missing bodyshop with validation error", async () => {
    gqlRequestMock.request.mockReset();
    gqlRequestMock.request.mockResolvedValueOnce({
      jobs: [{ id: "job123", bodyshop: null }]
    });
    await handleInvoiceBasedPayment(values, mockLogger, logMeta, mockRes);
    expect(handlePaymentValidationErrorMock).toHaveBeenCalledWith(
      mockRes,
      mockLogger,
      "intellipay-postback-bodyshop-not-found",
      "Bodyshop not found",
      logMeta
    );
  });

  it("logs all expected stages of the process", async () => {
    await handleInvoiceBasedPayment(values, mockLogger, logMeta, mockRes);
    // First positional arg of logger.log is the log tag.
    const logTags = mockLogger.log.mock.calls.map(([tag]) => tag);
    expect(logTags).toContain("intellipay-postback-invoice-job-fetched");
    expect(logTags).toContain("intellipay-postback-invoice-payment-success");
    expect(logTags).toContain("intellipay-postback-invoice-response-success");
  });
});

View File

@@ -0,0 +1,277 @@
// Unit tests for the small payment helper utilities (vitest).
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";

// Helpers under test are CommonJS modules, required directly.
const getPaymentType = require("../getPaymentType");
const decodeComment = require("../decodeComment");
const getCptellerUrl = require("../getCptellerUrl");
const handlePaymentValidationError = require("../handlePaymentValidationError");
const getShopCredentials = require("../getShopCredentials");

describe("Payment Processing Functions", () => {
  // DecodeComment Tests
  // decodeComment: base64 -> JSON, returning null on any malformed input.
  describe("decodeComment", () => {
    it("decodes a valid base64-encoded JSON comment", () => {
      const encoded = "eyJ0ZXN0IjoiZGF0YSJ9"; // {"test":"data"}
      const expected = { test: "data" };
      expect(decodeComment(encoded)).toEqual(expected);
    });
    it("decodes a complex base64-encoded JSON with payments", () => {
      const encoded = "eyJwYXltZW50cyI6W3siam9iaWQiOiIxMjMifV19"; // {"payments":[{"jobid":"123"}]}
      const expected = { payments: [{ jobid: "123" }] };
      expect(decodeComment(encoded)).toEqual(expected);
    });
    it("returns null when comment is null", () => {
      expect(decodeComment(null)).toBeNull();
    });
    it("returns null when comment is undefined", () => {
      expect(decodeComment(undefined)).toBeNull();
    });
    it("returns null when comment is an empty string", () => {
      expect(decodeComment("")).toBeNull();
    });
    it("returns null when comment is malformed base64", () => {
      expect(decodeComment("!@#$%")).toBeNull();
    });
    it("returns null when comment is valid base64 but not valid JSON", () => {
      expect(decodeComment("aW52YWxpZA==")).toBeNull(); // decodes to "invalid"
    });
  });

  // GetPaymentType Tests
  // getPaymentType: maps a processor card type through the shop's payment map,
  // falling back to the raw value when no mapping exists.
  describe("getPaymentType", () => {
    it("returns mapped value when card type exists in mapping", () => {
      const ipMapping = { visa: "Visa Card", amex: "American Express" };
      expect(getPaymentType(ipMapping, "visa")).toBe("Visa Card");
    });
    it("returns original value when card type not in mapping", () => {
      const ipMapping = { visa: "Visa Card" };
      expect(getPaymentType(ipMapping, "mastercard")).toBe("mastercard");
    });
    it("handles lowercase conversion", () => {
      const ipMapping = { visa: "Visa Card" };
      expect(getPaymentType(ipMapping, "VISA")).toBe("Visa Card");
    });
    it("handles null mapping", () => {
      expect(getPaymentType(null, "visa")).toBe("visa");
    });
    it("handles undefined mapping", () => {
      expect(getPaymentType(undefined, "visa")).toBe("visa");
    });
    it("handles empty string card type", () => {
      const ipMapping = { visa: "Visa Card" };
      expect(getPaymentType(ipMapping, "")).toBe("");
    });
    it("handles undefined card type", () => {
      const ipMapping = { visa: "Visa Card" };
      expect(getPaymentType(ipMapping, undefined)).toBe(undefined);
    });
  });

  // GetCptellerUrl Tests
  // getCptellerUrl: builds the CPTeller endpoint; domain switches on NODE_ENV,
  // version segment applies to webapi only, params become a query string.
  describe("getCptellerUrl", () => {
    const originalEnv = process.env.NODE_ENV;
    afterEach(() => {
      process.env.NODE_ENV = originalEnv;
    });
    it("uses test domain in non-production environment", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({ apiType: "webapi" });
      expect(url).toEqual("https://test.cpteller.com/api/webapi.cfc");
    });
    it("uses secure domain in production environment", () => {
      process.env.NODE_ENV = "production";
      const url = getCptellerUrl({ apiType: "webapi" });
      expect(url).toEqual("https://secure.cpteller.com/api/webapi.cfc");
    });
    it("adds version number for webapi type", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({ apiType: "webapi", version: "26" });
      expect(url).toEqual("https://test.cpteller.com/api/26/webapi.cfc");
    });
    it("constructs custapi URL without version number", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({ apiType: "custapi", version: "26" });
      expect(url).toEqual("https://test.cpteller.com/api/custapi.cfc");
    });
    it("adds query parameters to the URL", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({
        apiType: "webapi",
        params: { method: "payment_refund", test: "value" }
      });
      expect(url).toEqual("https://test.cpteller.com/api/webapi.cfc?method=payment_refund&test=value");
    });
    it("handles empty params object", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({ apiType: "webapi", params: {} });
      expect(url).toEqual("https://test.cpteller.com/api/webapi.cfc");
    });
    it("defaults to webapi when no apiType is provided", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({});
      expect(url).toEqual("https://test.cpteller.com/api/webapi.cfc");
    });
    it("combines version and query parameters correctly", () => {
      process.env.NODE_ENV = "";
      const url = getCptellerUrl({
        apiType: "webapi",
        version: "26",
        params: { method: "fee" }
      });
      expect(url).toEqual("https://test.cpteller.com/api/26/webapi.cfc?method=fee");
    });
  });

  // GetShopCredentials Tests
  // getShopCredentials: env vars outside production; Secrets Manager in production.
  // NOTE(review): vitest hoists `vi.mock` calls written at module top level; calling
  // it inside beforeEach with a factory may not intercept the module as intended.
  // Also `getShopCredentials` is required at file load, before `vi.resetModules()`
  // runs — confirm the AWS mock is actually applied to the tested reference.
  describe("getShopCredentials", () => {
    const originalEnv = { ...process.env };
    let mockSend;
    beforeEach(() => {
      mockSend = vi.fn();
      vi.mock("@aws-sdk/client-secrets-manager", () => {
        return {
          SecretsManagerClient: vi.fn(() => ({
            send: mockSend
          })),
          GetSecretValueCommand: vi.fn((input) => input)
        };
      });
      process.env.INTELLIPAY_MERCHANTKEY = "test-merchant-key";
      process.env.INTELLIPAY_APIKEY = "test-api-key";
      vi.resetModules();
    });
    afterEach(() => {
      process.env = { ...originalEnv };
      vi.restoreAllMocks();
      vi.unmock("@aws-sdk/client-secrets-manager");
    });
    it("returns environment variables in non-production environment", async () => {
      process.env.NODE_ENV = "development";
      const result = await getShopCredentials({ imexshopid: "12345" });
      expect(result).toEqual({
        merchantkey: "test-merchant-key",
        apikey: "test-api-key"
      });
      expect(mockSend).not.toHaveBeenCalled();
    });
    it("returns undefined when imexshopid is missing in production", async () => {
      process.env.NODE_ENV = "production";
      const result = await getShopCredentials({ name: "Test Shop" });
      expect(result).toBeUndefined();
      expect(mockSend).not.toHaveBeenCalled();
    });
    it("returns undefined for null bodyshop in production", async () => {
      process.env.NODE_ENV = "production";
      const result = await getShopCredentials(null);
      expect(result).toBeUndefined();
      expect(mockSend).not.toHaveBeenCalled();
    });
    it("returns undefined for undefined bodyshop in production", async () => {
      process.env.NODE_ENV = "production";
      const result = await getShopCredentials(undefined);
      expect(result).toBeUndefined();
      expect(mockSend).not.toHaveBeenCalled();
    });
  });

  // HandlePaymentValidationError Tests
  // handlePaymentValidationError: logs the error and replies 400 "Bad Request: <msg>".
  describe("handlePaymentValidationError", () => {
    it("logs error and sends 400 response", () => {
      const mockLog = vi.fn();
      const mockLogger = { log: mockLog };
      const mockRes = {
        status: vi.fn().mockReturnThis(),
        send: vi.fn().mockReturnThis()
      };
      const logCode = "test-validation-error";
      const message = "Invalid data";
      const logMeta = { field: "test", value: 123 };
      const result = handlePaymentValidationError(mockRes, mockLogger, logCode, message, logMeta);
      expect(mockLog).toHaveBeenCalledWith(logCode, "ERROR", "api", null, {
        message,
        ...logMeta
      });
      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.send).toHaveBeenCalledWith(`Bad Request: ${message}`);
      expect(result).toBe(mockRes);
    });
    it("formats different error messages correctly", () => {
      const mockLog = vi.fn();
      const mockLogger = { log: mockLog };
      const mockRes = {
        status: vi.fn().mockReturnThis(),
        send: vi.fn().mockReturnThis()
      };
      handlePaymentValidationError(mockRes, mockLogger, "error-code", "Custom error");
      expect(mockRes.send).toHaveBeenCalledWith("Bad Request: Custom error");
    });
    it("passes different logCodes to logger", () => {
      const mockLog = vi.fn();
      const mockLogger = { log: mockLog };
      const mockRes = {
        status: vi.fn().mockReturnThis(),
        send: vi.fn().mockReturnThis()
      };
      handlePaymentValidationError(mockRes, mockLogger, "custom-log-code", "Error message");
      expect(mockLog).toHaveBeenCalledWith("custom-log-code", "ERROR", "api", null, { message: "Error message" });
    });
    it("works with minimal logMeta", () => {
      const mockLog = vi.fn();
      const mockLogger = { log: mockLog };
      const mockRes = {
        status: vi.fn().mockReturnThis(),
        send: vi.fn().mockReturnThis()
      };
      handlePaymentValidationError(mockRes, mockLogger, "error-code", "Error message", {});
      expect(mockLog).toHaveBeenCalledWith("error-code", "ERROR", "api", null, { message: "Error message" });
    });
    it("works with undefined logMeta", () => {
      const mockLog = vi.fn();
      const mockLogger = { log: mockLog };
      const mockRes = {
        status: vi.fn().mockReturnThis(),
        send: vi.fn().mockReturnThis()
      };
      handlePaymentValidationError(mockRes, mockLogger, "error-code", "Error message");
      expect(mockLog).toHaveBeenCalledWith("error-code", "ERROR", "api", null, { message: "Error message" });
    });
  });
});

View File

@@ -1,7 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const adminClient = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const logger = require("../utils/logger");
const InstanceMgr = require("../utils/instanceMgr").default;
@@ -45,6 +43,10 @@ exports.totalsSsu = async function (req, res) {
}
});
if (!result) {
throw new Error("Failed to update job totals");
}
res.status(200).send();
} catch (error) {
logger.log("job-totals-ssu-USA-error", "ERROR", req?.user?.email, id, {
@@ -56,10 +58,10 @@ exports.totalsSsu = async function (req, res) {
}
};
//IMPORTANT*** These two functions MUST be mirrrored.
//IMPORTANT*** These two functions MUST be mirrored.
async function TotalsServerSide(req, res) {
const { job, client } = req.body;
await AutoAddAtsIfRequired({ job: job, client: client });
await AtsAdjustmentsIfRequired({ job: job, client: client, user: req?.user });
try {
let ret = {
@@ -131,6 +133,9 @@ async function TotalsServerSide(req, res) {
}
}
// Exported for testing purposes.
exports.TotalsServerSide = TotalsServerSide;
async function Totals(req, res) {
const { job, id } = req.body;
@@ -138,10 +143,11 @@ async function Totals(req, res) {
const client = req.userGraphQLClient;
logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, job.id, {
jobid: job.id
jobid: job.id,
id: id
});
await AutoAddAtsIfRequired({ job, client });
await AtsAdjustmentsIfRequired({ job, client, user: req.user });
try {
let ret = {
@@ -153,7 +159,7 @@ async function Totals(req, res) {
res.status(200).json(ret);
} catch (error) {
logger.log("job-totals-USA-error", "ERROR", req.user.email, job.id, {
logger.log("job-totals-ssu-USA-error", "ERROR", req.user.email, job.id, {
jobid: job.id,
error: error.message,
stack: error.stack
@@ -162,40 +168,45 @@ async function Totals(req, res) {
}
}
async function AutoAddAtsIfRequired({ job, client }) {
//Check if ATS should be automatically added.
if (job.auto_add_ats) {
//Get the total sum of hours that should be the ATS amount.
//Check to see if an ATS line exists.
async function AtsAdjustmentsIfRequired({ job, client, user }) {
if (job.auto_add_ats || job.flat_rate_ats) {
let atsAmount = 0;
let atsLineIndex = null;
const atsHours = job.joblines.reduce((acc, val, index) => {
if (val.line_desc && val.line_desc.toLowerCase() === "ats amount") {
atsLineIndex = index;
}
if (
val.mod_lbr_ty !== "LA1" &&
val.mod_lbr_ty !== "LA2" &&
val.mod_lbr_ty !== "LA3" &&
val.mod_lbr_ty !== "LA4" &&
val.mod_lbr_ty !== "LAU" &&
val.mod_lbr_ty !== "LAG" &&
val.mod_lbr_ty !== "LAS" &&
val.mod_lbr_ty !== "LAA"
) {
acc = acc + val.mod_lb_hrs;
}
//Check if ATS should be automatically added.
if (job.auto_add_ats) {
const excludedLaborTypes = new Set(["LAA", "LAG", "LAS", "LAU", "LA1", "LA2", "LA3", "LA4"]);
return acc;
}, 0);
//Get the total sum of hours that should be the ATS amount.
//Check to see if an ATS line exists.
const atsHours = job.joblines.reduce((acc, val, index) => {
if (val.line_desc?.toLowerCase() === "ats amount") {
atsLineIndex = index;
}
const atsAmount = atsHours * (job.rate_ats || 0);
//If it does, update it in place, and make sure it is updated for local calculations.
if (!excludedLaborTypes.has(val.mod_lbr_ty)) {
acc = acc + val.mod_lb_hrs;
}
return acc;
}, 0);
atsAmount = atsHours * (job.rate_ats || 0);
}
//Check if a Flat Rate ATS should be added.
if (job.flat_rate_ats) {
atsLineIndex = ((i) => (i === -1 ? null : i))(
job.joblines.findIndex((line) => line.line_desc?.toLowerCase() === "ats amount")
);
atsAmount = job.rate_ats_flat || 0;
}
//If it does not, create one for local calculations and insert it.
if (atsLineIndex === null) {
const newAtsLine = {
jobid: job.id,
alt_partm: null,
line_no: 35,
unq_seq: 0,
line_ind: "E",
line_desc: "ATS Amount",
@@ -220,19 +231,42 @@ async function AutoAddAtsIfRequired({ job, client }) {
prt_dsmk_m: 0.0
};
const result = await client.request(queries.INSERT_NEW_JOB_LINE, {
lineInput: [newAtsLine]
});
try {
const result = await client.request(queries.INSERT_NEW_JOB_LINE, {
lineInput: [newAtsLine]
});
job.joblines.push(newAtsLine);
if (result) {
job.joblines.push(newAtsLine);
}
} catch (error) {
logger.log("job-totals-ssu-ats-error", "ERROR", user?.email, job.id, {
jobid: job.id,
error: error.message,
stack: error.stack
});
}
}
//If it does not, create one for local calculations and insert it.
//If it does, update it in place, and make sure it is updated for local calculations.
else {
const result = await client.request(queries.UPDATE_JOB_LINE, {
line: { act_price: atsAmount },
lineId: job.joblines[atsLineIndex].id
});
job.joblines[atsLineIndex].act_price = atsAmount;
try {
const result = await client.request(queries.UPDATE_JOB_LINE, {
line: { act_price: atsAmount },
lineId: job.joblines[atsLineIndex].id
});
if (result) {
job.joblines[atsLineIndex].act_price = atsAmount;
}
} catch (error) {
logger.log("job-totals-ssu-ats-error", "ERROR", user?.email, job.id, {
jobid: job.id,
atsLineIndex: atsLineIndex,
atsAmount: atsAmount,
jobline: job.joblines[atsLineIndex],
error: error.message,
stack: error.stack
});
}
}
}
}
@@ -314,7 +348,7 @@ async function CalculateRatesTotals({ job, client }) {
let hasMashLine = false;
let hasMahwLine = false;
let hasCustomMahwLine;
let mapaOpCodes = ParseCalopCode(job.materials["MAPA"]?.cal_opcode);
// let mapaOpCodes = ParseCalopCode(job.materials["MAPA"]?.cal_opcode);
let mashOpCodes = ParseCalopCode(job.materials["MASH"]?.cal_opcode);
jobLines.forEach((item) => {
@@ -564,7 +598,7 @@ function CalculatePartsTotals(jobLines, parts_tax_rates, job) {
}
};
default:
default: {
if (!value.part_type && value.db_ref !== "900510" && value.db_ref !== "900511") return acc;
const discountAmount =
@@ -631,6 +665,7 @@ function CalculatePartsTotals(jobLines, parts_tax_rates, job) {
)
}
};
}
}
},
{
@@ -652,7 +687,7 @@ function CalculatePartsTotals(jobLines, parts_tax_rates, job) {
let adjustments = {};
//Track all adjustments that need to be made.
const linesToAdjustForDiscount = [];
//const linesToAdjustForDiscount = [];
Object.keys(parts_tax_rates).forEach((key) => {
//Check if there's a discount or a mark up.
let disc = Dinero(),
@@ -1019,7 +1054,9 @@ function CalculateTaxesTotals(job, otherTotals) {
}
} catch (error) {
logger.log("job-totals-USA Key with issue", "error", null, job.id, {
key
key: key,
error: error.message,
stack: error.stack
});
}
});
@@ -1157,6 +1194,7 @@ function CalculateTaxesTotals(job, otherTotals) {
exports.default = Totals;
//eslint-disable-next-line no-unused-vars
function DiscountNotAlreadyCounted(jobline, joblines) {
return false;
}
@@ -1172,27 +1210,35 @@ function IsTrueOrYes(value) {
return value === true || value === "Y" || value === "y";
}
async function UpdateJobLines(joblinesToUpdate) {
if (joblinesToUpdate.length === 0) return;
const updateQueries = joblinesToUpdate.map((line, index) =>
generateUpdateQuery(_.pick(line, ["id", "prt_dsmk_m", "prt_dsmk_p"]), index)
);
const query = `
mutation UPDATE_EST_LINES{
${updateQueries}
}
`;
// Function not in use from RO to IO Merger 02/05/2024
// async function UpdateJobLines(joblinesToUpdate) {
// if (joblinesToUpdate.length === 0) return;
// const updateQueries = joblinesToUpdate.map((line, index) =>
// generateUpdateQuery(_.pick(line, ["id", "prt_dsmk_m", "prt_dsmk_p"]), index)
// );
// const query = `
// mutation UPDATE_EST_LINES{
// ${updateQueries}
// }
// `;
// try {
// const result = await adminClient.request(query);
// void result;
// } catch (error) {
// logger.log("update-job-lines", "error", null, null, {
// error: error.message,
// stack: error.stack
// });
// }
// }
const result = await adminClient.request(query);
}
const generateUpdateQuery = (lineToUpdate, index) => {
return `
update_joblines${index}: update_joblines(where: { id: { _eq: "${
lineToUpdate.id
}" } }, _set: ${JSON.stringify(lineToUpdate).replace(/"(\w+)"\s*:/g, "$1:")}) {
returning {
id
}
}`;
};
// const generateUpdateQuery = (lineToUpdate, index) => {
// return `
// update_joblines${index}: update_joblines(where: { id: { _eq: "${
// lineToUpdate.id
// }" } }, _set: ${JSON.stringify(lineToUpdate).replace(/"(\w+)"\s*:/g, "$1:")}) {
// returning {
// id
// }
// }`;
// };

View File

@@ -0,0 +1,139 @@
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
const moment = require("moment");
const { captureFixture } = require("./utils/seralizeHelper");
const { TotalsServerSide: totalsServerSideCA } = require("./job-totals"); // Canadian version (imex)
const { TotalsServerSide: totalsServerSideUS } = require("./job-totals-USA");
const InstanceMgr = require("../utils/instanceMgr").default;
const { uploadFileToS3 } = require("../utils/s3");
// requires two buckets be made per env, job-totals-test, job-totals-production, locally it will
// use `job-totals` in the owncloud stack
/**
 * Resolves the environment prefix used to name the S3 job-totals bucket.
 * Only "production" maps to "production"; every other NODE_ENV value
 * (including "test", "development", and unset) maps to "test".
 * @returns {string} "production" or "test"
 */
const getEnvPrefix = () => (process.env?.NODE_ENV === "production" ? "production" : "test");
const envPrefix = getEnvPrefix();
// Bucket selection: local development uses a fixed owncloud-stack bucket;
// otherwise the per-environment, per-instance bucket is chosen via InstanceMgr.
// NOTE(review): assumes the job-totals-{test,production} (and -rome) buckets
// exist for each environment — see the comment at the top of this file.
const S3_BUCKET_NAME =
  process.env?.NODE_ENV === "development"
    ? "imex-job-totals"
    : InstanceMgr({
        imex: `job-totals-${envPrefix}`,
        rome: `job-totals-${envPrefix}-rome`
      });
/**
 * Generates a unique S3 key for the job totals file.
 * Uses the native Date ISO formatter instead of moment (moment is in
 * maintenance mode and `moment().toISOString()` produces the identical
 * `YYYY-MM-DDTHH:mm:ss.sssZ` string as `new Date().toISOString()`).
 * @param {string} jobId - The job ID
 * @returns {string} - S3 key of the form `<jobId>-<ISO timestamp>.json`
 */
const generateS3Key = (jobId) => `${jobId}-${new Date().toISOString()}.json`;
/**
 * Serializes job-totals data as pretty-printed JSON and uploads it to the
 * configured S3 bucket under a timestamped key.
 * @param {object} data - The payload to persist
 * @param {string} jobId - The job ID (embedded in the S3 key)
 * @param {object} userInfo - User information for logging (expects .email)
 * @returns {Promise<string>} - The S3 key the data was written under
 * @throws Re-throws any failure after logging it, so the caller can respond.
 */
const uploadJobTotalsToS3 = async (data, jobId, userInfo) => {
  const s3Key = generateS3Key(jobId);
  const uploadParams = {
    bucketName: S3_BUCKET_NAME,
    key: s3Key,
    content: JSON.stringify(data, null, 2),
    contentType: "application/json"
  };
  try {
    await uploadFileToS3(uploadParams);
    logger.log(`Job totals uploaded successfully to ${s3Key}`, "info", userInfo.email, jobId);
    return s3Key;
  } catch (error) {
    logger.log("Failed to upload job totals to S3", "error", userInfo.email, jobId, {
      error: error?.message,
      stack: error?.stack
    });
    // Re-throw for the main handler to catch
    throw error;
  }
};
/**
 * Fetches a job record via the user's GraphQL client.
 * @param {object} client - GraphQL client (supports setHeaders().request())
 * @param {string} token - Bearer token placed in the Authorization header
 * @param {string} jobId - Job ID to fetch
 * @returns {Promise<object>} - The `jobs_by_pk` payload of the response
 */
const fetchJobData = async (client, token, jobId) => {
  const response = await client
    .setHeaders({ Authorization: token })
    .request(queries.GET_JOB_BY_PK, { id: jobId });
  return response.jobs_by_pk;
};
/**
 * Express handler that captures a job's totals as a JSON fixture in S3.
 * Fetches the job, recomputes totals with the instance-appropriate engine,
 * serializes job + totals via captureFixture, and uploads the result.
 * @param {object} req - Express request (expects body.id, BearerToken, userGraphQLClient, user)
 * @param {object} res - Express response
 * @returns {Promise<void>}
 */
const jobTotalsRecorder = async (req, res) => {
  const { id: jobId } = req.body;
  const userEmail = req?.user?.email;

  logger.log("Starting job totals recording", "debug", userEmail, jobId);

  try {
    const jobData = await fetchJobData(req.userGraphQLClient, req.BearerToken, jobId);

    // Pick the totals engine for this instance (imex = Canadian, rome = US).
    const totalsFunction = InstanceMgr({
      imex: totalsServerSideCA,
      rome: totalsServerSideUS
    });

    // NOTE(review): the totals engines read `req.body` and `req?.user`; here the
    // original Express `req` is nested under `.req`, so `user` lookups inside the
    // engine resolve to undefined — confirm this is intentional.
    const calculatedTotals = await totalsFunction(
      { body: { job: jobData, client: req.userGraphQLClient }, req },
      res,
      true
    );

    const fixture = captureFixture(jobData, calculatedTotals);
    await uploadJobTotalsToS3(fixture, jobId, { email: userEmail });

    res.status(200).json({ success: true, message: "Job totals recorded successfully" });
  } catch (error) {
    logger.log("Failed to record job totals", "error", userEmail, jobId, {
      error: error?.message,
      stack: error?.stack
    });
    // Avoid sending response if it's already been sent
    if (!res.headersSent) {
      res.status(503).json({
        success: false,
        message: "Error processing job totals",
        error: error.message
      });
    }
  }
};
module.exports = jobTotalsRecorder;

View File

@@ -1,7 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const adminClient = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const logger = require("../utils/logger");
//****************************************************** */
@@ -32,6 +30,7 @@ exports.totalsSsu = async function (req, res) {
id: id
});
// Capture the output of TotalsServerSide
const newTotals = await TotalsServerSide({ body: { job: job.jobs_by_pk, client: client } }, res, true);
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.UPDATE_JOB, {
@@ -44,20 +43,25 @@ exports.totalsSsu = async function (req, res) {
}
});
if (!result) {
throw new Error("Failed to update job totals");
}
res.status(200).send();
} catch (error) {
logger.log("job-totals-ssu-error", "ERROR", req.user.email, id, {
jobid: id,
error
error: error.message,
stack: error.stack
});
res.status(503).send();
}
};
//IMPORTANT*** These two functions MUST be mirrrored.
//IMPORTANT*** These two functions MUST be mirrored.
async function TotalsServerSide(req, res) {
const { job, client } = req.body;
await AutoAddAtsIfRequired({ job: job, client: client });
await AtsAdjustmentsIfRequired({ job: job, client: client, user: req?.user });
try {
let ret = {
@@ -71,25 +75,28 @@ async function TotalsServerSide(req, res) {
} catch (error) {
logger.log("job-totals-ssu-error", "ERROR", req?.user?.email, job.id, {
jobid: job.id,
error
error: error.message,
stack: error.stack
});
res.status(400).send(JSON.stringify(error));
}
}
// Exported for testing purposes
exports.TotalsServerSide = TotalsServerSide;
async function Totals(req, res) {
const { job, id } = req.body;
const logger = req.logger;
const client = req.userGraphQLClient;
logger.log("job-totals", "DEBUG", req.user.email, job.id, {
jobid: job.id
logger.log("job-totals-ssu", "DEBUG", req.user.email, job.id, {
jobid: job.id,
id: id
});
logger.log("job-totals-ssu", "DEBUG", req.user.email, id, null);
await AutoAddAtsIfRequired({ job, client });
await AtsAdjustmentsIfRequired({ job, client, user: req.user });
try {
let ret = {
@@ -101,48 +108,54 @@ async function Totals(req, res) {
res.status(200).json(ret);
} catch (error) {
logger.log("job-totals-error", "ERROR", req.user.email, job.id, {
logger.log("job-totals-ssu-error", "ERROR", req.user.email, job.id, {
jobid: job.id,
error
error: error.message,
stack: error.stack
});
res.status(400).send(JSON.stringify(error));
}
}
async function AutoAddAtsIfRequired({ job, client }) {
//Check if ATS should be automatically added.
if (job.auto_add_ats) {
//Get the total sum of hours that should be the ATS amount.
//Check to see if an ATS line exists.
async function AtsAdjustmentsIfRequired({ job, client, user }) {
if (job.auto_add_ats || job.flat_rate_ats) {
let atsAmount = 0;
let atsLineIndex = null;
const atsHours = job.joblines.reduce((acc, val, index) => {
if (val.line_desc && val.line_desc.toLowerCase() === "ats amount") {
atsLineIndex = index;
}
if (
val.mod_lbr_ty !== "LA1" &&
val.mod_lbr_ty !== "LA2" &&
val.mod_lbr_ty !== "LA3" &&
val.mod_lbr_ty !== "LA4" &&
val.mod_lbr_ty !== "LAU" &&
val.mod_lbr_ty !== "LAG" &&
val.mod_lbr_ty !== "LAS" &&
val.mod_lbr_ty !== "LAA"
) {
acc = acc + val.mod_lb_hrs;
}
//Check if ATS should be automatically added.
if (job.auto_add_ats) {
const excludedLaborTypes = new Set(["LAA", "LAG", "LAS", "LAU", "LA1", "LA2", "LA3", "LA4"]);
return acc;
}, 0);
//Get the total sum of hours that should be the ATS amount.
//Check to see if an ATS line exists.
const atsHours = job.joblines.reduce((acc, val, index) => {
if (val.line_desc?.toLowerCase() === "ats amount") {
atsLineIndex = index;
}
const atsAmount = atsHours * (job.rate_ats || 0);
//If it does, update it in place, and make sure it is updated for local calculations.
if (!excludedLaborTypes.has(val.mod_lbr_ty)) {
acc = acc + val.mod_lb_hrs;
}
return acc;
}, 0);
atsAmount = atsHours * (job.rate_ats || 0);
}
//Check if a Flat Rate ATS should be added.
if (job.flat_rate_ats) {
atsLineIndex = ((i) => (i === -1 ? null : i))(
job.joblines.findIndex((line) => line.line_desc?.toLowerCase() === "ats amount")
);
atsAmount = job.rate_ats_flat || 0;
}
//If it does not, create one for local calculations and insert it.
if (atsLineIndex === null) {
const newAtsLine = {
jobid: job.id,
alt_partm: null,
line_no: 35,
unq_seq: 0,
line_ind: "E",
line_desc: "ATS Amount",
@@ -167,22 +180,43 @@ async function AutoAddAtsIfRequired({ job, client }) {
prt_dsmk_m: 0.0
};
const result = await client.request(queries.INSERT_NEW_JOB_LINE, {
lineInput: [newAtsLine]
});
try {
const result = await client.request(queries.INSERT_NEW_JOB_LINE, {
lineInput: [newAtsLine]
});
job.joblines.push(newAtsLine);
if (result) {
job.joblines.push(newAtsLine);
}
} catch (error) {
logger.log("job-totals-ssu-ats-error", "ERROR", user?.email, job.id, {
jobid: job.id,
error: error.message,
stack: error.stack
});
}
}
//If it does not, create one for local calculations and insert it.
//If it does, update it in place, and make sure it is updated for local calculations.
else {
const result = await client.request(queries.UPDATE_JOB_LINE, {
line: { act_price: atsAmount },
lineId: job.joblines[atsLineIndex].id
});
job.joblines[atsLineIndex].act_price = atsAmount;
try {
const result = await client.request(queries.UPDATE_JOB_LINE, {
line: { act_price: atsAmount },
lineId: job.joblines[atsLineIndex].id
});
if (result) {
job.joblines[atsLineIndex].act_price = atsAmount;
}
} catch (error) {
logger.log("job-totals-ssu-ats-error", "ERROR", user?.email, job.id, {
jobid: job.id,
atsLineIndex: atsLineIndex,
atsAmount: atsAmount,
jobline: job.joblines[atsLineIndex],
error: error.message,
stack: error.stack
});
}
}
//console.log(job.jobLines);
}
}

View File

@@ -0,0 +1,72 @@
import fs from "fs";
import path from "path";
import { describe, it, expect } from "vitest";
import { TotalsServerSide as TotalsServerSideCA } from "../job-totals"; // Canadian version (imex)
import { TotalsServerSide as TotalsServerSideUS } from "../job-totals-USA";
import { isFunction } from "lodash"; // US version (rome)
/**
 * JSON.stringify replacer that converts Dinero-like values (anything exposing a
 * toObject() method) into plain objects so fixture output is deeply comparable.
 * @param key   current property name (unused)
 * @param value current property value
 * @returns {*} value.toObject() for Dinero-like values, otherwise value unchanged
 */
const dineroReplacer = (key, value) => {
  // Bug fix: the previous `isFunction(value)` check never matched Dinero
  // instances (they are plain objects, not functions) and would have thrown
  // calling value.toObject() on a real function. Detect the toObject method
  // itself instead, mirroring serializeDinero in the fixture recorder.
  if (value && typeof value.toObject === "function") {
    return value.toObject();
  }
  return value;
};
/**
 * Round-trips a value through JSON using the Dinero-aware replacer, producing
 * a plain, deeply-comparable structure (no class instances, no functions).
 * @param obj value to normalize
 * @returns {any} plain-data clone of obj
 */
const normalizeOutput = (obj) => {
  const serialized = JSON.stringify(obj, dineroReplacer);
  return JSON.parse(serialized);
};
/**
 * Replays recorded job-totals fixtures through the live TotalsServerSide
 * implementations and asserts the computed totals still match the recorded
 * output. Each JSON file in fixtures/job-totals is one recorded job; its
 * "environment" field selects the US (rome) or Canadian (imex) engine.
 */
describe("TotalsServerSide fixture tests", () => {
  const fixturesDir = path.join(__dirname, "fixtures", "job-totals");
  // One test case per recorded fixture file.
  const fixtureFiles = fs.readdirSync(fixturesDir).filter((f) => f.endsWith(".json"));
  // Stub GraphQL client: resolves every request with an empty object so the
  // totals code never touches the real backend during replay.
  const dummyClient = {
    request: async () => {
      return {};
    }
  };
  // Stub Express response: swallows status().send() calls made on error paths.
  const dummyRes = {
    status: () => ({ send: () => {} })
  };
  fixtureFiles.forEach((file) => {
    it(`should produce matching output for fixture file ${file}`, async () => {
      const fixturePath = path.join(fixturesDir, file);
      const fixtureData = JSON.parse(fs.readFileSync(fixturePath, "utf8"));
      const { environment, input, output: expectedOutput } = fixtureData;
      // Minimal request shape the totals handlers read; user is intentionally empty.
      const req = {
        body: {
          job: input,
          client: dummyClient
        },
        user: {}
      };
      // Route to the engine that originally produced the fixture.
      const computedOutput =
        environment === "us" ? await TotalsServerSideUS(req, dummyRes) : await TotalsServerSideCA(req, dummyRes);
      // Normalize both sides (Dinero -> plain objects) before deep comparison.
      const normalizedComputed = normalizeOutput(computedOutput);
      const normalizedExpected = normalizeOutput(expectedOutput);
      expect(normalizedComputed).toEqual(normalizedExpected);
    });
  });
});

View File

@@ -0,0 +1,58 @@
const fs = require("fs");
const path = require("path");
const { default: InstanceMgr } = require("../../utils/instanceMgr");
const fixtureDir = path.join(__dirname, "..", "test", "fixtures", "job-totals");
/**
 * JSON.stringify replacer for Dinero.js values: any object exposing a
 * toObject() method is flattened to its plain-object form; everything else
 * passes through untouched.
 * @param key   current property name (unused)
 * @param value current property value
 * @returns {*} plain-object form of Dinero values, otherwise value
 */
const serializeDinero = (key, value) => {
  const isDineroLike =
    Boolean(value) && typeof value === "object" && typeof value.toObject === "function";
  return isDineroLike ? value.toObject() : value;
};
/**
 * Builds (and optionally writes to disk) a replayable job-totals fixture:
 * the input job, the computed output, and which deployment produced it.
 * @param inputData   job object fed to the totals calculation (id names the file)
 * @param outputData  computed totals result
 * @param saveLocally when true, persist the fixture under the fixtures directory
 * @returns the fixture object that was (or would have been) saved
 */
const captureFixture = (inputData, outputData, saveLocally) => {
  // Record which totals engine produced this so replays pick the same one.
  const environment = InstanceMgr({
    imex: "ca",
    rome: "us"
  });
  const fixture = {
    environment,
    meta: {
      ro_number: inputData.ro_number,
      updated_at: inputData.updated_at
    },
    input: inputData,
    output: outputData
  };
  if (saveLocally) {
    if (!fs.existsSync(fixtureDir)) {
      fs.mkdirSync(fixtureDir, { recursive: true });
    }
    const targetPath = path.join(fixtureDir, `${inputData.id}.json`);
    // Dinero values are flattened by the custom replacer so they round-trip as plain JSON.
    fs.writeFileSync(targetPath, JSON.stringify(fixture, serializeDinero, 2), "utf8");
  }
  return fixture;
};
module.exports = {
captureFixture,
serializeDinero
};

View File

@@ -0,0 +1,348 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../utils/logger");
const {
S3Client,
PutObjectCommand,
GetObjectCommand,
CopyObjectCommand,
DeleteObjectCommand
} = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const crypto = require("crypto");
const { InstanceRegion } = require("../utils/instanceMgr");
const {
GET_DOCUMENTS_BY_JOB,
QUERY_TEMPORARY_DOCS,
GET_DOCUMENTS_BY_IDS,
DELETE_MEDIA_DOCUMENTS
} = require("../graphql-client/queries");
const archiver = require("archiver");
const stream = require("node:stream");
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN.
const imgproxyKey = process.env.IMGPROXY_KEY;
const imgproxySalt = process.env.IMGPROXY_SALT;
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;
//Generate a signed upload link for the S3 bucket.
//All uploads must be going to the same shop and jobid.
exports.generateSignedUploadUrls = async (req, res) => {
const { filenames, bodyshopid, jobid } = req.body;
try {
logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid });
const signedUrls = [];
for (const filename of filenames) {
const key = filename;
const client = new S3Client({ region: InstanceRegion() });
const command = new PutObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: key,
StorageClass: "INTELLIGENT_TIERING"
});
const presignedUrl = await getSignedUrl(client, command, { expiresIn: 360 });
signedUrls.push({ filename, presignedUrl, key });
}
logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
res.json({
success: true,
signedUrls
});
} catch (error) {
res.status(400).json({
success: false,
message: error.message,
stack: error.stack
});
logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack
});
}
};
// Builds imgproxy URLs (thumbnail + full size) for every document on a job, or
// for the user's temporary documents when no jobid is given. Non-image
// documents additionally get a presigned S3 download link and a raw proxy URL.
// NOTE(review): billid is logged but never used to filter documents — confirm
// whether bill-level filtering is still pending.
exports.getThumbnailUrls = async (req, res) => {
  const { jobid, billid } = req.body;
  try {
    logger.log("imgproxy-thumbnails", "DEBUG", req.user?.email, jobid, { billid, jobid });
    //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
    const client = req.userGraphQLClient;
    //If there's no jobid and no billid, we're in temporary documents.
    const data = await (jobid
      ? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
      : client.request(QUERY_TEMPORARY_DOCS));
    // imgproxy processing options: 250x250 center-gravity fill for thumbnails.
    const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
    const s3client = new S3Client({ region: InstanceRegion() });
    const proxiedUrls = [];
    for (const document of data.documents) {
      //Format to follow:
      //<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with unencoded/unhashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
      //When working with documents from Cloudinary, the URL does not include the extension.
      // If the stored key already ends in ".ext", use it verbatim; otherwise
      // append the document's recorded extension (lowercased).
      let key;
      if (/\.[^/.]+$/.test(document.key)) {
        key = document.key;
      } else {
        key = `${document.key}.${document.extension.toLowerCase()}`;
      }
      // Build the S3 path to the object.
      const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
      const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
      //Thumbnail Generation Block
      // The HMAC is computed over salt + "/" + the exact proxy path; imgproxy
      // recomputes and compares it server-side.
      const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
      const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);
      //Full Size URL block
      const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
      const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);
      const s3Props = {};
      if (!document.type.startsWith("image")) {
        //If not a picture, we need to get a signed download link to the file using S3 (or cloudfront preferably)
        const command = new GetObjectCommand({
          Bucket: imgproxyDestinationBucket,
          Key: key
        });
        const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
        s3Props.presignedGetUrl = presignedGetUrl;
        // "raw:1" tells imgproxy to pass the file through without processing.
        const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
        const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
        s3Props.originalUrlViaProxyPath = `${imgproxyBaseUrl}/${originalHmacSalt}/${originalProxyPath}`;
      }
      // Merge order matters: document fields last so they win over our computed props.
      proxiedUrls.push({
        originalUrl: `${imgproxyBaseUrl}/${fullSizeHmacSalt}/${fullSizeProxyPath}`,
        thumbnailUrl: `${imgproxyBaseUrl}/${thumbHmacSalt}/${thumbProxyPath}`,
        fullS3Path,
        base64UrlEncodedKeyString,
        thumbProxyPath,
        ...s3Props,
        ...document
      });
    }
    res.json(proxiedUrls);
    //Iterate over them, build the link based on the media type, and return the array.
  } catch (error) {
    logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
      jobid,
      billid,
      message: error.message,
      stack: error.stack
    });
    res.status(400).json({ message: error.message, stack: error.stack });
  }
};
exports.getBillFiles = async (req, res) => {
  //TODO: Given a bill ID, get the documents associated to it.
  //Not implemented yet — this handler never responds, so any route wired to it
  //will hang until the client times out. NOTE(review): confirm this is not routed.
};
exports.downloadFiles = async (req, res) => {
//Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
const { jobid, billid, documentids } = req.body;
try {
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobid, { billid, jobid, documentids });
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//Query for the keys of the document IDs
const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
//Using the Keys, get all of the S3 links, zip them, and send back to the client.
const s3client = new S3Client({ region: InstanceRegion() });
const archiveStream = archiver("zip");
archiveStream.on("error", (error) => {
console.error("Archival encountered an error:", error);
throw new Error(error);
});
const passthrough = new stream.PassThrough();
archiveStream.pipe(passthrough);
for (const key of data.documents.map((d) => d.key)) {
const response = await s3client.send(new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key }));
// :: `response.Body` is a Buffer
console.log(path.basename(key));
archiveStream.append(response.Body, { name: path.basename(key) });
}
archiveStream.finalize();
const archiveKey = `archives/${jobid}/archive-${new Date().toISOString()}.zip`;
const parallelUploads3 = new Upload({
client: s3client,
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passthrough }
});
parallelUploads3.on("httpUploadProgress", (progress) => {
console.log(progress);
});
const uploadResult = await parallelUploads3.done();
//Generate the presigned URL to download it.
const presignedUrl = await getSignedUrl(
s3client,
new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: archiveKey }),
{ expiresIn: 360 }
);
res.json({ success: true, url: presignedUrl });
//Iterate over them, build the link based on the media type, and return the array.
} catch (error) {
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
jobid,
billid,
message: error.message,
stack: error.stack
});
res.status(400).json({ message: error.message, stack: error.stack });
}
};
exports.deleteFiles = async (req, res) => {
//Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
//Mark as deleted from the documents section of the database.
const { ids } = req.body;
try {
logger.log("imgproxy-delete-files", "DEBUG", req.user.email, null, { ids });
const client = req.userGraphQLClient;
//Do this to make sure that they are only deleting things that they have access to
const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: ids });
const s3client = new S3Client({ region: InstanceRegion() });
const deleteTransactions = [];
data.documents.forEach((document) => {
deleteTransactions.push(
(async () => {
try {
// Delete the original object
const deleteResult = await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.key
})
);
return document;
} catch (error) {
return { document, error: error, bucket: imgproxyDestinationBucket };
}
})()
);
});
const result = await Promise.all(deleteTransactions);
const errors = result.filter((d) => d.error);
//Delete only the succesful deletes.
const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, {
ids: result.filter((t) => !t.error).map((d) => d.id)
});
res.json({ errors, deleteMutationResult });
} catch (error) {
logger.log("imgproxy-delete-files-error", "ERROR", req.user.email, null, {
ids,
message: error.message,
stack: error.stack
});
res.status(400).json({ message: error.message, stack: error.stack });
}
};
//Moves documents to new S3 keys (copy then delete) and repoints the matching
//database rows at the new key and target job.
//Expects req.body = { documents: [{ id, from, to }], tojobid }.
exports.moveFiles = async (req, res) => {
  const { documents, tojobid } = req.body;
  try {
    logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid });
    const s3client = new S3Client({ region: InstanceRegion() });
    const moveTransactions = [];
    // Each move runs independently; a per-document failure is captured as a
    // result record instead of failing the whole batch.
    documents.forEach((document) => {
      moveTransactions.push(
        (async () => {
          try {
            // Copy the object to the new key
            const copyresult = await s3client.send(
              new CopyObjectCommand({
                Bucket: imgproxyDestinationBucket,
                CopySource: `${imgproxyDestinationBucket}/${document.from}`,
                Key: document.to,
                StorageClass: "INTELLIGENT_TIERING"
              })
            );
            // Delete the original object
            const deleteResult = await s3client.send(
              new DeleteObjectCommand({
                Bucket: imgproxyDestinationBucket,
                Key: document.from
              })
            );
            return document;
          } catch (error) {
            return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket };
          }
        })()
      );
    });
    const result = await Promise.all(moveTransactions);
    const errors = result.filter((d) => d.error);
    // Build one batched mutation with an alias per successfully-moved document.
    // NOTE(review): values are interpolated directly into the query string —
    // if ids/keys can ever contain quotes this breaks (and is injectable);
    // consider GraphQL variables instead.
    let mutations = "";
    result
      .filter((d) => !d.error)
      .forEach((d, idx) => {
        //Create mutation text
        mutations =
          mutations +
          `
        update_doc${idx}:update_documents_by_pk(pk_columns: { id: "${d.id}" }, _set: {key: "${d.to}", jobid: "${tojobid}"}){
          id
        }
        `;
      });
    const client = req.userGraphQLClient;
    if (mutations !== "") {
      const mutationResult = await client.request(`mutation {
        ${mutations}
      }`);
      res.json({ errors, mutationResult });
    } else {
      // No S3 move succeeded, so there is nothing to update in the database.
      res.json({ errors: "No images were succesfully moved on remote server. " });
    }
  } catch (error) {
    logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, {
      documents,
      tojobid,
      message: error.message,
      stack: error.stack
    });
    res.status(400).json({ message: error.message, stack: error.stack });
  }
};
// Encodes a string as unpadded URL-safe base64 (RFC 4648 §5), the form imgproxy
// expects for source-URL segments. Node's built-in "base64url" encoding
// produces exactly what the previous manual replace(+→-, /→_, strip "=") chain did.
function base64UrlEncode(str) {
  return Buffer.from(str).toString("base64url");
}
// Signs an imgproxy request path with the configured key; imgproxy recomputes
// this HMAC server-side to validate the URL. Output is base64url-encoded.
function createHmacSha256(data) {
  const hmac = crypto.createHmac("sha256", imgproxyKey);
  hmac.update(data);
  return hmac.digest("base64url");
}

View File

@@ -11,12 +11,14 @@ require("dotenv").config({
var cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL);
exports.createSignedUploadURL = (req, res) => {
const createSignedUploadURL = (req, res) => {
logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET));
};
exports.downloadFiles = (req, res) => {
exports.createSignedUploadURL = createSignedUploadURL;
const downloadFiles = (req, res) => {
const { ids } = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
@@ -26,8 +28,9 @@ exports.downloadFiles = (req, res) => {
});
res.send(url);
};
exports.downloadFiles = downloadFiles;
exports.deleteFiles = async (req, res) => {
const deleteFiles = async (req, res) => {
const { ids } = req.body;
const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
@@ -88,7 +91,9 @@ exports.deleteFiles = async (req, res) => {
}
};
exports.renameKeys = async (req, res) => {
exports.deleteFiles = deleteFiles;
const renameKeys = async (req, res) => {
const { documents, tojobid } = req.body;
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
@@ -146,6 +151,7 @@ exports.renameKeys = async (req, res) => {
res.json({ errors: "No images were succesfully moved on remote server. " });
}
};
exports.renameKeys = renameKeys;
//Also needs to be updated in upload utility and mobile app.
function DetermineFileType(filetype) {

View File

@@ -160,6 +160,11 @@ async function OpenSearchUpdateHandler(req, res) {
res.status(200).json(response.body);
}
} catch (error) {
// We don't want this spam message existing in development/test,
if (process.env?.NODE_ENV !== "production" && error?.message === "Invalid URL") {
return res.status(400).json(JSON.stringify(error));
}
logger.log("os-handler-error", "ERROR", null, null, {
id: req.body.event.data.new.id,
index: req.body.table.name,
@@ -167,6 +172,7 @@ async function OpenSearchUpdateHandler(req, res) {
stack: error.stack
// body: document
});
res.status(400).json(JSON.stringify(error));
}
}

View File

@@ -1,12 +1,18 @@
const express = require("express");
const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { lightbox_credentials, payment_refund, generate_payment_url, postback, checkfee } = require("../intellipay/intellipay");
const {
lightboxCredentials,
paymentRefund,
generatePaymentUrl,
postBack,
checkFee
} = require("../intellipay/intellipay");
router.post("/lightbox_credentials", validateFirebaseIdTokenMiddleware, lightbox_credentials);
router.post("/payment_refund", validateFirebaseIdTokenMiddleware, payment_refund);
router.post("/generate_payment_url", validateFirebaseIdTokenMiddleware, generate_payment_url);
router.post("/checkfee", validateFirebaseIdTokenMiddleware, checkfee);
router.post("/postback", postback);
router.post("/lightbox_credentials", validateFirebaseIdTokenMiddleware, lightboxCredentials);
router.post("/payment_refund", validateFirebaseIdTokenMiddleware, paymentRefund);
router.post("/generate_payment_url", validateFirebaseIdTokenMiddleware, generatePaymentUrl);
router.post("/checkfee", validateFirebaseIdTokenMiddleware, checkFee);
router.post("/postback", postBack);
module.exports = router;

View File

@@ -7,6 +7,7 @@ const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMI
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { totals, statustransition, totalsSsu, costing, lifecycle, costingmulti, jobUpdated } = require("../job/job");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const jobTotalsRecorder = require("../job/job-totals-recorder");
router.post("/totals", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, totals);
router.post("/statustransition", eventAuthorizationMiddleware, statustransition);
@@ -17,5 +18,6 @@ router.post("/costingmulti", validateFirebaseIdTokenMiddleware, withUserGraphQLC
router.post("/partsscan", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, partsScan);
router.post("/ppc", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, ppc.generatePpc);
router.post("/job-updated", eventAuthorizationMiddleware, jobUpdated);
router.post("/totals-recorder", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, jobTotalsRecorder);
module.exports = router;

View File

@@ -1,13 +1,28 @@
const express = require("express");
const router = express.Router();
const { createSignedUploadURL, downloadFiles, renameKeys, deleteFiles } = require("../media/media");
const {
generateSignedUploadUrls: createSignedUploadURLImgproxy,
getThumbnailUrls: getThumbnailUrlsImgproxy,
downloadFiles: downloadFilesImgproxy,
moveFiles: moveFilesImgproxy,
deleteFiles: deleteFilesImgproxy
} = require("../media/imgproxy-media");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.use(validateFirebaseIdTokenMiddleware);
router.use(withUserGraphQLClientMiddleware);
router.post("/sign", createSignedUploadURL);
router.post("/download", downloadFiles);
router.post("/rename", renameKeys);
router.post("/delete", deleteFiles);
router.post("/imgproxy/sign", createSignedUploadURLImgproxy);
router.post("/imgproxy/thumbnails", getThumbnailUrlsImgproxy);
router.post("/imgproxy/download", downloadFilesImgproxy);
router.post("/imgproxy/rename", moveFilesImgproxy);
router.post("/imgproxy/delete", deleteFilesImgproxy);
module.exports = router;

View File

@@ -14,7 +14,7 @@ const { taskAssignedEmail, tasksRemindEmail } = require("../email/tasksEmails");
const { canvastest } = require("../render/canvas-handler");
const { alertCheck } = require("../alerts/alertcheck");
const updateBodyshopCache = require("../web-sockets/updateBodyshopCache");
const uuid = require("uuid").v4;
const { v4 } = require("uuid");
//Test route to ensure Express is responding.
router.get("/test", eventAuthorizationMiddleware, async function (req, res) {
@@ -83,7 +83,7 @@ router.get("/wstest", eventAuthorizationMiddleware, (req, res) => {
// image_path: [],
newMessage: {
conversation: {
id: uuid(),
id: v4(),
archived: false,
bodyshop: {
id: "bfec8c8c-b7f1-49e0-be4c-524455f4e582",

View File

@@ -7,6 +7,7 @@ const { status, markConversationRead } = require("../sms/status");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
// Twilio Webhook Middleware for production
// TODO: Look into this because it technically is never validating anything
const twilioWebhookMiddleware = twilio.webhook({ validate: process.env.NODE_ENV === "PRODUCTION" });
router.post("/receive", twilioWebhookMiddleware, receive);

14
server/tests/api.test.js Normal file
View File

@@ -0,0 +1,14 @@
import { describe, it, expect } from "vitest";
import request from "supertest";
import express from "express";
// Minimal in-memory Express app — this suite only needs the health endpoint.
const app = express();
const healthHandler = (req, res) => res.json({ status: "ok" });
app.get("/api/health", healthHandler);
describe("API", () => {
  it("returns health status", async () => {
    const response = await request(app).get("/api/health");
    expect(response.body).toEqual({ status: "ok" });
    expect(response.status).toBe(200);
  });
});

11
server/tests/math.test.js Normal file
View File

@@ -0,0 +1,11 @@
import { describe, it, expect } from "vitest";
/** Returns the sum of two numbers. */
const add = (a, b) => a + b;
// Sanity check for the local add() helper.
describe("Math", () => {
  it("adds two numbers correctly", () => {
    const total = add(2, 3);
    expect(total).toBe(5);
  });
});