Merge remote-tracking branch 'origin/master-AIO' into feature/IO-2776-cdk-fortellis

# Conflicts:
#	client/src/components/dms-post-form/dms-post-form.component.jsx
#	package-lock.json
#	package.json
#	server/web-sockets/redisSocketEvents.js
This commit is contained in:
Dave
2025-08-21 11:05:03 -04:00
345 changed files with 23812 additions and 16731 deletions

View File

@@ -217,7 +217,7 @@ exports.PbsExportAp = async function (socket, { billids, txEnvelope }) {
socket.emit("ap-export-success", billid);
} else {
CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`);
CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`);
socket.emit("ap-export-failure", {
billid,
error: AccountPostingChange.Message

View File

@@ -105,14 +105,14 @@ exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte
socket.emit("export-success", socket.JobData.id);
} else {
CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`);
CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`);
}
} catch (error) {
CdkBase.createLogEvent(socket, "ERROR", `Error encountered in CdkSelectedCustomer. ${error}`);
await InsertFailedExportLog(socket, error);
}
};
// Was Successful
async function CheckForErrors(socket, response) {
if (response.WasSuccessful === undefined || response.WasSuccessful === true) {
CdkBase.createLogEvent(socket, "DEBUG", `Successful response from DMS. ${response.Message || ""}`);

View File

@@ -14,10 +14,8 @@ const oauthClient = new OAuthClient({
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
});
//TODO:AIO Add in QBO callbacks.
const url = InstanceEndpoints();
exports.default = async (req, res) => {

View File

@@ -20,7 +20,6 @@ exports.default = async (req, res) => {
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
});
try {
@@ -149,6 +148,15 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryVendorRecord",
billid: bill.id,
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -178,6 +186,15 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
},
body: JSON.stringify(Vendor)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertVendorRecord",
billid: bill.id,
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Vendor;
} catch (error) {
@@ -190,7 +207,7 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
}
async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop) {
const { accounts, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req);
const { accounts, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req, bill.job.shopid);
const lines = bill.billlines.map((il) =>
generateBillLine(
@@ -246,11 +263,11 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DocNumber: bill.invoice_number,
//...(bill.job.class ? { ClassRef: { Id: classes[bill.job.class] } } : {}),
...(!(
@@ -263,8 +280,8 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
: {}),
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
PrivateNote: `RO ${bill.job.ro_number || ""}`,
Line: lines
};
@@ -280,6 +297,15 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
},
body: JSON.stringify(billQbo)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertBill",
billid: bill.id,
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Bill;
} catch (error) {
@@ -327,8 +353,8 @@ const generateBillLine = (
accountingconfig.qbo && accountingconfig.qbo_usa && region_config.includes("CA_")
? {}
: {
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
AccountRef: {
value: accounts[account.accountname]
}
@@ -342,7 +368,7 @@ const generateBillLine = (
};
};
async function QueryMetaData(oauthClient, qbo_realmId, req) {
async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
const accounts = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
@@ -354,6 +380,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryAccountType",
status: accounts.response?.status,
bodyshopid,
email: req.user.email
})
setNewRefreshToken(req.user.email, accounts);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
@@ -362,7 +396,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryTaxCode",
status: taxCodes.status,
bodyshopid,
email: req.user.email
})
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
method: "POST",
@@ -370,7 +411,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryClasses",
status: classes.status,
bodyshopid,
email: req.user.email
})
const taxCodeMapping = {};
taxCodes.json &&

View File

@@ -10,7 +10,6 @@ const queries = require("../../graphql-client/queries");
const { refresh: refreshOauthToken, setNewRefreshToken } = require("./qbo-callback");
const OAuthClient = require("intuit-oauth");
const moment = require("moment-timezone");
const GraphQLClient = require("graphql-request").GraphQLClient;
const {
QueryInsuranceCo,
InsertInsuranceCo,
@@ -28,8 +27,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
//Fetch the API Access Tokens & Set them for the session.
@@ -132,22 +130,20 @@ exports.default = async (req, res) => {
// //No error. Mark the payment exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QBO_MARK_PAYMENT_EXPORTED, {
paymentId: payment.id,
payment: {
exportedat: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
paymentid: payment.id,
successful: true,
useremail: req.user.email
}
]
});
await client.setHeaders({ Authorization: BearerToken }).request(queries.QBO_MARK_PAYMENT_EXPORTED, {
paymentId: payment.id,
payment: {
exportedat: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
paymentid: payment.id,
successful: true,
useremail: req.user.email
}
]
});
}
ret.push({ paymentid: payment.id, success: true });
@@ -157,7 +153,7 @@ exports.default = async (req, res) => {
});
//Add the export log error.
if (elgen) {
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
@@ -191,14 +187,15 @@ exports.default = async (req, res) => {
}
};
async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef, bodyshop) {
async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef) {
const { paymentMethods, invoices } = await QueryMetaData(
oauthClient,
qbo_realmId,
req,
payment.job.ro_number,
false,
parentRef
parentRef,
payment.job.shopid
);
if (invoices && invoices.length !== 1) {
@@ -255,6 +252,15 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef,
},
body: JSON.stringify(paymentQbo)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertPayment",
paymentid: payment.id,
status: result.response?.status,
bodyshopid: payment.job.shopid,
email: req.user.email
});
setNewRefreshToken(req.user.email, result);
return result && result.Bill;
} catch (error) {
@@ -266,7 +272,7 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef,
}
}
async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditMemo, parentTierRef) {
async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditMemo, parentTierRef, bodyshopid) {
const invoice = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Invoice where DocNumber like '${ro_number}%'`),
method: "POST",
@@ -274,7 +280,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryInvoice",
status: invoice.response?.status,
bodyshopid,
email: req.user.email
});
const paymentMethods = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From PaymentMethod`),
method: "POST",
@@ -282,6 +296,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryPaymentMethod",
status: paymentMethods.response?.status,
bodyshopid,
email: req.user.email
});
setNewRefreshToken(req.user.email, paymentMethods);
// const classes = await oauthClient.makeApiCall({
@@ -325,6 +347,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryTaxCode",
status: taxCodes.response?.status,
bodyshopid,
email: req.user.email
});
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item`),
method: "POST",
@@ -332,6 +363,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryItems",
status: items.response?.status,
bodyshopid,
email: req.user.email
});
setNewRefreshToken(req.user.email, items);
const itemMapping = {};
@@ -370,14 +409,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
};
}
async function InsertCreditMemo(oauthClient, qbo_realmId, req, payment, parentRef, bodyshop) {
const { paymentMethods, invoices, items, taxCodes } = await QueryMetaData(
async function InsertCreditMemo(oauthClient, qbo_realmId, req, payment, parentRef) {
const { invoices, items, taxCodes } = await QueryMetaData(
oauthClient,
qbo_realmId,
req,
payment.job.ro_number,
true,
parentRef
parentRef,
payment.job.shopid
);
if (invoices && invoices.length !== 1) {
@@ -432,11 +472,22 @@ async function InsertCreditMemo(oauthClient, qbo_realmId, req, payment, parentRe
},
body: JSON.stringify(paymentQbo)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertCreditMemo",
paymentid: payment.id,
status: result.response?.status,
bodyshopid: req.user.bodyshopid,
email: req.user.email
});
setNewRefreshToken(req.user.email, result);
return result && result.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, payment.id, {
error: error && error.message,
error: error,
validationError: JSON.stringify(error?.response?.data),
accountmeta: JSON.stringify({ items, taxCodes }),
method: "InsertCreditMemo"
});
throw error;

View File

@@ -22,8 +22,8 @@ exports.default = async (req, res) => {
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
});
try {
//Fetch the API Access Tokens & Set them for the session.
const response = await apiGqlClient.request(queries.GET_QBO_AUTH, {
@@ -233,6 +233,15 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryCustomer",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -279,6 +288,15 @@ async function InsertInsuranceCo(oauthClient, qbo_realmId, req, job, bodyshop) {
},
body: JSON.stringify(Customer)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertCustomer",
status: result.response.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -305,6 +323,15 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryCustomer",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -331,11 +358,11 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
...(job.ownr_ea ? { PrimaryEmailAddr: { Address: job.ownr_ea.trim() } } : {}),
...(isThreeTier
? {
Job: true,
ParentRef: {
value: parentTierRef.Id
}
Job: true,
ParentRef: {
value: parentTierRef.Id
}
}
: {})
};
try {
@@ -347,6 +374,15 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
},
body: JSON.stringify(Customer)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertCustomer",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -372,6 +408,15 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryCustomer",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -411,6 +456,15 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
},
body: JSON.stringify(Customer)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertCustomer",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -424,7 +478,7 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
exports.InsertJob = InsertJob;
async function QueryMetaData(oauthClient, qbo_realmId, req) {
async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item where active=true maxresults 1000`),
method: "POST",
@@ -432,6 +486,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryItems",
status: items.response?.status,
bodyshopid,
jobid: jobid,
email: req.user.email
})
setNewRefreshToken(req.user.email, items);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode where active=true`),
@@ -440,7 +503,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryTaxCodes",
status: taxCodes.response?.status,
bodyshopid,
jobid: jobid,
email: req.user.email
})
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
method: "POST",
@@ -448,7 +519,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
"Content-Type": "application/json"
}
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "QueryClasses",
status: classes.response?.status,
bodyshopid,
jobid: jobid,
email: req.user.email
})
const taxCodeMapping = {};
taxCodes.json &&
@@ -483,7 +562,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req) {
}
async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, parentTierRef) {
const { items, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req);
const { items, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req, job.shopid, job.id);
const InvoiceLineAdd = CreateInvoiceLines({
bodyshop,
jobs_by_pk: job,
@@ -499,57 +578,55 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
DocNumber: job.ro_number,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}),
}
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -575,6 +652,15 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
},
body: JSON.stringify(invoiceObj)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertInvoice",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {
@@ -598,7 +684,7 @@ async function InsertInvoiceMultiPayerInvoice(
payer,
suffix
) {
const { items, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req);
const { items, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req, job.shopid);
const InvoiceLineAdd = createMultiQbPayerLines({
bodyshop,
jobs_by_pk: job,
@@ -616,58 +702,56 @@ async function InsertInvoiceMultiPayerInvoice(
DocNumber: job.ro_number + suffix,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_") && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}),
}
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -693,6 +777,15 @@ async function InsertInvoiceMultiPayerInvoice(
},
body: JSON.stringify(invoiceObj)
});
logger.LogIntegrationCall({
platform: "QBO",
method: "POST",
name: "InsertInvoice",
status: result.response?.status,
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {

View File

@@ -6,7 +6,7 @@ require("dotenv").config({
function urlBuilder(realmId, object, query = null) {
return `https://${
process.env.NODE_ENV === "production" ? "" : "sandbox-"
}quickbooks.api.intuit.com/v3/company/${realmId}/${object}${query ? `?query=${encodeURIComponent(query)}` : ""}`;
}quickbooks.api.intuit.com/v3/company/${realmId}/${object}?minorversion=75${query ? `&query=${encodeURIComponent(query)}` : ""}`;
}
function StandardizeName(str) {

View File

@@ -68,7 +68,7 @@ exports.default = async (req, res) => {
return;
}
if (process.env.NODE_ENV === "PRODUCTION") {
if (process.env.NODE_ENV === "production") {
res.sendStatus(200);
return;
}

View File

@@ -3,7 +3,6 @@ const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const _ = require("lodash");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
@@ -16,6 +15,7 @@ const { sendServerEmail } = require("../email/sendemail");
const AHDineroFormat = "0.00";
const AhDateFormat = "MMDDYYYY";
const NON_ASCII_REGEX = /[^\x20-\x7E]/g;
const repairOpCodes = ["OP4", "OP9", "OP10"];
const replaceOpCodes = ["OP2", "OP5", "OP11", "OP12"];
@@ -37,13 +37,11 @@ const ftpSetup = {
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
return res.sendStatus(403);
}
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
return res.sendStatus(401);
}
// Send immediate response and continue processing.
@@ -822,7 +820,7 @@ const GenerateDetailLines = (job, line, statuses) => {
BackOrdered: line.status === statuses.default_bo ? "1" : "0",
Cost: (line.billlines[0] && (line.billlines[0].actual_cost * line.billlines[0].quantity).toFixed(2)) || 0,
//Critical: null,
Description: line.line_desc ? line.line_desc.replace(/[^\x00-\x7F]/g, "") : "",
Description: line.line_desc ? line.line_desc.replace(NON_ASCII_REGEX, "") : "",
DiscountMarkup: line.prt_dsmk_m || 0,
InvoiceNumber: line.billlines[0] && line.billlines[0].bill.invoice_number,
IOUPart: 0,
@@ -834,7 +832,7 @@ const GenerateDetailLines = (job, line, statuses) => {
OriginalCost: null,
OriginalInvoiceNumber: null,
PriceEach: line.act_price || 0,
PartNumber: line.oem_partno ? line.oem_partno.replace(/[^\x00-\x7F]/g, "") : "",
PartNumber: line.oem_partno ? line.oem_partno.replace(NON_ASCII_REGEX, "") : "",
ProfitPercent: null,
PurchaseOrderNumber: null,
Qty: line.part_qty || 0,

408
server/data/carfax.js Normal file
View File

@@ -0,0 +1,408 @@
// --- Module dependencies ---
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const InstanceManager = require("../utils/instanceMgr").default;
const { isString, isEmpty } = require("lodash");
const fs = require("fs");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");
const { uploadFileToS3 } = require("../utils/s3");
const crypto = require("crypto");
// Load environment variables from the env file matching the current NODE_ENV.
require("dotenv").config({
  path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
// Date format used for date fields in the CARFAX payload.
const AHDateFormat = "YYYY-MM-DD";
// Matches any character outside printable ASCII (0x20-0x7E).
const NON_ASCII_REGEX = /[^\x20-\x7E]/g;
// SFTP connection settings for the CARFAX drop site (credentials via env vars).
const ftpSetup = {
  host: process.env.CARFAX_HOST,
  port: process.env.CARFAX_PORT,
  username: process.env.CARFAX_USER,
  password: process.env.CARFAX_PASSWORD,
  // Verbose SFTP wire logging outside production; no-op in production.
  debug:
    process.env.NODE_ENV !== "production"
      ? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
      : () => {},
  algorithms: {
    serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
  }
};
// S3 bucket that archives each generated CARFAX payload, selected per instance.
const S3_BUCKET_NAME = InstanceManager({
  imex: "imex-carfax-uploads",
  rome: "rome-carfax-uploads"
});
const region = InstanceManager.InstanceRegion;
// True when running against LocalStack (LOCALSTACK_HOSTNAME set and non-empty).
const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);
// Fire-and-forget archive of a generated CARFAX payload to S3; success and
// failure are only logged — the caller is never blocked or notified.
const uploadToS3 = (payload) => {
  const { bodyshopid, imexshopid, filename, json } = payload;
  // Public-style URL for the object, pointing at LocalStack in local dev.
  const webPath = isLocal
    ? `https://${S3_BUCKET_NAME}.s3.localhost.localstack.cloud:4566/${filename}`
    : `https://${S3_BUCKET_NAME}.s3.${region}.amazonaws.com/${filename}`;
  const logSuccess = () => {
    logger.log("CARFAX-s3-upload", "DEBUG", "api", bodyshopid, {
      imexshopid,
      filename,
      webPath
    });
  };
  const logFailure = (error) => {
    logger.log("CARFAX-s3-upload-error", "ERROR", "api", bodyshopid, {
      imexshopid,
      filename,
      webPath,
      error: error.message,
      stack: error.stack
    });
  };
  uploadFileToS3({ bucketName: S3_BUCKET_NAME, key: filename, content: json })
    .then(logSuccess)
    .catch(logFailure);
};
/**
 * CARFAX export endpoint. Gated by the x-imex-auth header and restricted to
 * production; responds 202 immediately, then continues processing in the
 * background and emails a summary of uploads and errors when finished.
 * @param {object} req - Express request; body may carry bodyshopIds, start,
 *   end, skipUpload and ignoreDateFilter.
 * @param {object} res - Express response.
 */
exports.default = async (req, res) => {
  // Only process if in production environment.
  if (process.env.NODE_ENV !== "production") {
    return res.sendStatus(403);
  }
  // Only process if the appropriate token is provided.
  if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
    return res.sendStatus(401);
  }
  // Send immediate response and continue processing.
  res.status(202).json({
    success: true,
    message: "Processing request ...",
    timestamp: new Date().toISOString()
  });
  try {
    logger.log("CARFAX-start", "DEBUG", "api", null, null);
    const allXMLResults = [];
    const allErrors = [];
    const { bodyshops } = await client.request(queries.GET_CARFAX_SHOPS); //Query for the List of Bodyshop Clients.
    const specificShopIds = req.body.bodyshopIds; // e.g. ["uuid", ...] — limits the run to specific shops
    const { start, end, skipUpload, ignoreDateFilter } = req.body; //YYYY-MM-DD
    // When specific shop ids were supplied, restrict the run to those shops.
    const shopsToProcess =
      specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
    logger.log("CARFAX-shopsToProcess-generated", "DEBUG", "api", null, null);
    if (shopsToProcess.length === 0) {
      logger.log("CARFAX-shopsToProcess-empty", "DEBUG", "api", null, null);
      return;
    }
    await processShopData(shopsToProcess, start, end, skipUpload, ignoreDateFilter, allXMLResults, allErrors);
    // Email a run summary: all accumulated errors plus a digest of each upload.
    await sendServerEmail({
      subject: `CARFAX Report ${moment().format("MM-DD-YY")}`,
      text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
        allXMLResults.map((x) => ({
          imexshopid: x.imexshopid,
          filename: x.filename,
          count: x.count,
          result: x.result
        })),
        null,
        2
      )}`
    });
    logger.log("CARFAX-end", "DEBUG", "api", null, null);
  } catch (error) {
    logger.log("CARFAX-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
  }
};
/**
 * Extracts repair-order data for each shop, serializes it to the CARFAX JSON
 * layout, and delivers it (SFTP + S3 archive, or a local file when skipUpload
 * is set). Successful uploads are appended to allXMLResults and failures to
 * allErrors — both arrays are mutated in place so the caller can email a
 * run summary.
 *
 * Fixes: terminated the `shopid` declaration with a semicolon (was relying on
 * ASI), and the `finally` block now only reports shops that actually had
 * per-job failures — previously every shop (including fully successful ones)
 * got an entry with an empty errors list, and fatally-errored shops were
 * reported twice.
 *
 * @param {Array<object>} shopsToProcess - Bodyshop rows from GET_CARFAX_SHOPS.
 * @param {string} [start] - Start date (YYYY-MM-DD); defaults to 7 days ago.
 * @param {string} [end] - End date (YYYY-MM-DD); optional.
 * @param {boolean} [skipUpload] - Write payloads to ./logs instead of uploading.
 * @param {boolean} [ignoreDateFilter] - Export all jobs regardless of date.
 * @param {Array<object>} allXMLResults - Accumulator for successful uploads.
 * @param {Array<object>} allErrors - Accumulator for shop/job level errors.
 */
async function processShopData(shopsToProcess, start, end, skipUpload, ignoreDateFilter, allXMLResults, allErrors) {
  for (const bodyshop of shopsToProcess) {
    // Stable shop identifier: prefer imexshopid, else the alphanumeric-only shopname.
    const shopid = bodyshop.imexshopid?.toLowerCase() || bodyshop.shopname.replace(/[^a-zA-Z0-9]/g, "").toLowerCase();
    const erroredJobs = [];
    try {
      logger.log("CARFAX-start-shop-extract", "DEBUG", "api", bodyshop.id, {
        shopname: bodyshop.shopname
      });
      // Default window is the last 7 days unless overridden or disabled.
      const { jobs, bodyshops_by_pk } = await client.request(queries.CARFAX_QUERY, {
        bodyshopid: bodyshop.id,
        ...(ignoreDateFilter
          ? {}
          : {
              start: start ? moment(start).startOf("day") : moment().subtract(7, "days").startOf("day"),
              ...(end && { end: moment(end).endOf("day") })
            })
      });
      // Per-job conversion; failed jobs are collected via the error callback.
      const carfaxObject = {
        shopid: shopid,
        shop_name: bodyshop.shopname,
        job: jobs.map((j) =>
          CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
            erroredJobs.push({ job: job, error: error.toString() });
          })
        )
      };
      if (erroredJobs.length > 0) {
        logger.log("CARFAX-failed-jobs", "ERROR", "api", bodyshop.id, {
          count: erroredJobs.length,
          jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
        });
      }
      const jsonObj = {
        bodyshopid: bodyshop.id,
        imexshopid: shopid,
        json: JSON.stringify(carfaxObject, null, 2),
        filename: `${shopid}_${moment().format("DDMMYYYY_HHMMss")}.json`,
        count: carfaxObject.job.length
      };
      if (skipUpload) {
        // Dry-run: keep the payload locally instead of delivering it.
        fs.writeFileSync(`./logs/${jsonObj.filename}`, jsonObj.json);
      } else {
        await uploadViaSFTP(jsonObj);
      }
      allXMLResults.push({
        bodyshopid: bodyshop.id,
        imexshopid: shopid,
        count: jsonObj.count,
        filename: jsonObj.filename,
        result: jsonObj.result
      });
      logger.log("CARFAX-end-shop-extract", "DEBUG", "api", bodyshop.id, {
        shopname: bodyshop.shopname
      });
    } catch (error) {
      // Error at the shop level — query or upload failed for the whole shop.
      logger.log("CARFAX-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
      allErrors.push({
        bodyshopid: bodyshop.id,
        imexshopid: shopid,
        CARFAXid: bodyshop.CARFAXid,
        fatal: true,
        errors: [error.toString()]
      });
    } finally {
      // Report per-job failures only when there are any, so the summary email
      // lists real errors rather than an empty entry for every shop.
      if (erroredJobs.length > 0) {
        allErrors.push({
          bodyshopid: bodyshop.id,
          imexshopid: shopid,
          CARFAXid: bodyshop.CARFAXid,
          errors: erroredJobs.map((ej) => ({
            ro_number: ej.job?.ro_number,
            jobid: ej.job?.id,
            error: ej.error
          }))
        });
      }
    }
  }
}
async function uploadViaSFTP(jsonObj) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("CARFAX-sftp-connection-error", "ERROR", "api", jsonObj.bodyshopid, {
error: errors.message,
stack: errors.stack
})
);
try {
// Upload to S3 first.
uploadToS3(jsonObj);
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
try {
jsonObj.result = await sftp.put(Buffer.from(jsonObj.json), `${jsonObj.filename}`);
logger.log("CARFAX-sftp-upload", "DEBUG", "api", jsonObj.bodyshopid, {
imexshopid: jsonObj.imexshopid,
filename: jsonObj.filename,
result: jsonObj.result
});
} catch (error) {
logger.log("CARFAX-sftp-upload-error", "ERROR", "api", jsonObj.bodyshopid, {
filename: jsonObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
} catch (error) {
logger.log("CARFAX-sftp-error", "ERROR", "api", jsonObj.bodyshopid, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
}
}
const CreateRepairOrderTag = (job, errorCallback) => {
if (!job.job_totals) {
errorCallback({
jobid: job.id,
job: job,
ro_number: job.ro_number,
error: { toString: () => "No job totals for RO." }
});
return {};
}
try {
const ret = {
ro_number: crypto.createHash("md5").update(job.ro_number, "utf8").digest("hex"),
v_vin: job.v_vin || "",
v_year: job.v_model_yr
? parseInt(job.v_model_yr.match(/\d/g))
? parseInt(job.v_model_yr.match(/\d/g).join(""), 10)
: ""
: "",
v_make: job.v_make_desc || "",
v_model: job.v_model_desc || "",
date_estimated:
(job.date_estimated && moment(job.date_estimated).tz(job.bodyshop.timezone).format(AHDateFormat)) ||
(job.created_at && moment(job.created_at).tz(job.bodyshop.timezone).format(AHDateFormat)) ||
"",
data_opened:
(job.date_open && moment(job.date_open).tz(job.bodyshop.timezone).format(AHDateFormat)) ||
(job.created_at && moment(job.created_at).tz(job.bodyshop.timezone).format(AHDateFormat)) ||
"",
date_invoiced:
(job.date_invoiced && moment(job.date_invoiced).tz(job.bodyshop.timezone).format(AHDateFormat)) || "",
loss_date: (job.loss_date && moment(job.loss_date).format(AHDateFormat)) || "",
ins_co_nm: job.ins_co_nm || "",
loss_desc: job.loss_desc || "",
theft_ind: job.theft_ind,
tloss_ind: job.tlos_ind,
subtotal: Dinero(job.job_totals.totals.subtotal).toUnit(),
areaofdamage: {
impact1: generateAreaOfDamage(job.area_of_damage?.impact1 || ""),
impact2: generateAreaOfDamage(job.area_of_damage?.impact2 || "")
},
jobLines: job.joblines.length > 0 ? job.joblines.map((jl) => GenerateDetailLines(jl)) : [generateNullDetailLine()]
};
return ret;
} catch (error) {
logger.log("CARFAX-job-data-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};
const GenerateDetailLines = (line) => {
const ret = {
line_desc: line.line_desc ? line.line_desc.replace(NON_ASCII_REGEX, "") : null,
oem_partno: line.oem_partno ? line.oem_partno.replace(NON_ASCII_REGEX, "") : null,
alt_partno: line.alt_partno ? line.alt_partno.replace(NON_ASCII_REGEX, "") : null,
lbr_ty: generateLaborType(line.mod_lbr_ty),
part_qty: line.part_qty || 0,
part_type: generatePartType(line.part_type),
act_price: line.act_price || 0
};
return ret;
};
const generateNullDetailLine = () => {
return {
line_desc: null,
oem_partno: null,
alt_partno: null,
lbr_ty: null,
part_qty: 0,
part_type: null,
act_price: 0
};
};
const generateAreaOfDamage = (loc) => {
const areaMap = {
"01": "Right Front Corner",
"02": "Right Front Side",
"03": "Right Side",
"04": "Right Rear Side",
"05": "Right Rear Corner",
"06": "Rear",
"07": "Left Rear Corner",
"08": "Left Rear Side",
"09": "Left Side",
10: "Left Front Side",
11: "Left Front Corner",
12: "Front",
13: "Rollover",
14: "Uknown",
15: "Total Loss",
16: "Non-Collision",
19: "All Over",
25: "Hood",
26: "Deck Lid",
27: "Roof",
28: "Undercarriage",
34: "All Over"
};
return areaMap[loc] || null;
};
const generateLaborType = (type) => {
const laborTypeMap = {
laa: "Aluminum",
lab: "Body",
lad: "Diagnostic",
lae: "Electrical",
laf: "Frame",
lag: "Glass",
lam: "Mechanical",
lar: "Refinish",
las: "Structural",
lau: "Other - LAU",
la1: "Other - LA1",
la2: "Other - LA2",
la3: "Other - LA3",
la4: "Other - LA4",
null: "Other",
mapa: "Paint Materials",
mash: "Shop Materials",
rates_subtotal: "Labor Total",
"timetickets.labels.shift": "Shift",
"timetickets.labels.amshift": "Morning Shift",
"timetickets.labels.ambreak": "Morning Break",
"timetickets.labels.pmshift": "Afternoon Shift",
"timetickets.labels.pmbreak": "Afternoon Break",
"timetickets.labels.lunch": "Lunch"
};
return laborTypeMap[type?.toLowerCase()] || null;
};
const generatePartType = (type) => {
const partTypeMap = {
paa: "Aftermarket",
pae: "Existing",
pag: "Glass",
pal: "LKQ",
pan: "OEM",
pao: "Other",
pas: "Sublet",
pasl: "Sublet",
ccc: "CC Cleaning",
ccd: "CC Damage Waiver",
ccdr: "CC Daily Rate",
ccf: "CC Refuel",
ccm: "CC Mileage",
prt_dsmk_total: "Line Item Adjustment"
};
return partTypeMap[type?.toLowerCase()] || null;
};

View File

@@ -2,7 +2,6 @@ const path = require("path");
const queries = require("../graphql-client/queries");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const _ = require("lodash");
const logger = require("../utils/logger");
const fs = require("fs");
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
@@ -29,13 +28,11 @@ const ftpSetup = {
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
return res.sendStatus(403);
}
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
return res.sendStatus(401);
}
// Send immediate response and continue processing.

View File

@@ -3,7 +3,6 @@ const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const _ = require("lodash");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
@@ -36,13 +35,11 @@ const ftpSetup = {
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
return res.sendStatus(403);
}
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
return res.sendStatus(401);
}
// Send immediate response and continue processing.

View File

@@ -3,4 +3,7 @@ exports.autohouse = require("./autohouse").default;
exports.chatter = require("./chatter").default;
exports.claimscorp = require("./claimscorp").default;
exports.kaizen = require("./kaizen").default;
exports.usageReport = require("./usageReport").default;
exports.usageReport = require("./usageReport").default;
exports.podium = require("./podium").default;
exports.emsUpload = require("./emsUpload").default;
exports.carfax = require("./carfax").default;

22
server/data/emsUpload.js Normal file
View File

@@ -0,0 +1,22 @@
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const s3Client = require("../utils/s3"); // Using the S3 client utilities with LocalStack support
const emsUpload = async (req, res) => {
try {
const { bodyshopid, ciecaid, clm_no, ownr_ln } = req.body;
const presignedUrl = await s3Client.getPresignedUrl({
bucketName: process.env.S3_EMS_UPLOAD_BUCKET,
key: `${bodyshopid}/${ciecaid}-${clm_no}-${ownr_ln}-${moment().format("YYYY-MM-DD--HH-mm-ss")}.zip`
});
res.status(200).json({ presignedUrl });
} catch (error) {
logger.log("ems-upload-presign-error", "ERROR", req?.user?.email, null, {
error: error.message,
stack: error.stack
});
res.status(500).json({ error: error.message, stack: error.stack });
}
};
exports.default = emsUpload;

View File

@@ -3,7 +3,6 @@ const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const _ = require("lodash");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
@@ -35,13 +34,11 @@ const ftpSetup = {
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
return res.sendStatus(403);
}
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
return res.sendStatus(401);
}
// Send immediate response and continue processing.

209
server/data/podium.js Normal file
View File

@@ -0,0 +1,209 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");
const ftpSetup = {
host: process.env.PODIUM_HOST,
port: process.env.PODIUM_PORT,
username: process.env.PODIUM_USER,
password: process.env.PODIUM_PASSWORD,
debug:
process.env.NODE_ENV !== "production"
? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
: () => {},
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
return res.sendStatus(403);
}
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
return res.sendStatus(401);
}
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
logger.log("podium-start", "DEBUG", "api", null, null);
const allCSVResults = [];
const allErrors = [];
const { bodyshops } = await client.request(queries.GET_PODIUM_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("podium-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("podium-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
await processShopData(shopsToProcess, start, end, skipUpload, allCSVResults, allErrors);
await sendServerEmail({
subject: `Podium Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allCSVResults.map((x) => ({
imexshopid: x.imexshopid,
filename: x.filename,
count: x.count,
result: x.result
})),
null,
2
)}`
});
logger.log("podium-end", "DEBUG", "api", null, null);
} catch (error) {
logger.log("podium-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processShopData(shopsToProcess, start, end, skipUpload, allCSVResults, allErrors) {
for (const bodyshop of shopsToProcess) {
const erroredJobs = [];
try {
logger.log("podium-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const { jobs, bodyshops_by_pk } = await client.request(queries.PODIUM_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(2, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const podiumObject = jobs.map((j) => {
return {
"Podium Account ID": bodyshops_by_pk.podiumid,
"First Name": j.ownr_co_nm ? null : j.ownr_fn,
"Last Name": j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
"SMS Number": null,
"Phone 1": j.ownr_ph1,
"Phone 2": j.ownr_ph2,
Email: j.ownr_ea,
"Delivered Date":
(j.actual_delivery && moment(j.actual_delivery).tz(bodyshop.timezone).format("MM/DD/YYYY")) || ""
};
});
if (erroredJobs.length > 0) {
logger.log("podium-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
const csvObj = {
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
csv: converter.json2csv(podiumObject, { emptyFieldValue: "" }),
filename: `${bodyshop.podiumid}-${moment().format("YYYYMMDDTHHMMss")}.csv`,
count: podiumObject.length
};
if (skipUpload) {
fs.writeFileSync(`./logs/${csvObj.filename}`, csvObj.csv);
} else {
await uploadViaSFTP(csvObj);
}
allCSVResults.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
podiumid: bodyshop.podiumid,
count: csvObj.count,
filename: csvObj.filename,
result: csvObj.result
});
logger.log("podium-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("podium-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
podiumid: bodyshop.podiumid,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
podiumid: bodyshop.podiumid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
}
async function uploadViaSFTP(csvObj) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("podium-sftp-connection-error", "ERROR", "api", csvObj.bodyshopid, {
error: errors.message,
stack: errors.stack
})
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
try {
csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
logger.log("podium-sftp-upload", "DEBUG", "api", csvObj.bodyshopid, {
imexshopid: csvObj.imexshopid,
filename: csvObj.filename,
result: csvObj.result
});
} catch (error) {
logger.log("podium-sftp-upload-error", "ERROR", "api", csvObj.bodyshopid, {
filename: csvObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
} catch (error) {
logger.log("podium-sftp-error", "ERROR", "api", csvObj.bodyshopid, {
error: error.message,
stack: error.stack
});
throw error;
} finally {
sftp.end();
}
}

View File

@@ -35,7 +35,7 @@ exports.default = async (req, res) => {
//Query the usage data.
const queryResults = await client.request(queries.STATUS_UPDATE, {
today: moment().startOf("day").subtract(7, "days"),
period: moment().subtract(90, "days").startOf("day")
period: moment().subtract(365, "days").startOf("day")
});
//Massage the data.
@@ -66,7 +66,7 @@ exports.default = async (req, res) => {
Usage Report for ${moment().format("MM/DD/YYYY")} for Rome Online Customers.
Notes:
- Days Since Creation: The number of days since the shop was created. Only shops created in the last 90 days are included.
- Days Since Creation: The number of days since the shop was created. Only shops created in the last 365 days are included.
- Updated values should be higher than created values.
- Counts are inclusive of the last 7 days of data.
`,

View File

@@ -1,5 +1,3 @@
const moment = require("moment");
const { default: RenderInstanceManager } = require("../utils/instanceMgr");
const { header, end, start } = require("./html");
// Required Strings
@@ -7,19 +5,6 @@ const { header, end, start } = require("./html");
// - subHeader - The subheader of the email
// - body - The body of the email
// Optional Strings (Have default values)
// - footer - The footer of the email
// - dateLine - The date line of the email
const defaultFooter = () => {
return RenderInstanceManager({
imex: "ImEX Online Collision Repair Management System",
rome: "Rome Technologies"
});
};
const now = () => moment().format("MM/DD/YYYY @ hh:mm a");
/**
* Generate the email template
* @param strings
@@ -32,81 +17,48 @@ const generateEmailTemplate = (strings) => {
header +
start +
`
<table class="row">
<tbody>
<tr>
<th class="small-12 large-12 columns first last">
<table>
<tbody>
<tr>
<td>
<h6 style="text-align:left"><strong>${strings.header}</strong></h6>
</td>
</tr>
<tr>
<td>
<p style="font-size:90%">${strings.subHeader}</p>
</td>
</tr>
</tbody>
</table>
</th>
</tr>
</tbody>
</table>
<!-- Report Title -->
${
strings.header &&
`
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 8px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<h6 style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; color: inherit; word-wrap: normal; font-weight: normal; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 23px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; text-align: center;"><strong style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">${strings.header}</strong></h6>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
`
}
${
strings.subHeader &&
`
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 95%;">${strings.subHeader}</p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
`
}
<!-- End Report Title -->
<!-- Task Detail -->
<table class="row">
<tbody>
<tr>
<th class="small-12 large-12 columns first last">
<table>
<tbody>
<tr>
<td>${strings.body}</td>
</tr>
</tbody>
</table>
</th>
</tr>
</tbody>
</table>
<!-- End Task Detail -->
<!-- Footer -->
<table class="row collapsed footer" id="non-printable">
<tbody>
<tr>
<th class="small-3 large-3 columns first">
<table>
<tbody>
<tr>
<td><p style="font-size:70%; padding-right:10px">${strings?.dateLine || now()}</p></td>
</tr>
</tbody>
</table>
</th>
<th class="small-6 large-6 columns">
<table>
<tbody>
<tr>
<td><p style="font-size:70%; text-align:center">${strings?.footer || defaultFooter()}</p></td>
</tr>
</tbody>
</table>
</th>
<th class="small-3 large-3 columns last">
<table>
<tbody>
<tr>
<td><p style="font-size:70%">&nbsp;</p></td>
</tr>
</tbody>
</table>
</th>
</tr>
</tbody>
</table>` +
end
${
strings.body &&
`
<!-- Report Detail -->
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
${strings.body}
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<!-- End Report Detail -->
`
}
` +
end(strings.dateLine)
);
};

File diff suppressed because it is too large Load Diff

View File

@@ -40,7 +40,9 @@ const logEmail = async (req, email) => {
to: req?.body?.to,
cc: req?.body?.cc,
subject: req?.body?.subject,
email
email,
errorMessage: error?.message,
errorStack: error?.stack
// info,
});
}
@@ -68,6 +70,7 @@ const sendServerEmail = async ({ subject, text }) => {
]
}
},
// eslint-disable-next-line no-unused-vars
(err, info) => {
logger.log("server-email-failure", err ? "error" : "debug", null, null, {
message: err?.message,
@@ -80,6 +83,108 @@ const sendServerEmail = async ({ subject, text }) => {
}
};
/**
 * Send the onboarding "welcome" email for a newly provisioned shop user.
 *
 * @param {Object} opts
 * @param {string} opts.to - Recipient address.
 * @param {string} opts.resetLink - Firebase password-reset URL embedded in the email body.
 * @param {string} opts.dateLine - Pre-formatted date string forwarded to the email template.
 * @param {Object} [opts.features] - Bodyshop feature flags; `features.allAccess` selects
 *   full-product vs. lite-product branding in the copy. Optional-chained throughout so a
 *   missing `features` falls back to the lite branding instead of throwing mid-template.
 * @param {string|string[]} [opts.bcc] - Optional BCC recipient(s).
 *
 * Errors are caught and logged via `logger.log("server-email-failure", ...)`; the
 * function never throws, so a failed send is visible only in the logs.
 */
const sendWelcomeEmail = async ({ to, resetLink, dateLine, features, bcc }) => {
  try {
    await mailer.sendMail({
      // Instance-specific sender identity (ImEX vs. Rome deployment).
      from: InstanceManager({
        imex: `ImEX Online <noreply@imex.online>`,
        rome: `Rome Online <noreply@romeonline.io>`
      }),
      to,
      bcc,
      subject: InstanceManager({
        imex: "Welcome to the ImEX Online platform.",
        rome: "Welcome to the Rome Online platform."
      }),
      html: generateEmailTemplate({
        header: InstanceManager({
          imex: "Welcome to the ImEX Online platform.",
          rome: "Welcome to the Rome Online platform."
        }),
        subHeader: `Your ${InstanceManager({imex: features?.allAccess ? "ImEX Online": "ImEX Lite", rome: features?.allAccess ? "RO Manager" : "RO Basic"})} shop setup has been completed, and this email will include all the information you need to begin.`,
        // NOTE(review): the body below deliberately closes table markup opened by
        // generateEmailTemplate before opening its own rows, and leaves the final
        // row open for the template wrapper to close — confirm against
        // generateEmailTemplate before restructuring the markup.
        body: `
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">To finish setting up your account, visit this link and enter your desired password. <a href=${resetLink} style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">Reset Password</a></p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">To access your ${InstanceManager({imex: features?.allAccess ? "ImEX Online": "ImEX Lite", rome: features?.allAccess ? "RO Manager" : "RO Basic"})} shop, visit <a href=${InstanceManager({imex: "https://imex.online/", rome: "https://romeonline.io/"})} style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${InstanceManager({imex: "imex.online", rome: "romeonline.io"})}</a>. Your username is your email, and your password is what you previously set up. Contact support for additional logins.</p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
${InstanceManager({
  rome: `
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">To push estimates over from your estimating system, you must download the Web-Est EMS Unzipper & Rome Online Partner (Computers using Windows only). Here are some steps to help you get started.</p>
</td></tr>
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<ul style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 1%; padding-left: 30px;">
<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">Download and set up the Web-Est EMS Unzipper - <a href="https://help.imex.online/en/article/how-to-set-up-the-ems-unzip-downloader-on-web-est-n9hbcv/" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">How to setup the EMS Unzip Downloader on Web-Est</a></li>
<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">Download and set up Rome Online Partner - <a href="https://help.imex.online/en/article/setting-up-the-rome-online-partner-1xsw8tb/" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">Setting up the Rome Online Partner</a></li>
</ul>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">Once you successfully set up the partner, now it's time to do some initial in-product items: Please note, <b>an estimate must be exported from the estimating platform to use tours.</b></p>
</td></tr>
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<ul style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 1%; padding-left: 30px;">
<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">Send estimate from Web-Est to RO Basic - <a href="https://help.imex.online/en/article/how-to-send-estimates-from-web-est-to-the-management-system-ox0h9a/" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">How to send estimates from Web-Est to the Management System</a></li>
<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">Once completed, learn how to use RO Basic by accessing the tours at the bottom middle of the screen (labeled “Training Tours”). These walkthroughs will show you how to navigate from creating an RO to closing an RO - <a href="https://www.youtube.com/watch?v=gcbSe5med0I" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">ROME Collision Management Youtube Training Videos</a></li>
</ul>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">If you need any assistance with setting up the programs, or if you want a dedicated Q&A session with one of our customer success specialists, schedule by clicking this link - <a href="https://rometech.zohobookings.com/#/PSAT" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">Rome Basic Training Booking</a></p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">If you have additional questions or need any support, feel free to use the RO Basic support chat (blue chat box located in the bottom right corner) or give us a call at <a href="tel:14103576700" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">(410) 357-6700</a>. We are here to help make your experience seamless!</p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
`
})}
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">In addition to the training tour, you can also book a live one-on-one demo to see exactly how our system can help streamline the repair process at your shop, schedule by clicking this link - <a href="https://outlook.office.com/bookwithme/user/0aa3ae2c6d59497d9f93fb72479848dc@imexsystems.ca/meetingtype/Qy7CsXl5MkuUJ0NRD7B1AA2?anonymous&ep=mlink" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${InstanceManager({imex: "ImEX Lite", rome: "Rome Basic"})} Demo Booking</a></p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 8px; width: 734px; padding-left: 0px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">Thanks,</p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 8px; width: 734px; padding-left: 0px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">The ${InstanceManager({imex: "ImEX Online", rome: "Rome Online"})} Team</p>
`,
        dateLine
      })
    });
  } catch (error) {
    // Best-effort send: failures are logged, never surfaced to the caller.
    logger.log("server-email-failure", "error", null, null, { error });
  }
};
const sendTaskEmail = async ({ to, subject, type = "text", html, text, attachments }) => {
try {
mailer.sendMail(
@@ -93,6 +198,7 @@ const sendTaskEmail = async ({ to, subject, type = "text", html, text, attachmen
...(type === "text" ? { text } : { html }),
attachments: attachments || null
},
// eslint-disable-next-line no-unused-vars
(err, info) => {
// (message, type, user, record, meta
logger.log("server-email", err ? "error" : "debug", null, null, { message: err?.message, stack: err?.stack });
@@ -143,22 +249,20 @@ const sendEmail = async (req, res) => {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
attachments:
[
...((req.body.attachments &&
req.body.attachments.map((a) => {
return {
filename: a.filename,
path: a.path
};
})) ||
[]),
...downloadedMedia.map((a) => {
attachments: [
...(req.body.attachments &&
req.body.attachments.map((a) => {
return {
path: a
filename: a.filename,
path: a.path
};
})
] || null,
})),
...downloadedMedia.map((a) => {
return {
path: a
};
})
],
html: isObject(req.body?.templateStrings) ? generateEmailTemplate(req.body.templateStrings) : req.body.html,
ses: {
// optional extra arguments for SendRawEmail
@@ -273,6 +377,7 @@ ${body.bounce?.bouncedRecipients.map(
)}
`
},
// eslint-disable-next-line no-unused-vars
(err, info) => {
logger.log("sns-error", err ? "error" : "debug", "api", null, {
errorMessage: err?.message,
@@ -294,5 +399,6 @@ module.exports = {
sendEmail,
sendServerEmail,
sendTaskEmail,
emailBounce
emailBounce,
sendWelcomeEmail
};

View File

@@ -17,11 +17,13 @@ const { formatTaskPriority } = require("../notifications/stringHelpers");
const tasksEmailQueue = taskEmailQueue();
// Cleanup function for the Tasks Email Queue
// eslint-disable-next-line no-unused-vars
const tasksEmailQueueCleanup = async () => {
try {
// Example async operation
// console.log("Performing Tasks Email Reminder process cleanup...");
await new Promise((resolve) => tasksEmailQueue.destroy(() => resolve()));
// eslint-disable-next-line no-unused-vars
} catch (err) {
// console.error("Tasks Email Reminder process cleanup failed:", err);
}
@@ -254,10 +256,15 @@ const tasksRemindEmail = async (req, res) => {
header: `${allTasks.length} Tasks require your attention`,
subHeader: `Please click on the Tasks below to view the Task.`,
dateLine,
body: `<ul>
body: `
<ul style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; margin: 1%; padding-left: 30px;">
${allTasks
.map((task) =>
`<li><a href="${InstanceEndpoints()}/manage/tasks/alltasks?taskid=${task.id}">${task.title} - Priority: ${formatTaskPriority(task.priority)} ${task.due_date ? `${formatDate(task.due_date)}` : ""} | Bodyshop: ${task.bodyshop.shopname}</a></li>`.trim()
`
<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">
<a href="${InstanceEndpoints()}/manage/tasks/alltasks?taskid=${task.id}" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${task.title} - Priority: ${formatTaskPriority(task.priority)} ${task.due_date ? `${formatDate(task.due_date)}` : ""} | Bodyshop: ${task.bodyshop.shopname}</a>
</li>
`.trim()
)
.join("")}
</ul>`

View File

@@ -1,14 +1,10 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const admin = require("firebase-admin");
const logger = require("../utils/logger");
//const { sendProManagerWelcomeEmail } = require("../email/sendemail");
const client = require("../graphql-client/graphql-client").client;
const serviceAccount = require(process.env.FIREBASE_ADMINSDK_JSON);
//const generateEmailTemplate = require("../email/generateTemplate");
const admin = require("firebase-admin");
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const client = require("../graphql-client/graphql-client").client;
const { sendWelcomeEmail } = require("../email/sendemail");
const { GET_USER_BY_EMAIL } = require("../graphql-client/queries");
admin.initializeApp({
credential: admin.credential.cert(serviceAccount),
@@ -201,6 +197,94 @@ const unsubscribe = async (req, res) => {
}
};
/**
 * Express handler: (re)send the platform welcome email to an existing user.
 *
 * Expects `req.body`:
 *   - authid {string} Firebase auth UID of the user.
 *   - email  {string} user's email; looked up case-insensitively in the database.
 *   - bcc    {string|string[]} optional BCC recipient(s) forwarded to the mailer.
 *
 * Responds 200 on success, 200 (without sending) when the address is flagged
 * invalid, 404 when the user is missing from Firebase or the database, and
 * the thrown status (or 500) on any other failure.
 */
const getWelcomeEmail = async (req, res) => {
  const { authid, email, bcc } = req.body;
  try {
    // Fetch user from Firebase. getUser rejects for unknown UIDs, so the
    // explicit falsy check below is a defensive fallback.
    const userRecord = await admin.auth().getUser(authid);
    if (!userRecord) {
      // Plain-object throws carry an HTTP status for the catch block below.
      throw { status: 404, message: "User not found in Firebase." };
    }

    // Fetch user data from the database via GraphQL (emails stored lowercase).
    const dbUserResult = await client.request(GET_USER_BY_EMAIL, { email: email.toLowerCase() });
    const dbUser = dbUserResult?.users?.[0];
    if (!dbUser) {
      throw { status: 404, message: "User not found in database." };
    }

    // Skip — but do not fail — when the address has been flagged invalid.
    if (!dbUser.validemail) {
      logger.log("admin-send-welcome-email-skip", "debug", req.user.email, null, {
        message: "User email is not valid, skipping email.",
        email
      });
      return res.status(200).json({ message: "User email is not valid, email not sent." });
    }

    // Generate the password-reset link embedded in the welcome email.
    const resetLink = await admin.auth().generatePasswordResetLink(dbUser.email);

    // Send the welcome email branded per the user's bodyshop.
    // NOTE(review): assumes associations[0] is the primary shop and that its
    // timezone/features are populated — confirm against the admin caller.
    await sendWelcomeEmail({
      to: dbUser.email,
      resetLink,
      dateLine: moment().tz(dbUser.associations?.[0]?.bodyshop?.timezone).format("MM/DD/YYYY @ hh:mm a"),
      features: dbUser.associations?.[0]?.bodyshop?.features,
      bcc
    });

    // Log success and return response.
    logger.log("admin-send-welcome-email", "debug", req.user.email, null, {
      request: req.body,
      ioadmin: true,
      emailSentTo: email
    });
    return res.status(200).json({ message: "Welcome email sent successfully." });
  } catch (error) {
    // Level lowercased from "ERROR" to match every other logger.log call in
    // this module ("debug"/"error" elsewhere).
    logger.log("admin-send-welcome-email-error", "error", req.user.email, null, { error });
    if (!res.headersSent) {
      return res.status(error.status || 500).json({
        message: error.message || "Error sending welcome email.",
        error
      });
    }
  }
};
/**
 * Generates a Firebase password-reset link for an existing user.
 * Expects { authid, email } in the request body; responds with the link on
 * success, and maps Firebase's "auth/user-not-found" rejection to a 404
 * instead of a generic 500.
 */
const getResetLink = async (req, res) => {
  const { authid, email } = req.body;
  logger.log("admin-reset-link", "debug", req.user.email, null, { authid, email });
  try {
    // getUser rejects (never resolves null) when the uid is unknown, so the
    // old `if (!userRecord)` guard was dead code — map the rejection instead.
    try {
      await admin.auth().getUser(authid);
    } catch (fbError) {
      if (fbError.code === "auth/user-not-found") {
        throw { status: 404, message: "User not found in Firebase." };
      }
      throw fbError;
    }
    // Generate password reset link
    const resetLink = await admin.auth().generatePasswordResetLink(email);
    // Log success and return response
    logger.log("admin-reset-link-success", "debug", req.user.email, null, {
      request: req.body,
      ioadmin: true
    });
    return res.status(200).json({ message: "Reset link generated successfully.", resetLink });
  } catch (error) {
    return res.status(error.status || 500).json({
      message: error.message || "Error generating reset link.",
      error
    });
  }
};
module.exports = {
admin,
createUser,
@@ -208,23 +292,7 @@ module.exports = {
getUser,
sendNotification,
subscribe,
unsubscribe
unsubscribe,
getWelcomeEmail,
getResetLink
};
//Admin claims code.
// const uid = "JEqqYlsadwPEXIiyRBR55fflfko1";
// admin
// .auth()
// .getUser(uid)
// .then((user) => {
// console.log(user);
// admin.auth().setCustomUserClaims(uid, {
// ioadmin: true,
// "https://hasura.io/jwt/claims": {
// "x-hasura-default-role": "debug",
// "x-hasura-allowed-roles": ["admin"],
// "x-hasura-user-id": uid,
// },
// });
// });

View File

@@ -1,17 +1,19 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
//New bug introduced with Graphql Request.
// https://github.com/prisma-labs/graphql-request/issues/206
// const { Headers } = require("cross-fetch");
// global.Headers = global.Headers || Headers;
exports.client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
"x-hasura-admin-secret": process.env.HASURA_ADMIN_SECRET
}
});
exports.unauthclient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT);
const unauthorizedClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT);
module.exports = {
client,
unauthorizedClient
};

View File

@@ -9,6 +9,7 @@ query FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID($mssid: String!, $phone: String!) {
}
}`;
// Unused
exports.GET_JOB_BY_RO_NUMBER = `
query GET_JOB_BY_RO_NUMBER($ro_number: String!) {
jobs(where:{ro_number:{_eq:$ro_number}}) {
@@ -221,6 +222,7 @@ query QUERY_JOBS_FOR_RECEIVABLES_EXPORT($ids: [uuid!]!) {
rate_mash
rate_matd
class
shopid
ca_bc_pvrt
ca_customer_gst
towing_payable
@@ -479,6 +481,7 @@ query QUERY_BILLS_FOR_PAYABLES_EXPORT($bills: [uuid!]!) {
ownr_ln
ownr_co_nm
class
shopid
}
billlines{
id
@@ -529,6 +532,7 @@ exports.QUERY_PAYMENTS_FOR_EXPORT = `
ownr_fn
ownr_ln
ownr_co_nm
shopid
bodyshop {
accountingconfig
md_responsibility_centers
@@ -874,6 +878,43 @@ exports.CHATTER_QUERY = `query CHATTER_EXPORT($start: timestamptz, $bodyshopid:
}
}`;
// CARFAX export: shop identity plus converted jobs with a VIN whose invoice
// date falls in ($start, $end], including part/labor detail per job line.
exports.CARFAX_QUERY = `query CARFAX_EXPORT($start: timestamptz, $bodyshopid: uuid!, $end: timestamptz) {
  bodyshops_by_pk(id: $bodyshopid){
    id
    shopname
    imexshopid
    timezone
  }
  jobs(where: {_and: [{converted: {_eq: true}}, {v_vin: {_is_null: false}}, {date_invoiced: {_gt: $start}}, {date_invoiced: {_lte: $end}}, {shopid: {_eq: $bodyshopid}}]}) {
    id
    created_at
    ro_number
    v_model_yr
    v_model_desc
    v_make_desc
    v_vin
    date_estimated
    date_open
    date_invoiced
    loss_date
    ins_co_nm
    loss_desc
    theft_ind
    tlos_ind
    job_totals
    area_of_damage
    joblines(where: {removed: {_eq: false}}) {
      line_desc
      oem_partno
      alt_partno
      mod_lbr_ty
      part_qty
      part_type
      act_price
    }
  }
}`;
exports.CLAIMSCORP_QUERY = `query CLAIMSCORP_EXPORT($start: timestamptz, $bodyshopid: uuid!, $end: timestamptz) {
bodyshops_by_pk(id: $bodyshopid){
id
@@ -1323,6 +1364,27 @@ exports.KAIZEN_QUERY = `query KAIZEN_EXPORT($start: timestamptz, $bodyshopid: uu
}
}`;
// Podium export: shop identity plus converted jobs delivered in ($start, $end]
// that have at least one contact method (owner phone or email).
exports.PODIUM_QUERY = `query PODIUM_EXPORT($start: timestamptz, $bodyshopid: uuid!, $end: timestamptz) {
  bodyshops_by_pk(id: $bodyshopid){
    id
    shopname
    podiumid
    timezone
  }
  jobs(where: {_and: [{converted: {_eq: true}}, {actual_delivery: {_gt: $start}}, {actual_delivery: {_lte: $end}}, {shopid: {_eq: $bodyshopid}}, {_or: [{ownr_ph1: {_is_null: false}}, {ownr_ea: {_is_null: false}}]}]}) {
    actual_delivery
    id
    created_at
    ro_number
    ownr_fn
    ownr_ln
    ownr_co_nm
    ownr_ph1
    ownr_ph2
    ownr_ea
  }
}`;
exports.UPDATE_JOB = `
mutation UPDATE_JOB($jobId: uuid!, $job: jobs_set_input!) {
update_jobs(where: { id: { _eq: $jobId } }, _set: $job) {
@@ -1574,6 +1636,7 @@ query QUERY_JOB_COSTING_DETAILS($id: uuid!) {
ca_customer_gst
dms_allocation
cieca_pfl
cieca_stl
materials
joblines(where: { removed: { _eq: false } }) {
id
@@ -1690,6 +1753,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
ca_customer_gst
dms_allocation
cieca_pfl
cieca_stl
materials
joblines(where: {removed: {_eq: false}}) {
id
@@ -1752,6 +1816,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
}
}`;
// Exists in Commented out Query
exports.INSERT_IOEVENT = ` mutation INSERT_IOEVENT($event: ioevents_insert_input!) {
insert_ioevents_one(object: $event) {
id
@@ -1788,6 +1853,14 @@ exports.GET_CHATTER_SHOPS = `query GET_CHATTER_SHOPS {
}
}`;
// Shops for the CARFAX feed. NOTE(review): unlike the other *_SHOPS queries
// this one has no filter — presumably filtered downstream by imexshopid; confirm.
exports.GET_CARFAX_SHOPS = `query GET_CARFAX_SHOPS {
  bodyshops{
    id
    shopname
    imexshopid
  }
}`;
exports.GET_CLAIMSCORP_SHOPS = `query GET_CLAIMSCORP_SHOPS {
bodyshops(where: {claimscorpid: {_is_null: false}, _or: {claimscorpid: {_neq: ""}}}){
id
@@ -1848,6 +1921,16 @@ exports.GET_KAIZEN_SHOPS = `query GET_KAIZEN_SHOPS($imexshopid: [String]) {
}
}`;
// Shops enrolled in Podium: podiumid must be present and non-empty.
exports.GET_PODIUM_SHOPS = `query GET_PODIUM_SHOPS {
  bodyshops(where: {podiumid: {_is_null: false}, _or: {podiumid: {_neq: ""}}}){
    id
    shopname
    podiumid
    imexshopid
    timezone
  }
}`;
exports.DELETE_ALL_DMS_VEHICLES = `mutation DELETE_ALL_DMS_VEHICLES{
delete_dms_vehicles(where: {}) {
affected_rows
@@ -2771,6 +2854,7 @@ exports.GET_BODYSHOP_BY_ID = `
imexshopid
intellipay_config
state
notification_followers
}
}
`;
@@ -2807,6 +2891,26 @@ exports.GET_DOCUMENTS_BY_JOB = `
}
}
}`;
// Documents attached to a bill: total byte size (aggregate sum) plus the
// document list ordered newest-first by capture time.
exports.GET_DOCUMENTS_BY_BILL = `
query GET_DOCUMENTS_BY_BILL($billId: uuid!) {
  documents_aggregate(where: {billid: {_eq: $billId}}) {
    aggregate {
      sum {
        size
      }
    }
  }
  documents(order_by: {takenat: desc}, where: {billid: {_eq: $billId}}) {
    id
    name
    key
    type
    size
    takenat
    extension
  }
}
`;
exports.QUERY_TEMPORARY_DOCS = ` query QUERY_TEMPORARY_DOCS {
documents(where: { jobid: { _is_null: true } }, order_by: { takenat: desc }) {
@@ -2853,3 +2957,147 @@ query GET_BODYSHOP_BY_MERCHANTID($merchantID: String!) {
email
}
}`;
// Looks up a user (and their shop associations) by exact email match; used by
// admin tooling such as the welcome-email sender.
exports.GET_USER_BY_EMAIL = `
query GET_USER_BY_EMAIL($email: String!) {
  users(where: {email: {_eq: $email}}) {
    email
    validemail
    associations {
      id
      shopid
      bodyshop {
        id
        convenient_company
        features
        timezone
      }
    }
  }
}`;
// Resolves a job by RO number within a single shop (limit 1); returns the
// shop's timezone for timestamp formatting. Used by the VSSTA integration.
exports.GET_JOB_BY_RO_NUMBER_AND_SHOP_ID = `
query GET_JOB_BY_RO_NUMBER_AND_SHOP_ID($roNumber: String!, $shopId: uuid!) {
  jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}, limit: 1) {
    id
    shopid
    bodyshop {
      timezone
    }
  }
}
`;
// Inserts one or more document metadata rows; returns id/name/key of each.
exports.INSERT_NEW_DOCUMENT = `
mutation INSERT_NEW_DOCUMENT($docInput: [documents_insert_input!]!) {
  insert_documents(objects: $docInput) {
    returning {
      id
      name
      key
    }
  }
}
`;
// Adds job watchers, silently skipping rows that already exist
// (on_conflict with an empty update_columns list is a no-op upsert).
exports.INSERT_JOB_WATCHERS = `
mutation INSERT_JOB_WATCHERS($watchers: [job_watchers_insert_input!]!) {
  insert_job_watchers(objects: $watchers, on_conflict: { constraint: job_watchers_pkey, update_columns: [] }) {
    affected_rows
  }
}
`;
// Emails that should be auto-added as watchers for a shop: active
// associations opted into auto-add, plus the named active employees.
exports.GET_NOTIFICATION_WATCHERS = `
query GET_NOTIFICATION_WATCHERS($shopId: uuid!, $employeeIds: [uuid!]!) {
  associations(where: {
    _and: [
      { shopid: { _eq: $shopId } },
      { active: { _eq: true } },
      { notifications_autoadd: { _eq: true } }
    ]
  }) {
    id
    useremail
  }
  employees(where: { id: { _in: $employeeIds }, shopid: { _eq: $shopId }, active: { _eq: true } }) {
    user_email
  }
}
`;
// Minimal watcher list for one job: emails plus Firebase auth ids.
exports.GET_JOB_WATCHERS_MINIMAL = `
query GET_JOB_WATCHERS_MINIMAL($jobid: uuid!) {
  job_watchers(where: { jobid: { _eq: $jobid } }) {
    user_email
    user {
      authid
    }
  }
}
`;
// Writes a single integration_log row; returns its id.
exports.INSERT_INTEGRATION_LOG = `
mutation INSERT_INTEGRATION_LOG($log: integration_log_insert_input!) {
  insert_integration_log_one(object: $log) {
    id
  }
}
`;
// Records SMS opt-outs; re-inserting an existing (bodyshop, phone) pair just
// bumps updated_at via the on_conflict clause.
exports.INSERT_PHONE_NUMBER_OPT_OUT = `
mutation INSERT_PHONE_NUMBER_OPT_OUT($optOutInput: [phone_number_opt_out_insert_input!]!) {
  insert_phone_number_opt_out(objects: $optOutInput, on_conflict: { constraint: phone_number_consent_bodyshopid_phone_number_key, update_columns: [updated_at] }) {
    affected_rows
    returning {
      id
      bodyshopid
      phone_number
      created_at
      updated_at
    }
  }
}
`;
// Query to check if a phone number is opted out
exports.CHECK_PHONE_NUMBER_OPT_OUT = `
query CHECK_PHONE_NUMBER_OPT_OUT($bodyshopid: uuid!, $phone_number: String!) {
phone_number_opt_out(where: { bodyshopid: { _eq: $bodyshopid }, phone_number: { _eq: $phone_number } }) {
id
bodyshopid
phone_number
created_at
updated_at
}
}
`;
// Query to check if a phone number is opted out
exports.CHECK_PHONE_NUMBER_OPT_OUT = `
query CHECK_PHONE_NUMBER_OPT_OUT($bodyshopid: uuid!, $phone_number: String!) {
phone_number_opt_out(where: { bodyshopid: { _eq: $bodyshopid }, phone_number: { _eq: $phone_number } }) {
id
bodyshopid
phone_number
created_at
updated_at
}
}
`;
// Mutation to delete a phone number opt-out record (i.e. re-opt-in a number
// for a bodyshop); returns the removed rows for confirmation/logging.
exports.DELETE_PHONE_NUMBER_OPT_OUT = `
mutation DELETE_PHONE_NUMBER_OPT_OUT($bodyshopid: uuid!, $phone_number: String!) {
  delete_phone_number_opt_out(where: { bodyshopid: { _eq: $bodyshopid }, phone_number: { _eq: $phone_number } }) {
    affected_rows
    returning {
      id
      bodyshopid
      phone_number
    }
  }
}
`;

View File

@@ -0,0 +1,143 @@
// Notes: At the moment we take in RO Number, and ShopID. This is not very good considering the RO number can often be null, need
// to ask if it is possible that we just send the Job ID itself, this way we don't need to really care about the bodyshop, and we
// don't risk getting a null
const axios = require("axios");
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../../graphql-client/queries");
const { InstanceRegion } = require("../../utils/instanceMgr");
const moment = require("moment/moment");
const client = require("../../graphql-client/graphql-client").client;
const S3_BUCKET = process.env?.IMGPROXY_DESTINATION_BUCKET;
/**
 * Request-body fields the VSSTA webhook must supply; requests missing any of
 * these are rejected with a 400 before any lookup or download happens.
 * @type {string[]}
 */
const requiredParams = [
  "shop_id",
  "ro_nbr",
  "pdf_download_link",
  "company_api_key",
  "scan_type",
  "scan_time",
  "technician",
  "year",
  "make",
  "model"
];
/**
 * Webhook endpoint for VSSTA scan uploads: looks up the job by RO number and
 * shop id, downloads the base64-encoded PDF from VSSTA, stores it in S3 via a
 * presigned PUT, and records a `documents` row pointing at the S3 key.
 * Responds 400 on missing params, 404 when the job is unknown, 500 otherwise.
 * @param req - Express request; body must contain the fields in requiredParams.
 * @param res - Express response.
 */
const vsstaIntegrationRoute = async (req, res) => {
  const { logger } = req;
  if (!S3_BUCKET) {
    logger.log("vssta-integration-missing-bucket", "error", "api", "vssta");
    return res.status(500).json({ error: "Improper configuration" });
  }
  try {
    const missingParams = requiredParams.filter((param) => !req.body[param]);
    if (missingParams.length > 0) {
      logger.log(`vssta-integration-missing-param`, "error", "api", "vssta", {
        params: missingParams
      });
      return res.status(400).json({
        error: "Missing required parameters",
        missingParams
      });
    }
    // technician, year, make, model, is also available.
    const { shop_id, ro_nbr, pdf_download_link, scan_type, scan_time, company_api_key } = req.body;
    // 1. Get the job record by ro_number and shop_id
    const jobResult = await client.request(GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, {
      roNumber: ro_nbr,
      shopId: shop_id
    });
    if (!jobResult.jobs || jobResult.jobs.length === 0) {
      logger.log(`vssta-integration-missing-ro`, "error", "api", "vssta");
      return res.status(404).json({ error: "Job not found" });
    }
    const job = jobResult.jobs[0];
    // 2. Download the base64-encoded PDF string from the provided link
    const pdfResponse = await axios.get(pdf_download_link, {
      responseType: "text", // Expect base64 string
      headers: {
        "auth-token": company_api_key
      }
    });
    // 3. Decode the base64 string to a PDF buffer (strip any data-URI prefix)
    const base64String = pdfResponse.data.replace(/^data:application\/pdf;base64,/, "");
    const pdfBuffer = Buffer.from(base64String, "base64");
    // 4. Generate key for S3; timestamp is rendered in the shop's timezone
    // (assumes moment-timezone is loaded so .tz() exists — TODO confirm).
    const timestamp = moment(scan_time).tz(job.bodyshop.timezone).format("YYYYMMDD-HHmmss");
    const fileName = `${timestamp}_VSSTA_${scan_type}`;
    const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}.pdf`;
    // 5. Generate presigned URL for S3 upload (6-minute expiry)
    const s3Client = new S3Client({ region: InstanceRegion() });
    const putCommand = new PutObjectCommand({
      Bucket: S3_BUCKET,
      Key: s3Key,
      ContentType: "application/pdf",
      StorageClass: "INTELLIGENT_TIERING"
    });
    const presignedUrl = await getSignedUrl(s3Client, putCommand, { expiresIn: 360 });
    // 6. Upload the decoded PDF to S3
    await axios.put(presignedUrl, pdfBuffer, {
      headers: { "Content-Type": "application/pdf" }
    });
    // 7. Create document record in database
    const documentMeta = {
      jobid: job.id,
      uploaded_by: "VSSTA Integration",
      name: fileName,
      key: s3Key,
      type: "application/pdf",
      extension: "pdf",
      bodyshopid: job.shopid,
      size: pdfBuffer.length,
      takenat: scan_time
    };
    const documentInsert = await client.request(INSERT_NEW_DOCUMENT, {
      docInput: [documentMeta]
    });
    if (!documentInsert.insert_documents?.returning?.length) {
      // Log the document context, not missingParams — the old log reused
      // missingParams from the validation branch, which is always empty here.
      logger.log(`vssta-integration-failed-to-create-document-record`, "error", "api", "vssta", {
        jobid: job.id,
        s3Key
      });
      return res.status(500).json({ error: "Failed to create document record" });
    }
    return res.status(200).json({
      message: "VSSTA integration successful",
      documentId: documentInsert.insert_documents.returning[0].id
    });
  } catch (error) {
    logger.log(`vssta-integration-general`, "error", "api", "vssta", {
      error: error?.message,
      stack: error?.stack
    });
    return res.status(500).json({ error: error.message });
  }
};

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,257 @@
const crypto = require("crypto");
const admin = require("firebase-admin");
const client = require("../../graphql-client/graphql-client").client;
const DefaultNewShop = require("./defaultNewShop.json");
/**
 * Validates that every required field is present (truthy) on the payload.
 * Throws a { status: 400 } error object naming the first missing field.
 * @param {Object} payload - Request body to validate.
 * @param {string[]} fields - Field names that must be truthy on the payload.
 */
const requireFields = (payload, fields) => {
  const missing = fields.find((field) => !payload[field]);
  if (missing !== undefined) {
    throw { status: 400, message: `${missing} is required.` };
  }
};
/**
 * Ensures that the email is not already registered in Firebase.
 * @param {string} email - Address to check.
 * @returns {Promise<void>}
 * @throws {{status: 400}} when the email is already registered.
 * @throws {{status: 500}} when the Firebase lookup fails for any other reason.
 */
const ensureEmailNotRegistered = async (email) => {
  let alreadyRegistered = false;
  try {
    await admin.auth().getUserByEmail(email);
    alreadyRegistered = true;
  } catch (err) {
    if (err.code !== "auth/user-not-found") {
      throw { status: 500, message: "Error validating userEmail uniqueness", detail: err };
    }
  }
  // Thrown OUTSIDE the try: the previous version threw the 400 inside it, so
  // its own catch (code !== "auth/user-not-found") re-mapped it to a 500 with
  // a misleading message. Callers never saw the intended 400.
  if (alreadyRegistered) {
    throw { status: 400, message: "userEmail is already registered in Firebase." };
  }
};
/**
 * Creates a new Firebase user with the provided email.
 * @param {string} email - Address for the new Firebase account.
 * @returns {Promise<UserRecord>} The created Firebase user record.
 */
const createFirebaseUser = async (email) => {
  return admin.auth().createUser({ email });
};
/**
 * Deletes a Firebase user by their UID (used to clean up after a failed provision).
 * @param {string} uid - Firebase auth UID.
 * @returns {Promise<void>}
 */
const deleteFirebaseUser = async (uid) => {
  return admin.auth().deleteUser(uid);
};
/**
 * Generates a password reset link for the given email (new users follow it to
 * set their initial password).
 * @param {string} email
 * @returns {Promise<string>} The reset link URL.
 */
const generateResetLink = async (email) => {
  return admin.auth().generatePasswordResetLink(email);
};
/**
 * Ensures that the external shop ID is unique in the database.
 * @param {string} externalId - Partner-supplied shop identifier.
 * @returns {Promise<void>}
 * @throws {{status: 400, message: string}} when the id is already in use.
 */
const ensureExternalIdUnique = async (externalId) => {
  const query = `
    query CHECK_KEY($key: String!) {
      bodyshops(where: { external_shop_id: { _eq: $key } }) {
        external_shop_id
      }
    }`;
  const resp = await client.request(query, { key: externalId });
  if (resp.bodyshops.length) {
    throw { status: 400, message: `external_shop_id '${externalId}' is already in use.` };
  }
};
/**
 * Inserts a new bodyshop into the database.
 * @param {Object} input - bodyshops_insert_input payload (may nest vendors).
 * @returns {Promise<*>} The new shop's uuid.
 */
const insertBodyshop = async (input) => {
  const mutation = `
    mutation CREATE_SHOP($bs: bodyshops_insert_input!) {
      insert_bodyshops_one(object: $bs) { id }
    }`;
  const resp = await client.request(mutation, { bs: input });
  return resp.insert_bodyshops_one.id;
};
/**
 * Deletes all vendors associated with a specific shop ID. Cleanup helper for
 * failed provisioning — vendors are removed before the shop itself.
 * @param {string} shopId - Bodyshop uuid.
 * @returns {Promise<void>}
 */
const deleteVendorsByShop = async (shopId) => {
  const mutation = `
    mutation DELETE_VENDORS($shopId: uuid!) {
      delete_vendors(where: { shopid: { _eq: $shopId } }) {
        affected_rows
      }
    }`;
  await client.request(mutation, { shopId });
};
/**
 * Deletes a bodyshop by its ID (cleanup helper for failed provisioning).
 * @param {string} shopId - Bodyshop uuid.
 * @returns {Promise<void>}
 */
const deleteBodyshop = async (shopId) => {
  const mutation = `
    mutation DELETE_SHOP($id: uuid!) {
      delete_bodyshops_by_pk(id: $id) { id }
    }`;
  await client.request(mutation, { id: shopId });
};
/**
 * Inserts a new user row plus an association binding them to the shop.
 * @param {string} uid - Firebase auth UID (stored as authid).
 * @param {string} email - User's email (already lowercased by the caller).
 * @param {string} shopId - Bodyshop uuid the user is associated with.
 * @returns {Promise<*>} { id: authid, email } of the created user.
 */
const insertUserAssociation = async (uid, email, shopId) => {
  const mutation = `
    mutation CREATE_USER($u: users_insert_input!) {
      insert_users_one(object: $u) {
        id: authid
        email
      }
    }`;
  const vars = {
    u: {
      email,
      authid: uid,
      validemail: true,
      associations: {
        // authlevel 80 — presumably the shop-admin level; verify against the
        // authlevel scale used elsewhere before changing.
        data: [{ shopid: shopId, authlevel: 80, active: true }]
      }
    }
  };
  const resp = await client.request(mutation, vars);
  return resp.insert_users_one;
};
/**
 * Handles the provisioning of a new parts management shop and user.
 * Creates the bodyshop (with nested vendors), then the Firebase user and its
 * shop association; on any failure, best-effort cleanup removes whatever was
 * already created before returning the error.
 * @param req - Express request; body carries shop fields, vendors and userEmail.
 * @param res - Express response.
 * @returns {Promise<*>} 200 with { shop, user } on success; error status + message otherwise.
 */
const partsManagementProvisioning = async (req, res) => {
  const { logger } = req;
  const p = { ...req.body, userEmail: req.body.userEmail?.toLowerCase() };
  // Track created resources in the outer scope so the catch block can clean
  // them up. (Previously the cleanup read err.userRecord / err.newShopId,
  // which were never attached to any error, so failed provisions left
  // orphaned shops and Firebase users behind.)
  let newShopId = null;
  let userRecord = null;
  try {
    // Validate required fields first so we never hit Firebase with an
    // undefined userEmail.
    requireFields(p, [
      "external_shop_id",
      "shopname",
      "address1",
      "city",
      "state",
      "zip_post",
      "country",
      "email",
      "phone",
      "userEmail"
    ]);
    await ensureEmailNotRegistered(p.userEmail);
    await ensureExternalIdUnique(p.external_shop_id);
    logger.log("admin-create-shop-user", "debug", p.userEmail, null, {
      request: req.body,
      ioadmin: true
    });
    // Create shop
    const shopInput = {
      shopname: p.shopname,
      address1: p.address1,
      address2: p.address2 || null,
      city: p.city,
      state: p.state,
      zip_post: p.zip_post,
      country: p.country,
      email: p.email,
      external_shop_id: p.external_shop_id,
      timezone: p.timezone,
      phone: p.phone,
      logo_img_path: {
        src: p.logoUrl,
        width: "",
        height: "",
        headerMargin: DefaultNewShop.logo_img_path.headerMargin
      },
      md_ro_statuses: DefaultNewShop.md_ro_statuses,
      vendors: {
        // vendors is optional in the payload; default to none instead of
        // crashing on .map of undefined.
        data: (p.vendors ?? []).map((v) => ({
          name: v.name,
          street1: v.street1 || null,
          street2: v.street2 || null,
          city: v.city || null,
          state: v.state || null,
          zip: v.zip || null,
          country: v.country || null,
          email: v.email || null,
          discount: v.discount ?? 0,
          due_date: v.due_date ?? null,
          cost_center: v.cost_center || null,
          favorite: v.favorite ?? [],
          phone: v.phone || null,
          active: v.active ?? true,
          dmsid: v.dmsid || null
        }))
      }
    };
    newShopId = await insertBodyshop(shopInput);
    // Create user + association
    userRecord = await createFirebaseUser(p.userEmail);
    const resetLink = await generateResetLink(p.userEmail);
    const createdUser = await insertUserAssociation(userRecord.uid, p.userEmail, newShopId);
    return res.status(200).json({
      shop: { id: newShopId, shopname: p.shopname },
      user: {
        id: createdUser.id,
        email: createdUser.email,
        resetLink
      }
    });
  } catch (err) {
    logger.log("admin-create-shop-user-error", "error", p.userEmail, null, {
      message: err.message,
      detail: err.detail || err
    });
    // Best-effort cleanup of partially-created resources; cleanup failures
    // are deliberately swallowed so the original error is what gets reported.
    if (userRecord) {
      await deleteFirebaseUser(userRecord.uid).catch(() => {});
    }
    if (newShopId) {
      await deleteVendorsByShop(newShopId).catch(() => {});
      await deleteBodyshop(newShopId).catch(() => {});
    }
    return res.status(err.status || 500).json({ error: err.message || "Internal server error" });
  }
};
module.exports = partsManagementProvisioning;

View File

@@ -0,0 +1,160 @@
openapi: 3.0.3
info:
title: Parts Management Provisioning API
description: API endpoint to provision a new shop and user in the Parts Management system.
version: 1.0.0
paths:
/parts-management/provision:
post:
summary: Provision a new parts management shop and user
operationId: partsManagementProvisioning
tags:
- Parts Management
requestBody:
required: true
content:
application/json:
schema:
type: object
required:
- external_shop_id
- shopname
- address1
- city
- state
- zip_post
- country
- email
- phone
- userEmail
properties:
external_shop_id:
type: string
description: External shop ID (must be unique)
shopname:
type: string
address1:
type: string
address2:
type: string
nullable: true
city:
type: string
state:
type: string
zip_post:
type: string
country:
type: string
email:
type: string
phone:
type: string
userEmail:
type: string
format: email
logoUrl:
type: string
format: uri
nullable: true
timezone:
type: string
nullable: true
vendors:
type: array
items:
type: object
properties:
name:
type: string
street1:
type: string
nullable: true
street2:
type: string
nullable: true
city:
type: string
nullable: true
state:
type: string
nullable: true
zip:
type: string
nullable: true
country:
type: string
nullable: true
email:
type: string
format: email
nullable: true
discount:
type: number
nullable: true
due_date:
type: string
format: date
nullable: true
cost_center:
type: string
nullable: true
favorite:
type: array
items:
type: string
nullable: true
phone:
type: string
nullable: true
active:
type: boolean
nullable: true
dmsid:
type: string
nullable: true
responses:
'200':
description: Shop and user successfully created
content:
application/json:
schema:
type: object
properties:
shop:
type: object
properties:
id:
type: string
format: uuid
shopname:
type: string
user:
type: object
properties:
id:
type: string
email:
type: string
resetLink:
type: string
format: uri
'400':
description: Bad request (missing or invalid fields)
content:
application/json:
schema:
type: object
properties:
error:
type: string
'500':
description: Internal server error
content:
application/json:
schema:
type: object
properties:
error:
type: string

View File

@@ -144,6 +144,7 @@ const paymentRefund = async (req, res) => {
logger.log("intellipay-refund-success", "DEBUG", req.user?.email, null, {
requestOptions: options,
response: response?.data,
...logResponseMeta
});

View File

@@ -107,18 +107,25 @@ const handleInvoiceBasedPayment = async (values, logger, logMeta, res) => {
});
// Create payment response record
const responseResults = await gqlClient.request(INSERT_PAYMENT_RESPONSE, {
paymentResponse: {
amount: values.total,
bodyshopid: bodyshop.id,
paymentid: paymentResult.id,
jobid: job.id,
declinereason: "Approved",
ext_paymentid: values.paymentid,
successful: true,
response: values
}
});
const responseResults = await gqlClient
.request(INSERT_PAYMENT_RESPONSE, {
paymentResponse: {
amount: values.total,
bodyshopid: bodyshop.id,
paymentid: paymentResult.insert_payments.returning[0].id,
jobid: job.id,
declinereason: "Approved",
ext_paymentid: values.paymentid,
successful: true,
response: values
}
})
.catch((err) => {
logger.log("intellipay-postback-invoice-response-error", "ERROR", "api", null, {
err,
...logMeta
});
});
logger.log("intellipay-postback-invoice-response-success", "DEBUG", "api", null, {
responseResults,

View File

@@ -1,5 +1,6 @@
const { sendTaskEmail } = require("../../email/sendemail");
const generateEmailTemplate = require("../../email/generateTemplate");
const { InstanceEndpoints } = require("../../utils/instanceMgr");
/**
* @description Send notification email to the user
@@ -22,11 +23,9 @@ const sendPaymentNotificationEmail = async (userEmail, jobs, partialPayments, lo
body: jobs.jobs
.map(
(job) =>
`Reference: <a href="${InstanceEndpoints()}/manage/jobs/${job.id}">${job.ro_number || "N/A"}</a> | ${
job.ownr_co_nm ? job.ownr_co_nm : `${job.ownr_fn || ""} ${job.ownr_ln || ""}`.trim()
} | ${`${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""}`.trim()} | $${partialPayments.find((p) => p.jobid === job.id).amount}`
`<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">Reference: <a href="${InstanceEndpoints()}/manage/jobs/${job.id}" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${job.ro_number || "N/A"}</a> | ${job.ownr_co_nm ? job.ownr_co_nm : `${job.ownr_fn || ""} ${job.ownr_ln || ""}`.trim()} | ${`${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""}`.trim()} | $${partialPayments.find((p) => p.jobid === job.id).amount}</p>`
)
.join("<br/>")
.join("")
})
});
} catch (error) {

View File

@@ -37,7 +37,9 @@ beforeEach(() => {
]
})
.mockResolvedValueOnce({
id: "payment123"
insert_payments: {
returning: [{ id: "payment123" }]
}
})
.mockResolvedValueOnce({
insert_payment_response: {

View File

@@ -19,7 +19,7 @@ async function JobCosting(req, res) {
const client = req.userGraphQLClient;
//Uncomment for further testing
// logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null);
logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null);
try {
const resp = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_JOB_COSTING_DETAILS, {
@@ -47,9 +47,9 @@ async function JobCostingMulti(req, res) {
const client = req.userGraphQLClient;
//Uncomment for further testing
// logger.log("job-costing-multi-start", "DEBUG", req?.user?.email, null, {
// jobids
// });
logger.log("job-costing-multi-start", "DEBUG", req?.user?.email, null, {
jobids
});
try {
const resp = await client
@@ -567,6 +567,29 @@ function GenerateCostingData(job) {
);
}
if (InstanceManager({ imex: false, rome: true })) {
const stlTowing = job.cieca_stl?.data.find((c) => c.ttl_type === "OTTW");
const stlStorage = job.cieca_stl?.data.find((c) => c.ttl_type === "OTST");
if (!jobLineTotalsByProfitCenter.additional[defaultProfits["TOW"]])
jobLineTotalsByProfitCenter.additional[defaultProfits["TOW"]] = Dinero();
jobLineTotalsByProfitCenter.additional[defaultProfits["TOW"]] = stlTowing
? Dinero({ amount: Math.round(stlTowing.ttl_amt * 100) })
: Dinero({
amount: Math.round((job.towing_payable || 0) * 100)
});
if (!jobLineTotalsByProfitCenter.additional[defaultProfits["STO"]])
jobLineTotalsByProfitCenter.additional[defaultProfits["STO"]] = Dinero();
jobLineTotalsByProfitCenter.additional[defaultProfits["STO"]] = stlStorage
? Dinero({ amount: Math.round(stlStorage.ttl_amt * 100) })
: Dinero({
amount: Math.round((job.storage_payable || 0) * 100)
});
}
//Is it a DMS Setup?
const selectedDmsAllocationConfig =
(job.bodyshop.md_responsibility_centers.dms_defaults &&

View File

@@ -8,6 +8,7 @@ const getLifecycleStatusColor = require("../utils/getLifecycleStatusColor");
const jobLifecycle = async (req, res) => {
// Grab the jobids and statuses from the request body
const { jobids, statuses } = req.body;
const { logger } = req;
if (!jobids) {
return res.status(400).json({
@@ -16,102 +17,118 @@ const jobLifecycle = async (req, res) => {
}
const jobIDs = _.isArray(jobids) ? jobids : [jobids];
const client = req.userGraphQLClient;
const resp = await client.request(queries.QUERY_TRANSITIONS_BY_JOBID, { jobids: jobIDs });
const transitions = resp.transitions;
logger.log("job-lifecycle-start", "DEBUG", req?.user?.email, null, {
jobids: jobIDs
});
try {
const client = req.userGraphQLClient;
const resp = await client.request(queries.QUERY_TRANSITIONS_BY_JOBID, { jobids: jobIDs });
const transitions = resp.transitions;
if (!transitions) {
return res.status(200).json({
jobIDs,
transitions: []
});
}
const transitionsByJobId = _.groupBy(resp.transitions, "jobid");
const groupedTransitions = {};
const allDurations = [];
for (let jobId in transitionsByJobId) {
let lifecycle = transitionsByJobId[jobId].map((transition) => {
transition.start_readable = transition.start ? moment(transition.start).fromNow() : "N/A";
transition.end_readable = transition.end ? moment(transition.end).fromNow() : "N/A";
if (transition.duration) {
transition.duration_seconds = Math.round(transition.duration / 1000);
transition.duration_minutes = Math.round(transition.duration_seconds / 60);
let duration = moment.duration(transition.duration);
transition.duration_readable = durationToHumanReadable(duration);
} else {
transition.duration_seconds = 0;
transition.duration_minutes = 0;
transition.duration_readable = "N/A";
}
return transition;
});
const durations = calculateStatusDuration(lifecycle, statuses);
groupedTransitions[jobId] = {
lifecycle,
durations
};
if (durations?.summations) {
allDurations.push(durations.summations);
}
}
const finalSummations = [];
const flatGroupedAllDurations = _.groupBy(allDurations.flat(), "status");
const finalStatusCounts = Object.keys(flatGroupedAllDurations).reduce((acc, status) => {
acc[status] = flatGroupedAllDurations[status].length;
return acc;
}, {});
// Calculate total value of all statuses
const finalTotal = Object.values(flatGroupedAllDurations).reduce((total, statusArr) => {
return total + statusArr.reduce((acc, curr) => acc + curr.value, 0);
}, 0);
Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = finalTotal > 0 ? (value / finalTotal) * 100 : 0;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
const averageValue = _.size(jobIDs) > 0 ? value / jobIDs.length : 0;
const averageHumanReadable = durationToHumanReadable(moment.duration(averageValue));
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
roundedPercentage,
averageValue,
averageHumanReadable
});
});
if (!transitions) {
return res.status(200).json({
jobIDs,
transitions: []
});
}
const transitionsByJobId = _.groupBy(resp.transitions, "jobid");
const groupedTransitions = {};
const allDurations = [];
for (let jobId in transitionsByJobId) {
let lifecycle = transitionsByJobId[jobId].map((transition) => {
transition.start_readable = transition.start ? moment(transition.start).fromNow() : "N/A";
transition.end_readable = transition.end ? moment(transition.end).fromNow() : "N/A";
if (transition.duration) {
transition.duration_seconds = Math.round(transition.duration / 1000);
transition.duration_minutes = Math.round(transition.duration_seconds / 60);
let duration = moment.duration(transition.duration);
transition.duration_readable = durationToHumanReadable(duration);
} else {
transition.duration_seconds = 0;
transition.duration_minutes = 0;
transition.duration_readable = "N/A";
transition: groupedTransitions,
durations: {
jobs: jobIDs.length,
summations: finalSummations,
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal)),
averageValue: _.size(jobIDs) > 0 ? finalTotal / jobIDs.length : 0,
averageHumanReadable:
_.size(jobIDs) > 0
? durationToHumanReadable(moment.duration(finalTotal / jobIDs.length))
: durationToHumanReadable(moment.duration(0))
}
return transition;
});
const durations = calculateStatusDuration(lifecycle, statuses);
groupedTransitions[jobId] = {
lifecycle,
durations
};
if (durations?.summations) {
allDurations.push(durations.summations);
}
} catch (error) {
logger.log("job-lifecycle-error", "ERROR", req?.user?.email, null, {
jobids: jobIDs,
statuses: statuses ? JSON.stringify(statuses) : "N/A",
error: error.message
});
return res.status(500).json({
error: "Internal server error"
});
}
const finalSummations = [];
const flatGroupedAllDurations = _.groupBy(allDurations.flat(), "status");
const finalStatusCounts = Object.keys(flatGroupedAllDurations).reduce((acc, status) => {
acc[status] = flatGroupedAllDurations[status].length;
return acc;
}, {});
// Calculate total value of all statuses
const finalTotal = Object.values(flatGroupedAllDurations).reduce((total, statusArr) => {
return total + statusArr.reduce((acc, curr) => acc + curr.value, 0);
}, 0);
Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = finalTotal > 0 ? (value / finalTotal) * 100 : 0;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
const averageValue = _.size(jobIDs) > 0 ? value / jobIDs.length : 0;
const averageHumanReadable = durationToHumanReadable(moment.duration(averageValue));
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
roundedPercentage,
averageValue,
averageHumanReadable
});
});
return res.status(200).json({
jobIDs,
transition: groupedTransitions,
durations: {
jobs: jobIDs.length,
summations: finalSummations,
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal)),
averageValue: _.size(jobIDs) > 0 ? finalTotal / jobIDs.length : 0,
averageHumanReadable:
_.size(jobIDs) > 0
? durationToHumanReadable(moment.duration(finalTotal / jobIDs.length))
: durationToHumanReadable(moment.duration(0))
}
});
};
module.exports = jobLifecycle;

View File

@@ -381,7 +381,7 @@ async function CalculateRatesTotals({ job, client }) {
if (item.mod_lbr_ty) {
//Check to see if it has 0 hours and a price instead.
if (item.mod_lb_hrs === 0 && item.act_price > 0 && item.lbr_op === "OP14") {
if (item.lbr_op === "OP14" && item.act_price > 0 && (!item.part_type || item.mod_lb_hrs === 0)) {
//Scenario where SGI may pay out hours using a part price.
if (!ret[item.mod_lbr_ty.toLowerCase()].total) {
ret[item.mod_lbr_ty.toLowerCase()].total = Dinero();

View File

@@ -314,7 +314,8 @@ function CalculateRatesTotals(ratesList) {
if (item.mod_lbr_ty) {
//Check to see if it has 0 hours and a price instead.
if (item.mod_lb_hrs === 0 && item.act_price > 0 && item.lbr_op === "OP14") {
//Extend for when there are hours and a price.
if (item.lbr_op === "OP14" && item.act_price > 0 && (!item.part_type || item.mod_lb_hrs === 0)) {
//Scenario where SGI may pay out hours using a part price.
if (!ret[item.mod_lbr_ty.toLowerCase()].total) {
ret[item.mod_lbr_ty.toLowerCase()].total = Dinero();

View File

@@ -1,8 +1,12 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
S3Client,
PutObjectCommand,
@@ -10,35 +14,38 @@ const {
CopyObjectCommand,
DeleteObjectCommand
} = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const crypto = require("crypto");
const { InstanceRegion } = require("../utils/instanceMgr");
const {
GET_DOCUMENTS_BY_JOB,
QUERY_TEMPORARY_DOCS,
GET_DOCUMENTS_BY_IDS,
GET_DOCUMENTS_BY_BILL,
DELETE_MEDIA_DOCUMENTS
} = require("../graphql-client/queries");
const archiver = require("archiver");
const stream = require("node:stream");
const yazl = require("yazl");
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN.
const imgproxyKey = process.env.IMGPROXY_KEY;
const imgproxySalt = process.env.IMGPROXY_SALT;
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;
//Generate a signed upload link for the S3 bucket.
//All uploads must be going to the same shop and jobid.
exports.generateSignedUploadUrls = async (req, res) => {
/**
* Generate a Signed URL Link for the s3 bucket.
* All Uploads must be going to the same Shop and JobId
* @param req
* @param res
* @returns {Promise<*>}
*/
const generateSignedUploadUrls = async (req, res) => {
const { filenames, bodyshopid, jobid } = req.body;
try {
logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid });
logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, {
filenames,
bodyshopid,
jobid
});
const signedUrls = [];
for (const filename of filenames) {
const key = filename;
const key = filename;
const client = new S3Client({ region: InstanceRegion() });
const command = new PutObjectCommand({
Bucket: imgproxyDestinationBucket,
@@ -50,24 +57,32 @@ exports.generateSignedUploadUrls = async (req, res) => {
}
logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
res.json({
return res.json({
success: true,
signedUrls
});
} catch (error) {
res.status(400).json({
success: false,
logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack
});
logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
return res.status(400).json({
success: false,
message: error.message,
stack: error.stack
});
}
};
exports.getThumbnailUrls = async (req, res) => {
/**
* Get Thumbnail URLS
* @param req
* @param res
* @returns {Promise<*>}
*/
const getThumbnailUrls = async (req, res) => {
const { jobid, billid } = req.body;
try {
@@ -76,9 +91,11 @@ exports.getThumbnailUrls = async (req, res) => {
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//If there's no jobid and no billid, we're in temporary documents.
const data = await (jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const data = await (
billid ? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid }) :
jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
const s3client = new S3Client({ region: InstanceRegion() });
@@ -86,24 +103,19 @@ exports.getThumbnailUrls = async (req, res) => {
for (const document of data.documents) {
//Format to follow:
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with unencoded/unhashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension.
let key;
if (/\.[^/.]+$/.test(document.key)) {
key = document.key;
} else {
key = `${document.key}.${document.extension.toLowerCase()}`;
}
let key = keyStandardize(document)
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
//Thumbnail Generation Block
const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);
//Full Size URL block
const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);
@@ -114,8 +126,8 @@ exports.getThumbnailUrls = async (req, res) => {
Bucket: imgproxyDestinationBucket,
Key: key
});
const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
s3Props.presignedGetUrl = presignedGetUrl;
s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
@@ -133,7 +145,7 @@ exports.getThumbnailUrls = async (req, res) => {
});
}
res.json(proxiedUrls);
return res.json(proxiedUrls);
//Iterate over them, build the link based on the media type, and return the array.
} catch (error) {
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
@@ -142,78 +154,95 @@ exports.getThumbnailUrls = async (req, res) => {
message: error.message,
stack: error.stack
});
res.status(400).json({ message: error.message, stack: error.stack });
return res.status(400).json({ message: error.message, stack: error.stack });
}
};
exports.getBillFiles = async (req, res) => {
//Given a bill ID, get the documents associated to it.
//NOTE: stub — no implementation in this version.
};
/**
* Download Files
* @param req
* @param res
* @returns {Promise<*>}
*/
const downloadFiles = async (req, res) => {
const { jobId, billid, documentids } = req.body;
exports.downloadFiles = async (req, res) => {
//Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
const { jobid, billid, documentids } = req.body;
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });
const client = req.userGraphQLClient;
let data;
try {
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobid, { billid, jobid, documentids });
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//Query for the keys of the document IDs
const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
//Using the Keys, get all of the S3 links, zip them, and send back to the client.
const s3client = new S3Client({ region: InstanceRegion() });
const archiveStream = archiver("zip");
archiveStream.on("error", (error) => {
console.error("Archival encountered an error:", error);
throw new Error(error);
});
const passthrough = new stream.PassThrough();
archiveStream.pipe(passthrough);
for (const key of data.documents.map((d) => d.key)) {
const response = await s3client.send(new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key }));
// :: `response.Body` is a Buffer
console.log(path.basename(key));
archiveStream.append(response.Body, { name: path.basename(key) });
}
archiveStream.finalize();
const archiveKey = `archives/${jobid}/archive-${new Date().toISOString()}.zip`;
const parallelUploads3 = new Upload({
client: s3client,
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passthrough }
});
parallelUploads3.on("httpUploadProgress", (progress) => {
console.log(progress);
});
const uploadResult = await parallelUploads3.done();
//Generate the presigned URL to download it.
const presignedUrl = await getSignedUrl(
s3client,
new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: archiveKey }),
{ expiresIn: 360 }
);
res.json({ success: true, url: presignedUrl });
//Iterate over them, build the link based on the media type, and return the array.
data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
} catch (error) {
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
jobid,
logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
jobId,
billid,
message: error.message,
stack: error.stack
});
res.status(400).json({ message: error.message, stack: error.stack });
return res.status(400).json({ message: error.message });
}
const s3client = new S3Client({ region: InstanceRegion() });
const zipfile = new yazl.ZipFile();
const filename = `archive-${jobId || "na"}-${new Date().toISOString().replace(/[:.]/g, "-")}.zip`;
res.setHeader("Content-Type", "application/zip");
res.setHeader("Content-Disposition", `attachment; filename="${filename}"`);
// Handle zipfile stream errors
zipfile.outputStream.on("error", (err) => {
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
// Cannot send another response here, just destroy the connection
res.destroy(err);
});
zipfile.outputStream.pipe(res);
try {
for (const doc of data.documents) {
let key = keyStandardize(doc)
let response;
try {
response = await s3client.send(
new GetObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: key
})
);
} catch (err) {
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
// Optionally, skip this file or add a placeholder file in the zip
continue;
}
// Attach error handler to S3 stream
response.Body.on("error", (err) => {
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
res.destroy(err);
});
zipfile.addReadStream(response.Body, path.basename(key));
}
zipfile.end();
} catch (error) {
logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
jobId,
billid,
message: error.message,
stack: error.stack
});
// Cannot send another response here, just destroy the connection
res.destroy(error);
}
};
exports.deleteFiles = async (req, res) => {
/**
* Delete Files
* @param req
* @param res
* @returns {Promise<*>}
*/
const deleteFiles = async (req, res) => {
//Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
//Mark as deleted from the documents section of the database.
const { ids } = req.body;
@@ -232,7 +261,7 @@ exports.deleteFiles = async (req, res) => {
(async () => {
try {
// Delete the original object
const deleteResult = await s3client.send(
await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.key
@@ -250,23 +279,30 @@ exports.deleteFiles = async (req, res) => {
const result = await Promise.all(deleteTransactions);
const errors = result.filter((d) => d.error);
//Delete only the succesful deletes.
//Delete only the successful deletes.
const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, {
ids: result.filter((t) => !t.error).map((d) => d.id)
});
res.json({ errors, deleteMutationResult });
return res.json({ errors, deleteMutationResult });
} catch (error) {
logger.log("imgproxy-delete-files-error", "ERROR", req.user.email, null, {
ids,
message: error.message,
stack: error.stack
});
res.status(400).json({ message: error.message, stack: error.stack });
return res.status(400).json({ message: error.message, stack: error.stack });
}
};
exports.moveFiles = async (req, res) => {
/**
* Move Files
* @param req
* @param res
* @returns {Promise<*>}
*/
const moveFiles = async (req, res) => {
const { documents, tojobid } = req.body;
try {
logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid });
@@ -278,7 +314,7 @@ exports.moveFiles = async (req, res) => {
(async () => {
try {
// Copy the object to the new key
const copyresult = await s3client.send(
await s3client.send(
new CopyObjectCommand({
Bucket: imgproxyDestinationBucket,
CopySource: `${imgproxyDestinationBucket}/${document.from}`,
@@ -288,7 +324,7 @@ exports.moveFiles = async (req, res) => {
);
// Delete the original object
const deleteResult = await s3client.send(
await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.from
@@ -297,7 +333,12 @@ exports.moveFiles = async (req, res) => {
return document;
} catch (error) {
return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket };
return {
id: document.id,
from: document.from,
error: error,
bucket: imgproxyDestinationBucket
};
}
})()
);
@@ -307,6 +348,7 @@ exports.moveFiles = async (req, res) => {
const errors = result.filter((d) => d.error);
let mutations = "";
result
.filter((d) => !d.error)
.forEach((d, idx) => {
@@ -321,14 +363,16 @@ exports.moveFiles = async (req, res) => {
});
const client = req.userGraphQLClient;
if (mutations !== "") {
const mutationResult = await client.request(`mutation {
${mutations}
}`);
res.json({ errors, mutationResult });
} else {
res.json({ errors: "No images were succesfully moved on remote server. " });
return res.json({ errors, mutationResult });
}
return res.json({ errors: "No images were successfully moved on remote server. " });
} catch (error) {
logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, {
documents,
@@ -336,13 +380,24 @@ exports.moveFiles = async (req, res) => {
message: error.message,
stack: error.stack
});
res.status(400).json({ message: error.message, stack: error.stack });
return res.status(400).json({ message: error.message, stack: error.stack });
}
};
function base64UrlEncode(str) {
return Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
}
function createHmacSha256(data) {
return crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url");
}
/**
 * Normalize a document's storage key so that it always carries a file extension.
 * Legacy (Cloudinary-era) documents stored the key without an extension; in that
 * case the document's lowercased extension is appended.
 * @param {{key: string, extension?: string}} doc - Document record with key and extension.
 * @returns {string} The key, guaranteed to end in an extension.
 */
const keyStandardize = (doc) =>
  /\.[^/.]+$/.test(doc.key) ? doc.key : `${doc.key}.${doc.extension.toLowerCase()}`;
module.exports = {
generateSignedUploadUrls,
getThumbnailUrls,
downloadFiles,
deleteFiles,
moveFiles
};

View File

@@ -1,42 +1,55 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const determineFileType = require("./util/determineFileType");
const { DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
var cloudinary = require("cloudinary").v2;
const cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL);
/**
* @description Creates a signed upload URL for Cloudinary.
* @param req
* @param res
*/
const createSignedUploadURL = (req, res) => {
logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET));
};
exports.createSignedUploadURL = createSignedUploadURL;
/**
* @description Downloads files from Cloudinary.
* @param req
* @param res
*/
const downloadFiles = (req, res) => {
const { ids } = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
const url = cloudinary.utils.download_zip_url({
public_ids: ids,
flatten_folders: true
});
res.send(url);
};
exports.downloadFiles = downloadFiles;
/**
* @description Deletes files from Cloudinary and Apollo.
* @param req
* @param res
* @returns {Promise<void>}
*/
const deleteFiles = async (req, res) => {
const { ids } = req.body;
const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
const types = _.groupBy(ids, (x) => determineFileType(x.type));
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
const returns = [];
if (types.image) {
//delete images
@@ -47,8 +60,8 @@ const deleteFiles = async (req, res) => {
)
);
}
if (types.video) {
//delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.video.map((x) => x.key),
@@ -56,8 +69,8 @@ const deleteFiles = async (req, res) => {
)
);
}
if (types.raw) {
//delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.raw.map((x) => `${x.key}.${x.extension}`),
@@ -68,6 +81,7 @@ const deleteFiles = async (req, res) => {
// Delete it on apollo.
const successfulDeletes = [];
returns.forEach((resType) => {
Object.keys(resType.deleted).forEach((key) => {
if (resType.deleted[key] === "deleted" || resType.deleted[key] === "not_found") {
@@ -77,7 +91,7 @@ const deleteFiles = async (req, res) => {
});
try {
const result = await client.request(queries.DELETE_MEDIA_DOCUMENTS, {
const result = await client.request(DELETE_MEDIA_DOCUMENTS, {
ids: ids.filter((i) => successfulDeletes.includes(i.key)).map((i) => i.id)
});
@@ -91,24 +105,29 @@ const deleteFiles = async (req, res) => {
}
};
exports.deleteFiles = deleteFiles;
/**
* @description Renames keys in Cloudinary and updates the database.
* @param req
* @param res
* @returns {Promise<void>}
*/
const renameKeys = async (req, res) => {
const { documents, tojobid } = req.body;
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
const proms = [];
documents.forEach((d) => {
proms.push(
(async () => {
try {
const res = {
return {
id: d.id,
...(await cloudinary.uploader.rename(d.from, d.to, {
resource_type: DetermineFileType(d.type)
resource_type: determineFileType(d.type)
}))
};
return res;
} catch (error) {
return { id: d.id, from: d.from, error: error };
}
@@ -148,18 +167,13 @@ const renameKeys = async (req, res) => {
}`);
res.json({ errors, mutationResult });
} else {
res.json({ errors: "No images were succesfully moved on remote server. " });
res.json({ errors: "No images were successfully moved on remote server. " });
}
};
exports.renameKeys = renameKeys;
//Also needs to be updated in upload utility and mobile app.
function DetermineFileType(filetype) {
if (!filetype) return "auto";
else if (filetype.startsWith("image")) return "image";
else if (filetype.startsWith("video")) return "video";
else if (filetype.startsWith("application/pdf")) return "image";
else if (filetype.startsWith("application")) return "raw";
return "auto";
}
module.exports = {
createSignedUploadURL,
downloadFiles,
deleteFiles,
renameKeys
};

View File

@@ -0,0 +1,98 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import determineFileType from "../util/determineFileType";
import base64UrlEncode from "../util/base64UrlEncode";
describe("Media Utils", () => {
  describe("base64UrlEncode", () => {
    it("should encode string to base64url format", () => {
      expect(base64UrlEncode("hello world")).toBe("aGVsbG8gd29ybGQ");
    });
    it('should replace "+" with "-"', () => {
      // "~~~" encodes to "fn5+" in plain base64, so the "+" must become "-".
      // (The former input "hello+world" produced no "+" in its base64 output,
      // so the substitution was never actually exercised.)
      expect(base64UrlEncode("~~~")).toBe("fn5-");
      expect(base64UrlEncode("hello+world")).toBe("aGVsbG8rd29ybGQ");
    });
    it('should replace "/" with "_"', () => {
      // "\u007f\u007f\u007f" encodes to "f39/" in plain base64, so the "/"
      // must become "_". ("path/to/resource" contains no "/" once encoded.)
      expect(base64UrlEncode("\u007f\u007f\u007f")).toBe("f39_");
      expect(base64UrlEncode("path/to/resource")).toBe("cGF0aC90by9yZXNvdXJjZQ");
    });
    it('should remove trailing "=" characters', () => {
      // "a" encodes to "YQ==" in plain base64; the padding must be stripped.
      expect(base64UrlEncode("a")).toBe("YQ");
      // "padding==" is 9 bytes (a multiple of 3) and therefore yields no padding.
      expect(base64UrlEncode("padding==")).toBe("cGFkZGluZz09");
    });
  });
  describe("createHmacSha256", () => {
    let createHmacSha256;
    const originalEnv = process.env;
    beforeEach(async () => {
      vi.resetModules();
      process.env = { ...originalEnv };
      process.env.IMGPROXY_KEY = "test-key";
      // Dynamically import the module after setting env var
      const module = await import("../util/createHmacSha256");
      createHmacSha256 = module.default;
    });
    afterEach(() => {
      process.env = originalEnv;
    });
    it("should create a valid HMAC SHA-256 hash", () => {
      const result = createHmacSha256("test-data");
      expect(typeof result).toBe("string");
      expect(result.length).toBeGreaterThan(0);
    });
    it("should produce consistent hashes for the same input", () => {
      const hash1 = createHmacSha256("test-data");
      const hash2 = createHmacSha256("test-data");
      expect(hash1).toBe(hash2);
    });
    it("should produce different hashes for different inputs", () => {
      const hash1 = createHmacSha256("test-data-1");
      const hash2 = createHmacSha256("test-data-2");
      expect(hash1).not.toBe(hash2);
    });
  });
  describe("determineFileType", () => {
    it('should return "auto" when no filetype is provided', () => {
      expect(determineFileType()).toBe("auto");
      expect(determineFileType(null)).toBe("auto");
      expect(determineFileType(undefined)).toBe("auto");
    });
    it('should return "image" for image filetypes', () => {
      expect(determineFileType("image/jpeg")).toBe("image");
      expect(determineFileType("image/png")).toBe("image");
      expect(determineFileType("image/gif")).toBe("image");
    });
    it('should return "video" for video filetypes', () => {
      expect(determineFileType("video/mp4")).toBe("video");
      expect(determineFileType("video/quicktime")).toBe("video");
      expect(determineFileType("video/x-msvideo")).toBe("video");
    });
    it('should return "image" for PDF files', () => {
      expect(determineFileType("application/pdf")).toBe("image");
    });
    it('should return "raw" for other application types', () => {
      expect(determineFileType("application/zip")).toBe("raw");
      expect(determineFileType("application/json")).toBe("raw");
      expect(determineFileType("application/msword")).toBe("raw");
    });
    it('should return "auto" for unrecognized types', () => {
      expect(determineFileType("audio/mpeg")).toBe("auto");
      expect(determineFileType("text/html")).toBe("auto");
      expect(determineFileType("unknown-type")).toBe("auto");
    });
  });
});

View File

@@ -0,0 +1,9 @@
/**
 * @description Converts a string to a base64url encoded string (RFC 4648 §5:
 * "+" becomes "-", "/" becomes "_", trailing "=" padding is removed).
 * @param {string|Buffer} str - The value to encode.
 * @returns {string} The base64url encoded string.
 */
const base64UrlEncode = (str) =>
  // Node's built-in "base64url" encoding performs the "+/-", "/_" substitution
  // and padding removal that was previously hand-rolled with three regexes.
  Buffer.from(str).toString("base64url");
module.exports = base64UrlEncode;

View File

@@ -0,0 +1,12 @@
const crypto = require("crypto");
const imgproxyKey = process.env.IMGPROXY_KEY;
/**
 * @description Creates an HMAC SHA-256 signature of the given data, keyed with
 * the IMGPROXY_KEY environment variable and encoded as base64url.
 * The key is read at call time — not only captured at module-load time — so
 * configuration loaded after this module is first required (e.g. dotenv
 * ordering) is still honored; the load-time capture remains as a fallback.
 * @param {string|Buffer} data - The payload to sign.
 * @returns {string} The base64url-encoded HMAC digest.
 */
const createHmacSha256 = (data) =>
  crypto.createHmac("sha256", process.env.IMGPROXY_KEY ?? imgproxyKey).update(data).digest("base64url");
module.exports = createHmacSha256;

View File

@@ -0,0 +1,17 @@
/**
 * @description Maps a MIME type string onto the media resource categories used
 * by the upload pipeline ("image", "video", "raw", or "auto" when unknown).
 * @note Also needs to be updated in the mobile app utility.
 * @param {string} [filetype] - MIME type such as "image/png" or "application/zip".
 * @returns {string} The resource category.
 */
const determineFileType = (filetype) => {
  if (!filetype) return "auto";
  // PDFs are intentionally treated as images so they can be rendered/previewed.
  if (filetype.startsWith("application/pdf")) return "image";
  const prefixToKind = [
    ["image", "image"],
    ["video", "video"],
    ["application", "raw"]
  ];
  for (const [prefix, kind] of prefixToKind) {
    if (filetype.startsWith(prefix)) return kind;
  }
  return "auto";
};
module.exports = determineFileType;

View File

@@ -0,0 +1,23 @@
/**
 * Middleware guarding Parts Management Integration routes.
 * Fails closed when the shared secret is not configured, and rejects any
 * request whose header does not strictly match the configured secret.
 * @param req
 * @param res
 * @param next
 * @returns {*}
 */
const partsManagementIntegrationMiddleware = (req, res, next) => {
  const expected = process.env.PARTS_MANAGEMENT_INTEGRATION_SECRET;
  const isConfigured = typeof expected === "string" && expected.length > 0;
  if (!isConfigured) {
    // Fail closed: an unset/empty secret must never authorize everyone.
    return res.status(500).send("Server misconfiguration");
  }
  const provided = req.headers["parts-management-integration-secret"];
  const isAuthorized = typeof provided === "string" && provided.trim() === expected;
  if (!isAuthorized) {
    // NOTE(review): a constant-time comparison (crypto.timingSafeEqual) would
    // be preferable for secret checks — confirm before changing behavior.
    return res.status(401).send("Unauthorized");
  }
  req.isPartsManagementIntegrationAuthorized = true;
  next();
};
module.exports = partsManagementIntegrationMiddleware;

View File

@@ -0,0 +1,20 @@
/**
 * VSSTA Integration Middleware.
 * Fails closed if VSSTA_INTEGRATION_SECRET is missing or empty, and rejects
 * requests whose header does not strictly match the configured secret.
 * @param req
 * @param res
 * @param next
 * @returns {*}
 */
const vsstaIntegrationMiddleware = (req, res, next) => {
  const expected = process.env.VSSTA_INTEGRATION_SECRET;
  const isConfigured = typeof expected === "string" && expected.length > 0;
  if (!isConfigured) {
    // Fail closed: an unset/empty secret must never authorize everyone.
    return res.status(500).send("Server misconfiguration");
  }
  const provided = req.headers["vssta-integration-secret"];
  const isAuthorized = typeof provided === "string" && provided.trim() === expected;
  if (!isAuthorized) {
    // NOTE(review): a constant-time comparison (crypto.timingSafeEqual) would
    // be preferable for secret checks — confirm before changing behavior.
    return res.status(401).send("Unauthorized");
  }
  req.isVsstaIntegrationAuthorized = true;
  next();
};
module.exports = vsstaIntegrationMiddleware;

View File

@@ -0,0 +1,132 @@
/**
* @module autoAddWatchers
* @description
* This module handles automatically adding watchers to new jobs based on the notifications_autoadd
* boolean field in the associations table and the notification_followers JSON field in the bodyshops table.
* It ensures users are not added twice and logs the process.
*/
const { client: gqlClient } = require("../graphql-client/graphql-client");
const { isEmpty } = require("lodash");
const {
GET_JOB_WATCHERS_MINIMAL,
GET_NOTIFICATION_WATCHERS,
INSERT_JOB_WATCHERS
} = require("../graphql-client/queries");
// If true, the user who commits the action will NOT receive notifications; if false, they will.
const FILTER_SELF_FROM_WATCHERS = process.env?.FILTER_SELF_FROM_WATCHERS !== "false";
/**
 * Adds watchers to a newly inserted job, combining users flagged with
 * notifications_autoadd on their association and the shop-level
 * notification_followers list. Existing watchers are never duplicated, and the
 * acting user is optionally excluded via FILTER_SELF_FROM_WATCHERS.
 *
 * @param {Object} req - The request object containing event data and logger.
 * @returns {Promise<void>} Resolves when watchers are added or if no action is needed.
 * @throws {Error} If critical data (e.g., jobId, shopId) is missing.
 */
const autoAddWatchers = async (req) => {
  const { event, trigger } = req.body;
  const {
    logger,
    sessionUtils: { getBodyshopFromRedis }
  } = req;

  // Only react to the INSERT fired by the notifications_jobs_autoadd trigger.
  if (trigger?.name !== "notifications_jobs_autoadd" || event.op !== "INSERT" || event.data.old) {
    return;
  }

  const insertedRow = event?.data?.new;
  const jobId = insertedRow?.id;
  const shopId = insertedRow?.shopid;
  const roNumber = insertedRow?.ro_number || "unknown";

  if (!jobId || !shopId) {
    throw new Error(`Missing jobId (${jobId}) or shopId (${shopId}) for auto-add watchers`);
  }

  const sessionRole = event?.session_variables?.["x-hasura-role"];
  const sessionUserId = event?.session_variables?.["x-hasura-user-id"];

  try {
    // Shop config comes from the Redis cache; bail unless followers is an array.
    const shop = await getBodyshopFromRedis(shopId);
    const followers = shop?.notification_followers;
    if (!Array.isArray(followers)) {
      return;
    }

    // Fetch notification config and current watcher list in parallel.
    const [watcherConfig, currentWatchers] = await Promise.all([
      gqlClient.request(GET_NOTIFICATION_WATCHERS, {
        shopId,
        employeeIds: followers.filter((id) => id)
      }),
      gqlClient.request(GET_JOB_WATCHERS_MINIMAL, { jobid: jobId })
    ]);

    // Merge auto-add association users and shop-level followers, first
    // occurrence wins, keyed (deduplicated) by email.
    const candidatesByEmail = new Map();
    for (const assoc of watcherConfig?.associations ?? []) {
      if (!candidatesByEmail.has(assoc.useremail)) {
        candidatesByEmail.set(assoc.useremail, { email: assoc.useremail, associationId: assoc.id });
      }
    }
    for (const employee of watcherConfig?.employees ?? []) {
      if (employee.user_email && !candidatesByEmail.has(employee.user_email)) {
        candidatesByEmail.set(employee.user_email, { email: employee.user_email, associationId: null });
      }
    }
    if (candidatesByEmail.size === 0) {
      return;
    }

    const alreadyWatching = new Set((currentWatchers?.job_watchers ?? []).map((w) => w.user_email));

    // Resolve the acting user's email once (the lookup is loop-invariant) when
    // self-filtering applies; null means "do not filter anyone out".
    let selfEmail = null;
    if (FILTER_SELF_FROM_WATCHERS && sessionRole === "user") {
      const selfWatcher = currentWatchers?.job_watchers?.find((w) => w.user?.authid === sessionUserId);
      selfEmail = selfWatcher ? selfWatcher.user_email : null;
    }

    const watchersToInsert = [];
    for (const { email } of candidatesByEmail.values()) {
      if (alreadyWatching.has(email)) continue;
      if (selfEmail !== null && email === selfEmail) continue;
      watchersToInsert.push({ jobid: jobId, user_email: email });
    }

    if (watchersToInsert.length === 0) {
      return;
    }

    await gqlClient.request(INSERT_JOB_WATCHERS, { watchers: watchersToInsert });
  } catch (error) {
    logger.log("Error adding auto-add watchers", "error", "notifications", null, {
      message: error?.message,
      stack: error?.stack,
      jobId,
      roNumber
    });
    throw error; // Re-throw to ensure the error is logged in the handler
  }
};
module.exports = { autoAddWatchers };

View File

@@ -6,6 +6,7 @@
*/
const scenarioParser = require("./scenarioParser");
const { autoAddWatchers } = require("./autoAddWatchers"); // New module
/**
* Processes a notification event by invoking the scenario parser.
@@ -144,15 +145,70 @@ const handleNotesChange = async (req, res) =>
/**
 * Handle payments change notifications by delegating to the shared
 * notification-event processor (the jobid is read from the event payload
 * at the dotted path given as the third argument).
 *
 * @param {Object} req - Express request object.
 * @param {Object} res - Express response object.
 * @returns {Promise<Object>} JSON response with a success message.
 */
const handlePaymentsChange = async (req, res) =>
  processNotificationEvent(req, res, "req.body.event.new.jobid", "Payments Changed Notification Event Handled.");
/**
 * Emit a websocket event describing a task INSERT/UPDATE so connected
 * bodyshop clients can refresh their task lists in real time.
 *
 * Soft-delete transitions are mapped to logical event types:
 * created, updated, or deleted.
 *
 * @param {Object} req - Express request carrying logger, ioRedis and ioHelpers.
 */
const handleTaskSocketEmit = (req) => {
  const {
    logger,
    ioRedis,
    ioHelpers: { getBodyshopRoom }
  } = req;
  const { op, data } = req.body.event;
  let payload;
  let eventType;
  let shopId;
  switch (op) {
    case "INSERT": {
      payload = data.new;
      if (payload.deleted) {
        // An INSERT arriving already soft-deleted is unexpected; log and emit nothing.
        logger.log("tasks-event-insert-deleted", "warn", "notifications", null, { id: payload.id });
      } else {
        eventType = "task-created";
        shopId = payload.bodyshopid;
      }
      break;
    }
    case "UPDATE": {
      const { new: current, old: previous } = data;
      payload = current;
      shopId = current.bodyshopid;
      if (current.deleted && !previous.deleted) {
        // Soft delete: send only the fields clients need to remove the task.
        eventType = "task-deleted";
        payload = { id: current.id, assigned_to: current.assigned_to };
      } else if (!current.deleted && previous.deleted) {
        // Restoring a soft-deleted task behaves like a create.
        eventType = "task-created";
      } else if (!current.deleted) {
        eventType = "task-updated";
      }
      break;
    }
    default:
      logger.log("tasks-event-unknown-op", "warn", "notifications", null, { op });
  }
  if (shopId && ioRedis && eventType) {
    ioRedis.to(getBodyshopRoom(shopId)).emit("bodyshop-message", { type: eventType, payload });
    logger.log("tasks-event-emitted", "info", "notifications", null, { type: eventType, bodyshopId: shopId });
  } else if (eventType) {
    // We decided an event should go out but lack the room or the socket server.
    logger.log("tasks-event-missing-data", "error", "notifications", null, {
      bodyshopId: shopId,
      hasIo: !!ioRedis,
      type: eventType
    });
  }
};
/**
 * Handle tasks change notifications.
 * Note: this also handles task center notifications.
 *
 * @param {Object} req - Express request object.
 * @param {Object} res - Express response object.
 * @returns {Promise<Object>} JSON response with a success message.
 */
const handleTasksChange = async (req, res) => {
  // Start notification processing first (it owns the HTTP response), then
  // emit the task socket event immediately rather than after processing.
  const notificationProcessing = processNotificationEvent(
    req,
    res,
    "req.body.event.new.jobid",
    "Tasks Notifications Event Handled."
  );
  handleTaskSocketEmit(req);
  // Await the notification work so rejections propagate to Express error
  // handling instead of becoming an unhandled promise rejection
  // (previously the promise was fire-and-forget).
  return notificationProcessing;
};
/**
* Handle time tickets change notifications.
@@ -185,6 +241,27 @@ const handlePartsDispatchChange = (req, res) => res.status(200).json({ message:
*/
const handlePartsOrderChange = (req, res) => res.status(200).json({ message: "Parts Order change handled." });
/**
 * Handle auto-add watchers for new jobs.
 *
 * Acknowledges the event immediately and lets the watcher insertion run in
 * the background; failures are only logged, never surfaced to the caller.
 *
 * @param {Object} req - Express request object.
 * @param {Object} res - Express response object.
 * @returns {Promise<Object>} JSON response with a success message.
 */
const handleAutoAddWatchers = async (req, res) => {
  const { logger } = req;
  // Background failure handler: record the error without failing the request.
  const logFailure = (error) => {
    logger.log("auto-add-watchers-error", "error", "notifications", null, {
      message: error?.message,
      stack: error?.stack
    });
  };
  // Deliberately not awaited — fire-and-forget with logged failures.
  autoAddWatchers(req).catch(logFailure);
  return res.status(200).json({ message: "Auto-Add Watchers Event Handled." });
};
module.exports = {
handleJobsChange,
handleBillsChange,
@@ -195,5 +272,6 @@ module.exports = {
handlePartsOrderChange,
handlePaymentsChange,
handleTasksChange,
handleTimeTicketsChange
handleTimeTicketsChange,
handleAutoAddWatchers
};

View File

@@ -133,11 +133,19 @@ const loadEmailQueue = async ({ pubClient, logger }) => {
subHeader: `Dear ${firstName},`,
dateLine: moment().tz(timezone).format("MM/DD/YYYY hh:mm a"),
body: `
<p>There have been updates to job ${jobRoNumber || "N/A"} at ${bodyShopName}:</p><br/>
<ul>
${messages.map((msg) => `<li>${msg}</li>`).join("")}
</ul><br/><br/>
<p><a href="${InstanceEndpoints()}/manage/jobs/${jobId}">Please check the job for more details.</a></p>
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 100%;">There have been updates to job ${jobRoNumber || "N/A"} at ${bodyShopName}:</p>
</td></tr></table></th>
</tr></tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<ul style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 1%; padding-left: 30px;">
${messages.map((msg) => `<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">${msg}</li>`).join("")}
</ul>
</td></tr></table></th>
</tr><tbody></table>
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;"><a href="${InstanceEndpoints()}/manage/jobs/${jobId}" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">Please check the job for more details.</a></p>
`
});
await sendTaskEmail({
@@ -226,6 +234,7 @@ const getQueue = () => {
* @param {Object} options.logger - Logger instance for logging dispatch events.
* @returns {Promise<void>} Resolves when all notifications are added to the queue.
*/
// eslint-disable-next-line no-unused-vars
const dispatchEmailsToQueue = async ({ emailsToDispatch, logger }) => {
const emailAddQueue = getQueue();

View File

@@ -182,7 +182,7 @@ const newMediaAddedReassignedBuilder = (data) => {
: data.changedFields?.jobid && data.changedFields.jobid.old !== data.changedFields.jobid.new
? "moved to this job"
: "updated";
const body = `An ${mediaType} has been ${action}.`;
const body = `A ${mediaType} has been ${action}.`;
return buildNotification(data, "notifications.job.newMediaAdded", body, {
mediaType,

View File

@@ -63,7 +63,9 @@ const scenarioParser = async (req, jobIdField) => {
}
if (!jobId) {
logger.log(`No jobId found using path "${jobIdField}", skipping notification parsing`, "info", "notifications");
if (process?.env?.NODE_ENV === "development") {
logger.log(`No jobId found using path "${jobIdField}", skipping notification parsing`, "info", "notifications");
}
return;
}
@@ -88,7 +90,9 @@ const scenarioParser = async (req, jobIdField) => {
// Exit early if no job watchers are found for this job
if (isEmpty(jobWatchers)) {
logger.log(`No watchers found for jobId "${jobId}", skipping notification parsing`, "info", "notifications");
if (process?.env?.NODE_ENV === "development") {
logger.log(`No watchers found for jobId "${jobId}", skipping notification parsing`, "info", "notifications");
}
return;
}
@@ -130,11 +134,13 @@ const scenarioParser = async (req, jobIdField) => {
// Exit early if no matching scenarios are identified
if (isEmpty(matchingScenarios)) {
logger.log(
`No matching scenarios found for jobId "${jobId}", skipping notification dispatch`,
"info",
"notifications"
);
if (process?.env?.NODE_ENV === "development") {
logger.log(
`No matching scenarios found for jobId "${jobId}", skipping notification dispatch`,
"info",
"notifications"
);
}
return;
}
@@ -157,11 +163,13 @@ const scenarioParser = async (req, jobIdField) => {
// Exit early if no notification associations are found
if (isEmpty(associationsData?.associations)) {
logger.log(
`No notification associations found for jobId "${jobId}", skipping notification dispatch`,
"info",
"notifications"
);
if (process?.env?.NODE_ENV === "development") {
logger.log(
`No notification associations found for jobId "${jobId}", skipping notification dispatch`,
"info",
"notifications"
);
}
return;
}
@@ -196,11 +204,13 @@ const scenarioParser = async (req, jobIdField) => {
// Exit early if no scenarios have eligible watchers after filtering
if (isEmpty(finalScenarioData?.matchingScenarios)) {
logger.log(
`No eligible watchers after filtering for jobId "${jobId}", skipping notification dispatch`,
"info",
"notifications"
);
if (process?.env?.NODE_ENV === "development") {
logger.log(
`No eligible watchers after filtering for jobId "${jobId}", skipping notification dispatch`,
"info",
"notifications"
);
}
return;
}
@@ -259,7 +269,9 @@ const scenarioParser = async (req, jobIdField) => {
}
if (isEmpty(scenariosToDispatch)) {
logger.log(`No scenarios to dispatch for jobId "${jobId}" after building`, "info", "notifications");
if (process?.env?.NODE_ENV === "development") {
logger.log(`No scenarios to dispatch for jobId "${jobId}" after building`, "info", "notifications");
}
return;
}

View File

@@ -64,7 +64,7 @@ async function OpenSearchUpdateHandler(req, res) {
document = pick(req.body.event.data.new, ["id", "ownr_fn", "ownr_ln", "ownr_co_nm", "ownr_ph1", "ownr_ph2"]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "bills":
case "bills": {
const bill = await client.request(
`query ADMIN_GET_BILL_BY_ID($billId: uuid!) {
bills_by_pk(id: $billId) {
@@ -97,7 +97,8 @@ async function OpenSearchUpdateHandler(req, res) {
bodyshopid: bill.bills_by_pk.job.shopid
};
break;
case "payments":
}
case "payments": {
// Query to get the job and RO number
const payment = await client.request(
@@ -141,6 +142,7 @@ async function OpenSearchUpdateHandler(req, res) {
bodyshopid: payment.payments_by_pk.job.shopid
};
break;
}
}
const payload = {
id: req.body.event.data.new.id,
@@ -255,6 +257,7 @@ async function OpenSearchSearchHandler(req, res) {
"*ownr_co_nm^8",
"*ownr_ph1^8",
"*ownr_ph2^8",
"*vendor.name^8",
"*comment^6"
// "*"
]

View File

@@ -1,11 +1,10 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const _ = require("lodash");
const rdiff = require("recursive-diff");
const logger = require("../utils/logger");
const { json } = require("body-parser");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";

View File

@@ -1,16 +1,11 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../utils/logger");
//const inlineCssTool = require("inline-css");
const juice = require("juice");
exports.inlinecss = async (req, res) => {
//Perform request validation
exports.inlineCSS = async (req, res) => {
const { logger } = req;
const { html } = req.body;
logger.log("email-inline-css", "DEBUG", req.user.email, null, null);
const { html, url } = req.body;
try {
const inlinedHtml = juice(html, {
applyAttributesTableElements: false,
@@ -24,15 +19,4 @@ exports.inlinecss = async (req, res) => {
});
res.send(error.message);
}
// inlineCssTool(html, { url: url })
// .then((inlinedHtml) => {
// res.send(inlinedHtml);
// })
// .catch((error) => {
// logger.log("email-inline-css-error", "ERROR", req.user.email, null, {
// error
// });
// });
};

View File

@@ -2,7 +2,7 @@ const express = require("express");
const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { createAssociation, createShop, updateShop, updateCounter } = require("../admin/adminops");
const { updateUser, getUser, createUser } = require("../firebase/firebase-handler");
const { updateUser, getUser, createUser, getWelcomeEmail, getResetLink } = require("../firebase/firebase-handler");
const validateAdminMiddleware = require("../middleware/validateAdminMiddleware");
router.use(validateFirebaseIdTokenMiddleware);
@@ -15,5 +15,7 @@ router.post("/updatecounter", updateCounter);
router.post("/updateuser", updateUser);
router.post("/getuser", getUser);
router.post("/createuser", createUser);
router.post("/sendwelcome", getWelcomeEmail);
router.post("/resetlink", getResetLink);
module.exports = router;

View File

@@ -1,11 +1,13 @@
const express = require("express");
const router = express.Router();
const { autohouse, claimscorp, chatter, kaizen, usageReport } = require("../data/data");
const { autohouse, claimscorp, chatter, kaizen, usageReport, podium, carfax } = require("../data/data");
router.post("/ah", autohouse);
router.post("/cc", claimscorp);
router.post("/chatter", chatter);
router.post("/kaizen", kaizen);
router.post("/usagereport", usageReport);
router.post("/podium", podium);
router.post("/carfax", carfax);
module.exports = router;

View File

@@ -0,0 +1,27 @@
const express = require("express");

const router = express.Router();

// Integration routes are gated on their shared secrets: a route is only
// mounted (and its modules only required) when the matching secret is set.
const { VSSTA_INTEGRATION_SECRET, PARTS_MANAGEMENT_INTEGRATION_SECRET } = process.env;

/**
 * @param {string|undefined} secret - Candidate secret from the environment.
 * @returns {boolean} True when the secret is a non-empty string.
 */
const isSecretConfigured = (secret) => typeof secret === "string" && secret.length > 0;

if (isSecretConfigured(VSSTA_INTEGRATION_SECRET)) {
  // Lazy-require so VSSTA modules are never loaded without their secret.
  const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegrationRoute");
  const vsstaMiddleware = require("../middleware/vsstaIntegrationMiddleware");
  router.post("/vssta", vsstaMiddleware, vsstaIntegration);
} else {
  console.warn("VSSTA_INTEGRATION_SECRET is not set — skipping /vssta integration route");
}

if (isSecretConfigured(PARTS_MANAGEMENT_INTEGRATION_SECRET)) {
  // Lazy-require the parts-management modules for the same reason.
  const partsManagementProvisioning = require("../integrations/partsManagement/partsManagementProvisioning");
  const partsManagementIntegrationMiddleware = require("../middleware/partsManagementIntegrationMiddleware");
  router.post("/parts-management/provision", partsManagementIntegrationMiddleware, partsManagementProvisioning);
} else {
  console.warn("PARTS_MANAGEMENT_INTEGRATION_SECRET is not set — skipping /parts-management/provision route");
}

module.exports = router;

View File

@@ -1,6 +1,5 @@
const express = require("express");
const router = express.Router();
const job = require("../job/job");
const ppc = require("../ccc/partspricechange");
const { partsScan } = require("../parts-scan/parts-scan");
const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMIddleware");

View File

@@ -3,7 +3,6 @@ const router = express.Router();
const logger = require("../../server/utils/logger");
const sendEmail = require("../email/sendemail");
const data = require("../data/data");
const bodyParser = require("body-parser");
const ioevent = require("../ioevent/ioevent");
const taskHandler = require("../tasks/tasks");
const os = require("../opensearch/os-handler");
@@ -123,7 +122,7 @@ router.post("/ioevent", ioevent.default);
// Email
router.post("/sendemail", validateFirebaseIdTokenMiddleware, sendEmail.sendEmail);
router.post("/emailbounce", bodyParser.text(), sendEmail.emailBounce);
router.post("/emailbounce", express.text(), sendEmail.emailBounce);
// Tasks Email Handler
router.post("/tasks-assigned-handler", eventAuthorizationMiddleware, taskAssignedEmail);
@@ -139,6 +138,9 @@ router.post("/canvastest", validateFirebaseIdTokenMiddleware, canvastest);
// Alert Check
router.post("/alertcheck", eventAuthorizationMiddleware, alertCheck);
//EMS Upload
router.post("/emsupload", validateFirebaseIdTokenMiddleware, data.emsUpload);
// Redis Cache Routes
router.post("/bodyshop-cache", eventAuthorizationMiddleware, updateBodyshopCache);

View File

@@ -12,7 +12,8 @@ const {
handleNotesChange,
handlePaymentsChange,
handleDocumentsChange,
handleJobLinesChange
handleJobLinesChange,
handleAutoAddWatchers
} = require("../notifications/eventHandlers");
const router = express.Router();
@@ -33,5 +34,6 @@ router.post("/events/handleNotesChange", eventAuthorizationMiddleware, handleNot
router.post("/events/handlePaymentsChange", eventAuthorizationMiddleware, handlePaymentsChange);
router.post("/events/handleDocumentsChange", eventAuthorizationMiddleware, handleDocumentsChange);
router.post("/events/handleJobLinesChange", eventAuthorizationMiddleware, handleJobLinesChange);
router.post("/events/handleAutoAdd", eventAuthorizationMiddleware, handleAutoAddWatchers);
module.exports = router;

View File

@@ -1,12 +1,12 @@
const express = require("express");
const router = express.Router();
const { inlinecss } = require("../render/inlinecss");
const { inlineCSS } = require("../render/inlinecss");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { canvas } = require("../render/canvas-handler");
const validateCanvasInputMiddleware = require("../middleware/validateCanvasInputMiddleware");
// Define the route for inline CSS rendering
router.post("/inlinecss", validateFirebaseIdTokenMiddleware, inlinecss);
router.post("/inlinecss", validateFirebaseIdTokenMiddleware, inlineCSS);
router.post("/canvas-skia", validateFirebaseIdTokenMiddleware, validateCanvasInputMiddleware, canvas);
router.post("/canvas", validateFirebaseIdTokenMiddleware, validateCanvasInputMiddleware, canvas);

View File

@@ -7,7 +7,7 @@ const { status, markConversationRead } = require("../sms/status");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
// Twilio Webhook Middleware for production
// TODO: Look into this because it technically is never validating anything
// TODO: This is never actually doing anything, we should probably verify
const twilioWebhookMiddleware = twilio.webhook({ validate: process.env.NODE_ENV === "PRODUCTION" });
router.post("/receive", twilioWebhookMiddleware, receive);

View File

@@ -0,0 +1,11 @@
const express = require("express");
const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { cannySsoHandler } = require("../sso/canny");
// All SSO routes require an authenticated Firebase user.
router.use(validateFirebaseIdTokenMiddleware);
// Canny SSO token exchange; the handler needs a user-scoped GraphQL client.
router.post("/canny", withUserGraphQLClientMiddleware, cannySsoHandler);
module.exports = router;

View File

@@ -1,17 +1,36 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const {
FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID,
UNARCHIVE_CONVERSATION,
CREATE_CONVERSATION,
INSERT_MESSAGE,
CHECK_PHONE_NUMBER_OPT_OUT,
DELETE_PHONE_NUMBER_OPT_OUT,
INSERT_PHONE_NUMBER_OPT_OUT
} = require("../graphql-client/queries");
const { phone } = require("phone");
const { admin } = require("../firebase/firebase-handler");
const logger = require("../utils/logger");
const InstanceManager = require("../utils/instanceMgr").default;
exports.receive = async (req, res) => {
// Note: When we handle different languages, we might need to adjust these keywords accordingly.
const optInKeywords = ["START", "YES", "UNSTOP"];
const optOutKeywords = ["STOP", "STOPALL", "UNSUBSCRIBE", "CANCEL", "END", "QUIT", "REVOKE", "OPTOUT"];
// System Message text, will also need to be localized if we support multiple languages
const systemMessageOptions = {
optIn: "Customer has opted-in",
optOut: "Customer has opted-out"
};
/**
* Receive SMS messages from Twilio and process them
* @param req
* @param res
* @returns {Promise<*>}
*/
const receive = async (req, res) => {
const {
logger,
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
@@ -20,7 +39,7 @@ exports.receive = async (req, res) => {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body)
image_path: generateMediaArray(req.body, logger)
};
logger.log("sms-inbound", "DEBUG", "api", null, loggerData);
@@ -35,7 +54,7 @@ exports.receive = async (req, res) => {
try {
// Step 1: Find the bodyshop and existing conversation
const response = await client.request(queries.FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
const response = await client.request(FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
mssid: req.body.MessagingServiceSid,
phone: phone(req.body.From).phoneNumber
});
@@ -45,37 +64,27 @@ exports.receive = async (req, res) => {
}
const bodyshop = response.bodyshops[0];
const normalizedPhone = phone(req.body.From).phoneNumber.replace(/^\+1/, ""); // Normalize phone number (remove +1 for CA numbers)
const messageText = (req.body.Body || "").trim().toUpperCase();
// Sort conversations by `updated_at` (or `created_at`) and pick the last one
// Step 2: Process conversation
const sortedConversations = bodyshop.conversations.sort((a, b) => new Date(a.created_at) - new Date(b.created_at));
const existingConversation = sortedConversations.length
? sortedConversations[sortedConversations.length - 1]
: null;
let conversationid;
let newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
isoutbound: false,
userid: null // Add additional fields as necessary
};
if (existingConversation) {
// Use the existing conversation
conversationid = existingConversation.id;
// Unarchive the conversation if necessary
if (existingConversation.archived) {
await client.request(queries.UNARCHIVE_CONVERSATION, {
await client.request(UNARCHIVE_CONVERSATION, {
id: conversationid,
archived: false
});
}
} else {
// Create a new conversation
const newConversationResponse = await client.request(queries.CREATE_CONVERSATION, {
const newConversationResponse = await client.request(CREATE_CONVERSATION, {
conversation: {
bodyshopid: bodyshop.id,
phone_num: phone(req.body.From).phoneNumber,
@@ -86,13 +95,137 @@ exports.receive = async (req, res) => {
conversationid = createdConversation.id;
}
// Ensure `conversationid` is added to the message
newMessage.conversationid = conversationid;
// Step 3: Handle opt-in or opt-out keywords
let systemMessageText = "";
let socketEventType = "";
// Step 3: Insert the message into the conversation
const insertresp = await client.request(queries.INSERT_MESSAGE, {
if (optInKeywords.includes(messageText) || optOutKeywords.includes(messageText)) {
// Check if the phone number is in phone_number_opt_out
const optOutCheck = await client.request(CHECK_PHONE_NUMBER_OPT_OUT, {
bodyshopid: bodyshop.id,
phone_number: normalizedPhone
});
// Opt In
if (optInKeywords.includes(messageText)) {
// Handle opt-in
if (optOutCheck.phone_number_opt_out.length > 0) {
// Phone number is opted out; delete the record
const deleteResponse = await client.request(DELETE_PHONE_NUMBER_OPT_OUT, {
bodyshopid: bodyshop.id,
phone_number: normalizedPhone
});
logger.log("sms-opt-in-success", "INFO", "api", null, {
msid: req.body.SmsMessageSid,
bodyshopid: bodyshop.id,
phone_number: normalizedPhone,
affected_rows: deleteResponse.delete_phone_number_opt_out.affected_rows
});
systemMessageText = systemMessageOptions.optIn;
socketEventType = "phone-number-opted-in";
}
}
// Opt Out
else if (optOutKeywords.includes(messageText)) {
// Handle opt-out
if (optOutCheck.phone_number_opt_out.length === 0) {
// Phone number is not opted out; insert a new record
const now = new Date().toISOString();
const optOutInput = {
bodyshopid: bodyshop.id,
phone_number: normalizedPhone,
created_at: now,
updated_at: now
};
const insertResponse = await client.request(INSERT_PHONE_NUMBER_OPT_OUT, {
optOutInput: [optOutInput]
});
logger.log("sms-opt-out-success", "INFO", "api", null, {
msid: req.body.SmsMessageSid,
bodyshopid: bodyshop.id,
phone_number: normalizedPhone,
affected_rows: insertResponse.insert_phone_number_opt_out.affected_rows
});
systemMessageText = systemMessageOptions.optOut;
socketEventType = "phone-number-opted-out";
}
}
// Insert system message if an opt-in or opt-out action was taken
if (systemMessageText) {
const systemMessage = {
msid: `SYS_${req.body.SmsMessageSid}_${Date.now()}`, // Unique ID for system message
text: systemMessageText,
conversationid,
isoutbound: false,
userid: null,
image: false,
image_path: null,
is_system: true
};
const systemMessageResponse = await client.request(INSERT_MESSAGE, {
msg: systemMessage,
conversationid
});
const insertedSystemMessage = systemMessageResponse.insert_messages.returning[0];
// Emit WebSocket events for system message
const broadcastRoom = getBodyshopRoom(bodyshop.id);
const conversationRoom = getBodyshopConversationRoom({
bodyshopId: bodyshop.id,
conversationId: conversationid
});
const systemPayload = {
isoutbound: false,
conversationId: conversationid,
updated_at: insertedSystemMessage.updated_at,
msid: insertedSystemMessage.msid,
existingConversation: !!existingConversation,
newConversation: !existingConversation ? insertedSystemMessage.conversation : null
};
ioRedis.to(broadcastRoom).emit("new-message-summary", {
...systemPayload,
summary: true
});
ioRedis.to(conversationRoom).emit("new-message-detailed", {
newMessage: insertedSystemMessage,
...systemPayload,
summary: false
});
// Emit opt-in or opt-out event
ioRedis.to(broadcastRoom).emit(socketEventType, {
bodyshopid: bodyshop.id,
phone_number: normalizedPhone
});
}
}
// Step 4: Insert the original message
const newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body, logger),
isoutbound: false,
userid: null,
conversationid,
is_system: false
};
const insertresp = await client.request(INSERT_MESSAGE, {
msg: newMessage,
conversationid: conversationid
conversationid
});
const message = insertresp?.insert_messages?.returning?.[0];
@@ -102,8 +235,7 @@ exports.receive = async (req, res) => {
throw new Error("Conversation data is missing from the response.");
}
// Step 4: Notify clients through Redis
const broadcastRoom = getBodyshopRoom(conversation.bodyshop.id);
// Step 5: Notify clients for original message
const conversationRoom = getBodyshopConversationRoom({
bodyshopId: conversation.bodyshop.id,
conversationId: conversation.id
@@ -113,9 +245,11 @@ exports.receive = async (req, res) => {
isoutbound: false,
conversationId: conversation.id,
updated_at: message.updated_at,
msid: message.sid
msid: message.msid
};
const broadcastRoom = getBodyshopRoom(conversation.bodyshop.id);
ioRedis.to(broadcastRoom).emit("new-message-summary", {
...commonPayload,
existingConversation: !!existingConversation,
@@ -131,13 +265,13 @@ exports.receive = async (req, res) => {
summary: false
});
// Step 5: Send FCM notification
// Step 6: Send FCM notification
const fcmresp = await admin.messaging().send({
topic: `${message.conversation.bodyshop.imexshopid}-messaging`,
notification: {
title: InstanceManager({
imex: `ImEX Online Message - ${message.conversation.phone_num}`,
rome: `Rome Online Message - ${message.conversation.phone_num}`,
rome: `Rome Online Message - ${message.conversation.phone_num}`
}),
body: message.image_path ? `Image ${message.text}` : message.text
},
@@ -157,11 +291,17 @@ exports.receive = async (req, res) => {
res.status(200).send("");
} catch (e) {
handleError(req, e, res, "RECEIVE_MESSAGE");
handleError(req, e, res, "RECEIVE_MESSAGE", logger);
}
};
const generateMediaArray = (body) => {
/**
* Generate media array from the request body
* @param body
* @param logger
* @returns {null|*[]}
*/
const generateMediaArray = (body, logger) => {
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
const ret = [];
@@ -174,12 +314,20 @@ const generateMediaArray = (body) => {
}
};
const handleError = (req, error, res, context) => {
/**
* Handle error logging and response
* @param req
* @param error
* @param res
* @param context
* @param logger
*/
const handleError = (req, error, res, context, logger) => {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
image_path: generateMediaArray(req.body, logger),
messagingServiceSid: req.body.MessagingServiceSid,
context,
error
@@ -187,3 +335,7 @@ const handleError = (req, error, res, context) => {
res.status(500).json({ error: error.message || "Internal Server Error" });
};
module.exports = {
receive
};

View File

@@ -1,19 +1,20 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const twilio = require("twilio");
const { phone } = require("phone");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const { INSERT_MESSAGE } = require("../graphql-client/queries");
const client = twilio(process.env.TWILIO_AUTH_TOKEN, process.env.TWILIO_AUTH_KEY);
const gqlClient = require("../graphql-client/graphql-client").client;
exports.send = async (req, res) => {
/**
* Send an outbound SMS message
* @param req
* @param res
* @returns {Promise<void>}
*/
const send = async (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
const {
ioRedis,
logger,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
@@ -25,8 +26,8 @@ exports.send = async (req, res) => {
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
image: selectedMedia.length > 0,
image_path: selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
if (!to || !messagingServiceSid || (!body && selectedMedia.length === 0) || !conversationid) {
@@ -38,8 +39,8 @@ exports.send = async (req, res) => {
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
image: selectedMedia.length > 0,
image_path: selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
res.status(400).json({ success: false, message: "Missing required parameter(s)." });
return;
@@ -59,12 +60,15 @@ exports.send = async (req, res) => {
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
image: selectedMedia.length > 0,
image_path: selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
};
try {
const gqlResponse = await gqlClient.request(queries.INSERT_MESSAGE, { msg: newMessage, conversationid });
const gqlResponse = await gqlClient.request(INSERT_MESSAGE, {
msg: newMessage,
conversationid
});
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
@@ -111,3 +115,7 @@ exports.send = async (req, res) => {
res.status(500).json({ success: false, message: "Failed to send message through Twilio." });
}
};
module.exports = {
send
};

View File

@@ -1,14 +1,21 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const {
UPDATE_MESSAGE_STATUS,
MARK_MESSAGES_AS_READ,
INSERT_PHONE_NUMBER_OPT_OUT,
FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID
} = require("../graphql-client/queries");
const logger = require("../utils/logger");
const { phone } = require("phone");
exports.status = async (req, res) => {
const { SmsSid, SmsStatus } = req.body;
/**
* Handle the status of an SMS message
* @param req
* @param res
* @returns {Promise<*>}
*/
const status = async (req, res) => {
const { SmsSid, SmsStatus, ErrorCode, To, MessagingServiceSid } = req.body;
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
@@ -20,18 +27,76 @@ exports.status = async (req, res) => {
return res.status(200).json({ message: "Status 'queued' disregarded." });
}
// Handle ErrorCode 21610 (Attempt to send to unsubscribed recipient) first
if (ErrorCode === "21610" && To && MessagingServiceSid) {
try {
// Step 1: Find the bodyshop by MessagingServiceSid
const bodyshopResponse = await client.request(FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
mssid: MessagingServiceSid,
phone: phone(To).phoneNumber // Pass the normalized phone number as required
});
const bodyshop = bodyshopResponse.bodyshops[0];
if (!bodyshop) {
logger.log("sms-opt-out-error", "ERROR", "api", null, {
msid: SmsSid,
messagingServiceSid: MessagingServiceSid,
to: To,
error: "No matching bodyshop found"
});
} else {
// Step 2: Insert into phone_number_opt_out table
const now = new Date().toISOString();
const optOutInput = {
bodyshopid: bodyshop.id,
phone_number: phone(To).phoneNumber.replace(/^\+1/, ""), // Normalize phone number (remove +1 for CA numbers)
created_at: now,
updated_at: now
};
const optOutResponse = await client.request(INSERT_PHONE_NUMBER_OPT_OUT, {
optOutInput: [optOutInput]
});
logger.log("sms-opt-out-success", "INFO", null, null, {
msid: SmsSid,
bodyshopid: bodyshop.id,
phone_number: optOutInput.phone_number,
affected_rows: optOutResponse.insert_phone_number_opt_out.affected_rows
});
// Store bodyshopid for potential use in WebSocket notification
const broadcastRoom = getBodyshopRoom(bodyshop.id);
ioRedis.to(broadcastRoom).emit("phone-number-opted-out", {
bodyshopid: bodyshop.id,
phone_number: optOutInput.phone_number
// Note: conversationId is not included yet; will be set after message lookup
});
}
} catch (error) {
logger.log("sms-opt-out-error", "ERROR", "api", null, {
msid: SmsSid,
messagingServiceSid: MessagingServiceSid,
to: To,
error: error.message,
stack: error.stack
});
// Continue processing to update message status
}
}
// Update message status in the database
const response = await client.request(queries.UPDATE_MESSAGE_STATUS, {
const response = await client.request(UPDATE_MESSAGE_STATUS, {
msid: SmsSid,
fields: { status: SmsStatus }
});
const message = response.update_messages.returning[0];
const message = response.update_messages?.returning?.[0];
if (message) {
logger.log("sms-status-update", "DEBUG", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus }
status: SmsStatus
});
// Emit WebSocket event to notify the change in message status
@@ -46,26 +111,32 @@ exports.status = async (req, res) => {
type: "status-changed"
});
} else {
logger.log("sms-status-update-warning", "WARN", "api", null, {
logger.log("sms-status-update-warning", "WARN", null, null, {
msid: SmsSid,
fields: { status: SmsStatus },
warning: "No message returned from the database update."
status: SmsStatus,
warning: "No message found in database for update"
});
}
res.sendStatus(200);
} catch (error) {
} catch (err) {
logger.log("sms-status-update-error", "ERROR", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
stack: error.stack,
message: error.message
status: SmsStatus,
error: err.message,
stack: err.stack
});
res.status(500).json({ error: "Failed to update message status." });
}
};
exports.markConversationRead = async (req, res) => {
/**
* Mark a conversation as read
* @param req
* @param res
* @returns {Promise<*>}
*/
const markConversationRead = async (req, res) => {
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
@@ -80,7 +151,7 @@ exports.markConversationRead = async (req, res) => {
}
try {
const response = await client.request(queries.MARK_MESSAGES_AS_READ, {
const response = await client.request(MARK_MESSAGES_AS_READ, {
conversationId
});
@@ -104,3 +175,8 @@ exports.markConversationRead = async (req, res) => {
res.status(500).json({ error: "Failed to mark conversation as read." });
}
};
module.exports = {
status,
markConversationRead
};

24
server/sso/canny.js Normal file
View File

@@ -0,0 +1,24 @@
const logger = require("../utils/logger");
const jwt = require("jsonwebtoken");
/**
 * Express handler for Canny single sign-on.
 * Builds a minimal user payload from the authenticated request user and returns
 * it signed as an HS256 JWT (the token format Canny expects for SSO).
 * Responds 200 with the raw token string, or 500 with the error message.
 * @param req - Express request; assumes `req.user` (email, uid, displayName) was
 *              populated by upstream auth middleware — TODO confirm against router
 * @param res - Express response
 */
const cannySsoHandler = async (req, res) => {
  try {
    const userData = {
      //avatarURL: user.avatarURL, // optional, but preferred
      email: req.user.email,
      id: req.user.uid,
      // Fall back to email when the account has no display name set
      name: req.user.displayName || req.user.email
    };

    return res.status(200).send(jwt.sign(userData, process.env.CANNY_PRIVATE_KEY, { algorithm: "HS256" }));
  } catch (error) {
    // NOTE(review): level is lowercase "error" here while sibling handlers use
    // uppercase "ERROR" — confirm the logger treats these the same.
    logger.log("sso-canny-error", "error", req?.user?.email, null, {
      message: error.message,
      stack: error.stack
    });
    res.status(500).json({ error: error.message });
  }
};
module.exports = {
cannySsoHandler
};

View File

@@ -1,3 +1,11 @@
/**
* @module ioHelpers
* @param app
* @param api
* @param io
* @param logger
* @returns {{getBodyshopRoom: (function(*): string), getBodyshopConversationRoom: (function({bodyshopId: *, conversationId: *}): string)}}
*/
const applyIOHelpers = ({ app, api, io, logger }) => {
// Global Bodyshop Room
const getBodyshopRoom = (bodyshopId) => `bodyshop-broadcast-room:${bodyshopId}`;

View File

@@ -12,6 +12,9 @@ const { uploadFileToS3 } = require("./s3");
const { v4 } = require("uuid");
const { InstanceRegion } = require("./instanceMgr");
const getHostNameOrIP = require("./getHostNameOrIP");
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
@@ -99,13 +102,11 @@ const createLogger = () => {
const labelColor = "\x1b[33m"; // Yellow
const separatorColor = "\x1b[35m|\x1b[0m"; // Magenta for separators
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${
user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${
meta
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${meta
? `\n${separatorColor} ${labelColor}meta:\x1b[0m ${JSON.stringify(meta, null, 2)} ${separatorColor}`
: ""
}`;
}`;
})
)
})
@@ -194,9 +195,45 @@ const createLogger = () => {
winstonLogger.log(logEntry);
};
/**
 * Persist one integration-call record to the database via GraphQL
 * (INSERT_INTEGRATION_LOG). Best-effort: any failure is logged (console trace
 * plus a structured "integration-log-error" entry) and never rethrown, so
 * logging cannot break the calling integration flow.
 * @param {Object} entry
 * @param {string} entry.platform  - integration platform identifier
 * @param {string} entry.method    - API method that was invoked
 * @param {string} entry.name      - human-readable name of the call
 * @param {*} entry.jobid          - related job id, if any
 * @param {*} entry.paymentid      - related payment id, if any
 * @param {*} entry.billid         - related bill id, if any
 * @param {*} entry.status         - coerced to string; defaults to "0" when null/undefined
 * @param {*} entry.bodyshopid     - owning bodyshop id
 * @param {string} entry.email     - acting user's email (also used as the log's user field)
 * @returns {Promise<void>}
 */
const LogIntegrationCall = async ({ platform, method, name, jobid, paymentid, billid, status, bodyshopid, email }) => {
  try {
    //Insert the record.
    await client.request(queries.INSERT_INTEGRATION_LOG, {
      log: {
        platform,
        method,
        name,
        jobid,
        paymentid,
        billid,
        // Status column is a string; treat missing status as "0"
        status: status?.toString() ?? "0",
        bodyshopid,
        email
      }
    });
  } catch (error) {
    console.trace("Stack", error?.stack);
    // Echo the full call context into the structured log for debugging
    log("integration-log-error", "ERROR", email, null, {
      message: error?.message,
      stack: error?.stack,
      platform,
      method,
      name,
      jobid,
      paymentid,
      billid,
      status,
      bodyshopid,
      email
    });
  }
};
return {
log,
logger: winstonLogger
logger: winstonLogger,
LogIntegrationCall
};
} catch (e) {
console.error("Error setting up enhanced Logger, defaulting to console.: " + e?.message || "");

View File

@@ -9,6 +9,7 @@ const {
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { InstanceRegion } = require("./instanceMgr");
const { isString, isEmpty } = require("lodash");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const createS3Client = () => {
const S3Options = {
@@ -95,6 +96,17 @@ const createS3Client = () => {
throw error;
}
};
/**
 * Create a short-lived presigned URL that lets a client PUT an object directly
 * to S3; uploaded objects are stored with the INTELLIGENT_TIERING storage class.
 * @param {{bucketName: string, key: string}} params - target bucket and object key
 * @returns {Promise<string>} the presigned upload URL
 */
const getPresignedUrl = async ({ bucketName, key }) => {
  const command = new PutObjectCommand({
    Bucket: bucketName,
    Key: key,
    StorageClass: "INTELLIGENT_TIERING"
  });

  // NOTE(review): expiresIn is in seconds, so the URL is valid for 6 minutes —
  // confirm 360 was intended here rather than 3600 (1 hour).
  const presignedUrl = await getSignedUrl(s3Client, command, { expiresIn: 360 });

  return presignedUrl;
}
return {
uploadFileToS3,
downloadFileFromS3,
@@ -102,8 +114,12 @@ const createS3Client = () => {
deleteFileFromS3,
copyFileInS3,
fileExistsInS3,
getPresignedUrl,
...s3Client
};
};
module.exports = createS3Client();

View File

@@ -38,6 +38,7 @@ const redisSocketEvents = ({
try {
const user = await admin.auth().verifyIdToken(token);
socket.user = user;
socket.bodyshopId = bodyshopId;
await addUserSocketMapping(user.email, socket.id, bodyshopId);
next();
} catch (error) {
@@ -67,12 +68,8 @@ const redisSocketEvents = ({
return;
}
socket.user = user;
socket.bodyshopId = bodyshopId;
await refreshUserSocketTTL(user.email, bodyshopId);
createLogEvent(
socket,
"debug",
`Token updated successfully for socket ID: ${socket.id} (bodyshop: ${bodyshopId})`
);
socket.emit("token-updated", { success: true });
} catch (error) {
if (error.code === "auth/id-token-expired") {
@@ -94,7 +91,6 @@ const redisSocketEvents = ({
try {
const room = getBodyshopRoom(bodyshopUUID);
socket.join(room);
// createLogEvent(socket, "debug", `Client joined bodyshop room: ${room}`);
} catch (error) {
createLogEvent(socket, "error", `Error joining room: ${error}`);
}
@@ -104,7 +100,6 @@ const redisSocketEvents = ({
try {
const room = getBodyshopRoom(bodyshopUUID);
socket.leave(room);
createLogEvent(socket, "debug", `Client left bodyshop room: ${room}`);
} catch (error) {
createLogEvent(socket, "error", `Error joining room: ${error}`);
}
@@ -114,8 +109,6 @@ const redisSocketEvents = ({
try {
const room = getBodyshopRoom(bodyshopUUID);
io.to(room).emit("bodyshop-message", message);
// We do not need this as these can be debugged live
// createLogEvent(socket, "debug", `Broadcast message to bodyshop ${room}`);
} catch (error) {
createLogEvent(socket, "error", `Error getting room: ${error}`);
}
@@ -201,7 +194,6 @@ const redisSocketEvents = ({
const registerSyncEvents = (socket) => {
socket.on("sync-notification-read", async ({ email, bodyshopId, notificationId }) => {
try {
const userEmail = socket.user.email;
const socketMapping = await getUserSocketMappingByBodyshop(email, bodyshopId);
const timestamp = new Date().toISOString();
@@ -212,11 +204,6 @@ const redisSocketEvents = ({
io.to(socketId).emit("sync-notification-read", { notificationId, timestamp });
}
});
createLogEvent(
socket,
"debug",
`Synced notification ${notificationId} read for ${userEmail} in bodyshop ${bodyshopId}`
);
}
} catch (error) {
createLogEvent(socket, "error", `Error syncing notification read: ${error.message}`);
@@ -235,7 +222,6 @@ const redisSocketEvents = ({
io.to(socketId).emit("sync-all-notifications-read", { timestamp });
}
});
createLogEvent(socket, "debug", `Synced all notifications read for ${email} in bodyshop ${bodyshopId}`);
}
} catch (error) {
createLogEvent(socket, "error", `Error syncing all notifications read: ${error.message}`);
@@ -301,12 +287,34 @@ const redisSocketEvents = ({
});
};
// Task Events
/**
 * Register task lifecycle socket events. Each handler re-broadcasts the
 * incoming payload to every client in this socket's bodyshop room as a typed
 * "bodyshop-message" envelope.
 * @param socket - connected socket; relies on socket.bodyshopId having been
 *                 set during connection auth — TODO confirm it is always present
 */
const registerTaskEvents = (socket) => {
  socket.on("task-created", (payload) => {
    if (!payload) return;
    const room = getBodyshopRoom(socket.bodyshopId);
    io.to(room).emit("bodyshop-message", { type: "task-created", payload });
  });

  socket.on("task-updated", (payload) => {
    if (!payload) return;
    const room = getBodyshopRoom(socket.bodyshopId);
    io.to(room).emit("bodyshop-message", { type: "task-updated", payload });
  });

  socket.on("task-deleted", (payload) => {
    // Deletions additionally require an id so receivers can remove the record
    if (!payload || !payload.id) return;
    const room = getBodyshopRoom(socket.bodyshopId);
    io.to(room).emit("bodyshop-message", { type: "task-deleted", payload });
  });
};
// Call Handlers
registerRoomAndBroadcastEvents(socket);
registerUpdateEvents(socket);
registerMessagingEvents(socket);
registerDisconnectEvents(socket);
registerSyncEvents(socket);
registerTaskEvents(socket);
registerFortellisEvents(socket);
};