- merge adjustments

Signed-off-by: Dave Richer <dave@imexsystems.ca>
This commit is contained in:
Dave Richer
2024-01-26 21:20:49 -05:00
142 changed files with 54394 additions and 3099 deletions

View File

@@ -166,7 +166,7 @@ async function CheckForErrors(socket, response) {
CdkBase.createLogEvent(
socket,
"DEBUG",
`Succesful response from DMS. ${response.Message || ""}`
`Successful response from DMS. ${response.Message || ""}`
);
} else {
CdkBase.createLogEvent(

View File

@@ -18,10 +18,10 @@ const {
} = require("./qbo-callback");
const OAuthClient = require("intuit-oauth");
const moment = require("moment-timezone");
const GraphQLClient = require("graphql-request").GraphQLClient;
const findTaxCode = require("../qb-receivables-lines").findTaxCode;
exports.default = async (req, res) => {
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
@@ -30,29 +30,31 @@ exports.default = async (req, res) => {
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true,
});
try {
//Fetch the API Access Tokens & Set them for the session.
const response = await apiGqlClient.request(queries.GET_QBO_AUTH, {
email: req.user.email,
});
const { qbo_realmId } = response.associations[0];
oauthClient.setToken(response.associations[0].qbo_auth);
if (!qbo_realmId) {
res.status(401).json({ error: "No company associated." });
return;
}
await refreshOauthToken(oauthClient, req);
const BearerToken = req.headers.authorization;
const { bills: billsToQuery, elgen } = req.body;
//Query Job Info
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payable-create", "DEBUG", req.user.email, billsToQuery);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {

View File

@@ -51,15 +51,13 @@ exports.default = async (req, res) => {
}
await refreshOauthToken(oauthClient, req);
const BearerToken = req.headers.authorization;
const { payments: paymentsToQuery, elgen } = req.body;
//Query Job Info
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payment-create", "DEBUG", req.user.email, paymentsToQuery);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_PAYMENTS_FOR_EXPORT, {

View File

@@ -45,15 +45,14 @@ exports.default = async (req, res) => {
await refreshOauthToken(oauthClient, req);
const BearerToken = req.headers.authorization;
const { jobIds, elgen } = req.body;
//Query Job Info
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-receivable-create", "DEBUG", req.user.email, jobIds);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, {

View File

@@ -3,10 +3,11 @@ const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
var builder = require("xmlbuilder2");
const builder = require("xmlbuilder2");
const QbXmlUtils = require("./qbxml-utils");
const moment = require("moment-timezone");
const logger = require("../../utils/logger");
const logger = require('../../utils/logger');
require("dotenv").config({
path: path.resolve(
process.cwd(),
@@ -15,14 +16,10 @@ require("dotenv").config({
});
exports.default = async (req, res) => {
const BearerToken = req.headers.authorization;
const { bills: billsToQuery } = req.body;
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log(

View File

@@ -1,13 +1,12 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
var builder = require("xmlbuilder2");
const builder = require("xmlbuilder2");
const moment = require("moment-timezone");
const QbXmlUtils = require("./qbxml-utils");
const QbxmlReceivables = require("./qbxml-receivables");
const logger = require("../../utils/logger");
const logger = require('../../utils/logger');
require("dotenv").config({
path: path.resolve(
@@ -19,14 +18,10 @@ require("dotenv").config({
const { generateJobTier, generateOwnerTier, generateSourceTier } = QbXmlUtils;
exports.default = async (req, res) => {
const BearerToken = req.headers.authorization;
const { payments: paymentsToQuery } = req.body;
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log(

View File

@@ -1,13 +1,12 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const builder = require("xmlbuilder2");
const QbXmlUtils = require("./qbxml-utils");
const logger = require("../../utils/logger");
const CreateInvoiceLines = require("../qb-receivables-lines").default;
const logger = require('../../utils/logger');
require("dotenv").config({
path: path.resolve(
@@ -20,14 +19,10 @@ Dinero.globalRoundingMode = "HALF_EVEN";
const { generateJobTier, generateOwnerTier, generateSourceTier } = QbXmlUtils;
exports.default = async (req, res) => {
const BearerToken = req.headers.authorization;
const { jobIds } = req.body;
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log(

View File

@@ -5,7 +5,6 @@ require("dotenv").config({
`.env.${process.env.NODE_ENV || "development"}`
),
});
const GraphQLClient = require("graphql-request").GraphQLClient;
const soap = require("soap");
const queries = require("../graphql-client/queries");
@@ -34,16 +33,11 @@ const { CDK_CREDENTIALS, CheckCdkResponseForError } = require("./cdk-wsdl");
exports.default = async function ReloadCdkMakes(req, res) {
const { bodyshopid, cdk_dealerid } = req.body;
try {
const BearerToken = req.headers.authorization;
//Query all CDK Models
const newList = await GetCdkMakes(req, cdk_dealerid);
//Clear out the existing records
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const deleteResult = await client
.setHeaders({ Authorization: BearerToken })

View File

@@ -507,7 +507,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
Body: repairCosts.BodyLaborTotalCost.toFormat(CCDineroFormat),
Paint: repairCosts.RefinishLaborTotalCost.toFormat(CCDineroFormat),
Prep: Dinero().toFormat(CCDineroFormat),
Frame: Dinero(job.job_totals.rates.laf.total).toFormat(CCDineroFormat),
Frame: repairCosts.FrameLaborTotalCost.toFormat(CCDineroFormat),
Mech: repairCosts.MechanicalLaborTotalCost.toFormat(CCDineroFormat),
Glass: repairCosts.GlassLaborTotalCost.toFormat(CCDineroFormat),
Elec: repairCosts.ElectricalLaborTotalCost.toFormat(CCDineroFormat),

View File

@@ -1,3 +1,4 @@
exports.arms = require("./arms").default;
exports.autohouse = require("./autohouse").default;
exports.claimscorp = require("./claimscorp").default;
exports.arms = require("./arms").default;
exports.kaizen = require("./kaizen").default;

837
server/data/kaizen.js Normal file
View File

@@ -0,0 +1,837 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
// const: never reassigned (was `var`, inconsistent with the rest of the file).
const builder = require("xmlbuilder2");
const _ = require("lodash");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
  path: path.resolve(
    process.cwd(),
    `.env.${process.env.NODE_ENV || "development"}`
  ),
});
// const: never reassigned (was `let`).
const Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");

// Number / date formats used throughout the Kaizen XML feed.
const DineroFormat = "0,0.00";
const DateFormat = "MM/DD/YYYY";
// CIECA operation codes treated as repair vs. replace operations.
const repairOpCodes = ["OP4", "OP9", "OP10"];
const replaceOpCodes = ["OP2", "OP5", "OP11", "OP12"];
// SFTP connection settings for the Kaizen drop site (all from environment).
const ftpSetup = {
  host: process.env.KAIZEN_HOST,
  port: process.env.KAIZEN_PORT,
  username: process.env.KAIZEN_USER,
  password: process.env.KAIZEN_PASSWORD,
  debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
  algorithms: {
    serverHostKey: [
      "ssh-rsa",
      "ssh-dss",
      "rsa-sha2-256",
      "rsa-sha2-512",
      "ecdsa-sha2-nistp256",
      "ecdsa-sha2-nistp384",
    ],
  },
};
exports.default = async (req, res) => {
//Query for the List of Bodyshop Clients.
logger.log("kaizen-start", "DEBUG", "api", null, null);
const kaizenShopsIDs = ["SUMMIT", "STRATHMORE", "SUNRIDGE"];
const { bodyshops } = await client.request(queries.GET_KAIZEN_SHOPS, {
imexshopid: kaizenShopsIDs,
});
const specificShopIds = req.body.bodyshopIds; // ['uuid]
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
try {
for (const bodyshop of specificShopIds
? bodyshops.filter((b) => specificShopIds.includes(b.id))
: bodyshops) {
logger.log("kaizen-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname,
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(
queries.KAIZEN_QUERY,
{
bodyshopid: bodyshop.id,
start: start
? moment(start).startOf("hours")
: moment().subtract(2, "hours").startOf("hour"),
...(end && { end: moment(end).endOf("hours") }),
}
);
const kaizenObject = {
DataFeed: {
ShopInfo: {
ShopName: bodyshops_by_pk.shopname,
Jobs: jobs.map((j) =>
CreateRepairOrderTag(
{ ...j, bodyshop: bodyshops_by_pk },
function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
}
)
),
},
},
};
if (erroredJobs.length > 0) {
logger.log("kaizen-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number)),
});
}
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
kaizenObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: kaizenObject.DataFeed.ShopInfo.Jobs.length,
xml: ret,
filename: `${bodyshop.shopname}-${moment().format(
"YYYYMMDDTHHMMss"
)}.xml`,
});
logger.log("kaizen-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname,
});
} catch (error) {
//Error at the shop level.
logger.log("kaizen-error-shop", "ERROR", "api", bodyshop.id, {
...error,
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
fatal: true,
errors: [error.toString()],
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error,
})),
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`,
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("kaizen-sftp-error", "ERROR", "api", null, {
...errors,
})
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
logger.log("kaizen-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
});
const uploadResult = await sftp.put(
Buffer.from(xmlObj.xml),
`/${xmlObj.filename}`
);
logger.log("kaizen-sftp-upload-result", "DEBUG", "api", null, {
uploadResult,
});
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
} catch (error) {
logger.log("kaizen-sftp-error", "ERROR", "api", null, {
...error,
});
} finally {
sftp.end();
}
sendServerEmail({
subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`,
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
// Build the per-job element of the Kaizen feed: a plain object that
// xmlbuilder2 serializes to XML. Sales figures come from job.job_totals
// (estimate-side), cost figures from CreateCosts(job) (bills/timetickets).
// On failure the job is reported via errorCallback; nothing is thrown.
const CreateRepairOrderTag = (job, errorCallback) => {
  //Level 2
  // Without computed totals the job cannot be exported at all.
  if (!job.job_totals) {
    errorCallback({
      jobid: job.id,
      job: job,
      ro_number: job.ro_number,
      // Shaped like an Error for the caller's error.toString() call.
      error: { toString: () => "No job totals for RO." },
    });
    return {};
  }
  const repairCosts = CreateCosts(job);
  try {
    const ret = {
      JobID: job.id,
      RoNumber: job.ro_number,
      // Total-loss wins; otherwise an RO number means a real job status,
      // and no RO number means it is still an estimate.
      JobStatus: job.tlos_ind
        ? "Total Loss"
        : job.ro_number
        ? job.status
        : "Estimate",
      Customer: {
        CompanyName: job.ownr_co_nm?.trim() || "",
        FirstName: job.ownr_fn?.trim() || "",
        LastName: job.ownr_ln?.trim() || "",
        Address1: job.ownr_addr1?.trim() || "",
        Address2: job.ownr_addr2?.trim() || "",
        City: job.ownr_city?.trim() || "",
        State: job.ownr_st?.trim() || "",
        Zip: job.ownr_zip?.trim() || "",
      },
      Vehicle: {
        // Extract digits from the model-year string (e.g. "'24" -> 24);
        // the inner parseInt is only a truthiness guard on the match.
        Year: job.v_model_yr
          ? parseInt(job.v_model_yr.match(/\d/g))
            ? parseInt(job.v_model_yr.match(/\d/g).join(""), 10)
            : ""
          : "",
        Make: job.v_make_desc || "",
        Model: job.v_model_desc || "",
        BodyStyle: job.vehicle?.v_bstyle || "",
        Color: job.v_color || "",
        VIN: job.v_vin || "",
        PlateNo: job.plate_no || "",
      },
      InsuranceCompany: job.ins_co_nm || "",
      Claim: job.clm_no || "",
      Contacts: {
        // "Last, First" with the comma only when a last name exists.
        CSR: job.employee_csr_rel
          ? `${
              job.employee_csr_rel.last_name
                ? job.employee_csr_rel.last_name
                : ""
            }${job.employee_csr_rel.last_name ? ", " : ""}${
              job.employee_csr_rel.first_name
                ? job.employee_csr_rel.first_name
                : ""
            }`
          : "",
        Estimator: `${job.est_ct_ln ? job.est_ct_ln : ""}${
          job.est_ct_ln ? ", " : ""
        }${job.est_ct_fn ? job.est_ct_fn : ""}`,
      },
      // All dates render as MM/DD/YYYY; scheduling/actual dates are shifted
      // into the shop's timezone first, estimate/open/export dates are not.
      Dates: {
        DateEstimated:
          (job.date_estimated &&
            moment(job.date_estimated).format(DateFormat)) ||
          "",
        DateOpened:
          (job.date_opened && moment(job.date_opened).format(DateFormat)) || "",
        DateScheduled:
          (job.scheduled_in &&
            moment(job.scheduled_in)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        DateArrived:
          (job.actual_in &&
            moment(job.actual_in)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        // Prefer the recorded repair-start date, fall back to arrival.
        DateStart: job.date_repairstarted
          ? (job.date_repairstarted &&
              moment(job.date_repairstarted)
                .tz(job.bodyshop.timezone)
                .format(DateFormat)) ||
            ""
          : (job.actual_in &&
              moment(job.actual_in)
                .tz(job.bodyshop.timezone)
                .format(DateFormat)) ||
            "",
        DateScheduledCompletion:
          (job.scheduled_completion &&
            moment(job.scheduled_completion)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        DateCompleted:
          (job.actual_completion &&
            moment(job.actual_completion)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        DateScheduledDelivery:
          (job.scheduled_delivery &&
            moment(job.scheduled_delivery)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        DateDelivered:
          (job.actual_delivery &&
            moment(job.actual_delivery)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        DateInvoiced:
          (job.date_invoiced &&
            moment(job.date_invoiced)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
        DateExported:
          (job.date_exported &&
            moment(job.date_exported)
              .tz(job.bodyshop.timezone)
              .format(DateFormat)) ||
          "",
      },
      // Sale-side figures, straight from the estimate totals (job.job_totals).
      Sales: {
        Labour: {
          Aluminum: Dinero(job.job_totals.rates.laa.total).toFormat(
            DineroFormat
          ),
          Body: Dinero(job.job_totals.rates.lab.total).toFormat(DineroFormat),
          Diagnostic: Dinero(job.job_totals.rates.lad.total).toFormat(
            DineroFormat
          ),
          Electrical: Dinero(job.job_totals.rates.lae.total).toFormat(
            DineroFormat
          ),
          Frame: Dinero(job.job_totals.rates.laf.total).toFormat(DineroFormat),
          Glass: Dinero(job.job_totals.rates.lag.total).toFormat(DineroFormat),
          Mechanical: Dinero(job.job_totals.rates.lam.total).toFormat(
            DineroFormat
          ),
          // Custom labour rates 1-4 plus "user" labour roll into Other.
          OtherLabour: Dinero(job.job_totals.rates.la1.total)
            .add(Dinero(job.job_totals.rates.la2.total))
            .add(Dinero(job.job_totals.rates.la3.total))
            .add(Dinero(job.job_totals.rates.la4.total))
            .add(Dinero(job.job_totals.rates.lau.total))
            .toFormat(DineroFormat),
          Refinish: Dinero(job.job_totals.rates.lar.total).toFormat(
            DineroFormat
          ),
          Structural: Dinero(job.job_totals.rates.las.total).toFormat(
            DineroFormat
          ),
        },
        Materials: {
          Body: Dinero(job.job_totals.rates.mash.total).toFormat(DineroFormat),
          Refinish: Dinero(job.job_totals.rates.mapa.total).toFormat(
            DineroFormat
          ),
        },
        // Parts totals by CIECA part type; PAN+PAP together count as OEM.
        Parts: {
          Aftermarket: Dinero(
            job.job_totals.parts.parts.list.PAA &&
              job.job_totals.parts.parts.list.PAA.total
          ).toFormat(DineroFormat),
          LKQ: Dinero(
            job.job_totals.parts.parts.list.PAL &&
              job.job_totals.parts.parts.list.PAL.total
          ).toFormat(DineroFormat),
          OEM: Dinero(
            job.job_totals.parts.parts.list.PAN &&
              job.job_totals.parts.parts.list.PAN.total
          )
            .add(
              Dinero(
                job.job_totals.parts.parts.list.PAP &&
                  job.job_totals.parts.parts.list.PAP.total
              )
            )
            .toFormat(DineroFormat),
          OtherParts: Dinero(
            job.job_totals.parts.parts.list.PAO &&
              job.job_totals.parts.parts.list.PAO.total
          ).toFormat(DineroFormat),
          Reconditioned: Dinero(
            job.job_totals.parts.parts.list.PAM &&
              job.job_totals.parts.parts.list.PAM.total
          ).toFormat(DineroFormat),
          TotalParts: Dinero(
            job.job_totals.parts.parts.list.PAA &&
              job.job_totals.parts.parts.list.PAA.total
          )
            .add(
              Dinero(
                job.job_totals.parts.parts.list.PAL &&
                  job.job_totals.parts.parts.list.PAL.total
              )
            )
            .add(
              Dinero(
                job.job_totals.parts.parts.list.PAN &&
                  job.job_totals.parts.parts.list.PAN.total
              )
            )
            .add(
              Dinero(
                job.job_totals.parts.parts.list.PAO &&
                  job.job_totals.parts.parts.list.PAO.total
              )
            )
            .add(
              Dinero(
                job.job_totals.parts.parts.list.PAM &&
                  job.job_totals.parts.parts.list.PAM.total
              )
            )
            .toFormat(DineroFormat),
        },
        OtherSales: Dinero(job.job_totals.additional.storage).toFormat(
          DineroFormat
        ),
        Sublet: Dinero(job.job_totals.parts.sublets.total).toFormat(
          DineroFormat
        ),
        Towing: Dinero(job.job_totals.additional.towing).toFormat(DineroFormat),
        // NOTE(review): includes("ATS Amount")/indexOf on additionalCostItems
        // only matches if the array literally contains the string "ATS Amount",
        // yet .total is then read from that element — confirm the element
        // shape; this may never produce a non-zero ATS value.
        ATS:
          job.job_totals.additional.additionalCostItems.includes(
            "ATS Amount"
          ) === true
            ? Dinero(
                job.job_totals.additional.additionalCostItems[
                  job.job_totals.additional.additionalCostItems.indexOf(
                    "ATS Amount"
                  )
                ].total
              ).toFormat(DineroFormat)
            : Dinero().toFormat(DineroFormat),
        SaleSubtotal: Dinero(job.job_totals.totals.subtotal).toFormat(
          DineroFormat
        ),
        // All tax jurisdictions plus PVRT are reported as one Tax figure.
        Tax: Dinero(job.job_totals.totals.local_tax)
          .add(Dinero(job.job_totals.totals.state_tax))
          .add(Dinero(job.job_totals.totals.federal_tax))
          .add(Dinero(job.job_totals.additional.pvrt))
          .toFormat(DineroFormat),
        SaleTotal: Dinero(job.job_totals.totals.total_repairs).toFormat(
          DineroFormat
        ),
      },
      // Estimate hours per labour category, two decimal places.
      SaleHours: {
        Aluminum: job.job_totals.rates.laa.hours.toFixed(2),
        Body: job.job_totals.rates.lab.hours.toFixed(2),
        Diagnostic: job.job_totals.rates.lad.hours.toFixed(2),
        Electrical: job.job_totals.rates.lae.hours.toFixed(2),
        Frame: job.job_totals.rates.laf.hours.toFixed(2),
        Glass: job.job_totals.rates.lag.hours.toFixed(2),
        Mechanical: job.job_totals.rates.lam.hours.toFixed(2),
        Other: (
          job.job_totals.rates.la1.hours +
          job.job_totals.rates.la2.hours +
          job.job_totals.rates.la3.hours +
          job.job_totals.rates.la4.hours +
          job.job_totals.rates.lau.hours
        ).toFixed(2),
        Refinish: job.job_totals.rates.lar.hours.toFixed(2),
        Structural: job.job_totals.rates.las.hours.toFixed(2),
        // Total from the job lines' modified labour hours, not the rates.
        TotalHours: job.joblines
          .reduce((acc, val) => acc + val.mod_lb_hrs, 0)
          .toFixed(2),
      },
      // Cost-side figures from CreateCosts (bills + time tickets).
      Costs: {
        Labour: {
          Aluminum: repairCosts.AluminumLabourTotalCost.toFormat(DineroFormat),
          Body: repairCosts.BodyLabourTotalCost.toFormat(DineroFormat),
          Diagnostic:
            repairCosts.DiagnosticLabourTotalCost.toFormat(DineroFormat),
          Electrical:
            repairCosts.ElectricalLabourTotalCost.toFormat(DineroFormat),
          Frame: repairCosts.FrameLabourTotalCost.toFormat(DineroFormat),
          Glass: repairCosts.GlassLabourTotalCost.toFormat(DineroFormat),
          // NOTE(review): "Mechancial" is misspelled; the consumer may depend
          // on this exact tag name, so it is left as-is — confirm with Kaizen.
          Mechancial:
            repairCosts.MechanicalLabourTotalCost.toFormat(DineroFormat),
          OtherLabour: repairCosts.LabourMiscTotalCost.toFormat(DineroFormat),
          Refinish: repairCosts.RefinishLabourTotalCost.toFormat(DineroFormat),
          Structural:
            repairCosts.StructuralLabourTotalCost.toFormat(DineroFormat),
          TotalLabour: repairCosts.LabourTotalCost.toFormat(DineroFormat),
        },
        Materials: {
          Body: repairCosts.BMTotalCost.toFormat(DineroFormat),
          Refinish: repairCosts.PMTotalCost.toFormat(DineroFormat),
        },
        Parts: {
          Aftermarket: repairCosts.PartsAMCost.toFormat(DineroFormat),
          LKQ: repairCosts.PartsRecycledCost.toFormat(DineroFormat),
          OEM: repairCosts.PartsOemCost.toFormat(DineroFormat),
          OtherCost: repairCosts.PartsOtherCost.toFormat(DineroFormat),
          Reconditioned:
            repairCosts.PartsReconditionedCost.toFormat(DineroFormat),
          TotalParts: repairCosts.PartsAMCost.add(repairCosts.PartsRecycledCost)
            .add(repairCosts.PartsReconditionedCost)
            .add(repairCosts.PartsOemCost)
            .add(repairCosts.PartsOtherCost)
            .toFormat(DineroFormat),
        },
        Sublet: repairCosts.SubletTotalCost.toFormat(DineroFormat),
        Towing: repairCosts.TowingTotalCost.toFormat(DineroFormat),
        // No cost source for ATS/storage on the cost side: always zero.
        ATS: Dinero().toFormat(DineroFormat),
        Storage: repairCosts.StorageTotalCost.toFormat(DineroFormat),
        CostTotal: repairCosts.TotalCost.toFormat(DineroFormat),
      },
      // Actual (time-ticket) hours per labour category.
      CostHours: {
        Aluminum: repairCosts.AluminumLabourTotalHrs.toFixed(2),
        Body: repairCosts.BodyLabourTotalHrs.toFixed(2),
        Diagnostic: repairCosts.DiagnosticLabourTotalHrs.toFixed(2),
        Refinish: repairCosts.RefinishLabourTotalHrs.toFixed(2),
        Frame: repairCosts.FrameLabourTotalHrs.toFixed(2),
        Mechanical: repairCosts.MechanicalLabourTotalHrs.toFixed(2),
        Glass: repairCosts.GlassLabourTotalHrs.toFixed(2),
        Electrical: repairCosts.ElectricalLabourTotalHrs.toFixed(2),
        Structural: repairCosts.StructuralLabourTotalHrs.toFixed(2),
        Other: repairCosts.LabourMiscTotalHrs.toFixed(2),
        CostTotalHours: repairCosts.TotalHrs.toFixed(2),
      },
    };
    return ret;
  } catch (error) {
    // Per-job failure: log, report to the caller, and implicitly return
    // undefined so the rest of the feed still builds.
    logger.log("kaizen-job-calculate-error", "ERROR", "api", null, {
      error,
    });
    errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
  }
};
/**
 * Aggregate actual job costs, keyed by cost center.
 *
 * Bill lines roll up into billTotalsByCostCenters (parts, materials, sublet,
 * towing); time tickets roll up into ticketTotalsByCostCenter (labour
 * dollars) and ticketHrsByCostCenter (labour hours). Shops on CDK or PBS use
 * raw CIECA codes as cost centers; otherwise the shop's configured
 * responsibility-center defaults map categories to cost centers.
 *
 * @param {object} job - job with bills, timetickets, job_totals, mixdata and
 *   bodyshop (jc_hourly_rates, md_responsibility_centers, ...).
 * @returns {object} Dinero totals plus numeric hour totals per category.
 */
const CreateCosts = (job) => {
  //Create a mapping based on AH Requirements
  //For DMS, the keys in the object below are the CIECA part types.
  const billTotalsByCostCenters = job.bills.reduce((bill_acc, bill_val) => {
    //At the bill level.
    bill_val.billlines.map((line_val) => {
      //At the bill line level.
      if (!bill_acc[line_val.cost_center])
        bill_acc[line_val.cost_center] = Dinero();
      // Cost is stored in dollars; Dinero wants integer cents. Credit memos
      // subtract.
      bill_acc[line_val.cost_center] = bill_acc[line_val.cost_center].add(
        Dinero({
          amount: Math.round((line_val.actual_cost || 0) * 100),
        })
          .multiply(line_val.quantity)
          .multiply(bill_val.is_credit_memo ? -1 : 1)
      );
      return null;
    });
    return bill_acc;
  }, {});
  //If the hourly rates for job costing are set, add them in.
  if (
    job.bodyshop.jc_hourly_rates &&
    (job.bodyshop.jc_hourly_rates.mapa ||
      typeof job.bodyshop.jc_hourly_rates.mapa === "number" ||
      isNaN(job.bodyshop.jc_hourly_rates.mapa) === false)
  ) {
    if (
      !billTotalsByCostCenters[
        job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
      ]
    )
      billTotalsByCostCenters[
        job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
      ] = Dinero();
    if (job.bodyshop.use_paint_scale_data === true) {
      if (job.mixdata.length > 0) {
        // Paint-scale data replaces (not adds to) the MAPA bucket.
        billTotalsByCostCenters[
          job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
        ] = Dinero({
          amount: Math.round(
            ((job.mixdata[0] && job.mixdata[0].totalliquidcost) || 0) * 100
          ),
        });
      } else {
        // No scale data for this job: fall back to rate * estimated hours.
        billTotalsByCostCenters[
          job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
        ] = billTotalsByCostCenters[
          job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
        ].add(
          Dinero({
            amount: Math.round(
              (job.bodyshop.jc_hourly_rates &&
                job.bodyshop.jc_hourly_rates.mapa * 100) ||
                0
            ),
          }).multiply(job.job_totals.rates.mapa.hours)
        );
      }
    } else {
      billTotalsByCostCenters[
        job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
      ] = billTotalsByCostCenters[
        job.bodyshop.md_responsibility_centers.defaults.costs.MAPA
      ].add(
        Dinero({
          amount: Math.round(
            (job.bodyshop.jc_hourly_rates &&
              job.bodyshop.jc_hourly_rates.mapa * 100) ||
              0
          ),
        }).multiply(job.job_totals.rates.mapa.hours)
      );
    }
  }
  if (job.bodyshop.jc_hourly_rates && job.bodyshop.jc_hourly_rates.mash) {
    if (
      !billTotalsByCostCenters[
        job.bodyshop.md_responsibility_centers.defaults.costs.MASH
      ]
    )
      billTotalsByCostCenters[
        job.bodyshop.md_responsibility_centers.defaults.costs.MASH
      ] = Dinero();
    billTotalsByCostCenters[
      job.bodyshop.md_responsibility_centers.defaults.costs.MASH
    ] = billTotalsByCostCenters[
      job.bodyshop.md_responsibility_centers.defaults.costs.MASH
    ].add(
      Dinero({
        amount: Math.round(
          (job.bodyshop.jc_hourly_rates &&
            job.bodyshop.jc_hourly_rates.mash * 100) ||
            0
        ),
      }).multiply(job.job_totals.rates.mash.hours)
    );
  }
  //Uses CIECA Labour types.
  const ticketTotalsByCostCenter = job.timetickets.reduce(
    (ticket_acc, ticket_val) => {
      //At the invoice level.
      if (!ticket_acc[ticket_val.cost_center])
        ticket_acc[ticket_val.cost_center] = Dinero();
      ticket_acc[ticket_val.cost_center] = ticket_acc[
        ticket_val.cost_center
      ].add(
        Dinero({
          amount: Math.round((ticket_val.rate || 0) * 100),
        }).multiply(
          // Flat-rate tickets bill productive hours; others bill actual hours.
          (ticket_val.flat_rate
            ? ticket_val.productivehrs
            : ticket_val.actualhrs) || 0
        )
      );
      return ticket_acc;
    },
    {}
  );
  const ticketHrsByCostCenter = job.timetickets.reduce(
    (ticket_acc, ticket_val) => {
      //At the invoice level.
      if (!ticket_acc[ticket_val.cost_center])
        ticket_acc[ticket_val.cost_center] = 0;
      // Parenthesized: `acc + x || 0` zeroed the whole accumulated sum
      // whenever a ticket's hours were undefined.
      ticket_acc[ticket_val.cost_center] =
        ticket_acc[ticket_val.cost_center] +
        ((ticket_val.flat_rate
          ? ticket_val.productivehrs
          : ticket_val.actualhrs) || 0);
      return ticket_acc;
    },
    {}
  );
  //CIECA STANDARD MAPPING OBJECT.
  const ciecaObj = {
    ATS: "ATS",
    LA1: "LA1",
    LA2: "LA2",
    LA3: "LA3",
    LA4: "LA4",
    LAA: "LAA",
    LAB: "LAB",
    LAD: "LAD",
    LAE: "LAE",
    LAF: "LAF",
    LAG: "LAG",
    LAM: "LAM",
    LAR: "LAR",
    LAS: "LAS",
    LAU: "LAU",
    PAA: "PAA",
    PAC: "PAC",
    PAG: "PAG",
    PAL: "PAL",
    PAM: "PAM",
    PAN: "PAN",
    PAO: "PAO",
    PAP: "PAP",
    PAR: "PAR",
    PAS: "PAS",
    TOW: "TOW",
    MAPA: "MAPA",
    MASH: "MASH",
    PASL: "PASL",
  };
  // CDK/PBS shops key bills by raw CIECA codes; others use shop defaults.
  const defaultCosts =
    job.bodyshop.cdk_dealerid || job.bodyshop.pbs_serialnumber
      ? ciecaObj
      : job.bodyshop.md_responsibility_centers.defaults.costs;
  return {
    // Everything except sublet, materials and towing counts as parts.
    PartsTotalCost: Object.keys(billTotalsByCostCenters).reduce((acc, key) => {
      if (
        key !== defaultCosts.PAS &&
        key !== defaultCosts.PASL &&
        key !== defaultCosts.MAPA &&
        key !== defaultCosts.MASH &&
        key !== defaultCosts.TOW
      )
        return acc.add(billTotalsByCostCenters[key]);
      return acc;
    }, Dinero()),
    PartsOemCost: (billTotalsByCostCenters[defaultCosts.PAN] || Dinero()).add(
      billTotalsByCostCenters[defaultCosts.PAP] || Dinero()
    ),
    PartsAMCost: billTotalsByCostCenters[defaultCosts.PAA] || Dinero(),
    PartsReconditionedCost:
      billTotalsByCostCenters[defaultCosts.PAM] || Dinero(),
    PartsRecycledCost: billTotalsByCostCenters[defaultCosts.PAL] || Dinero(),
    PartsOtherCost: billTotalsByCostCenters[defaultCosts.PAO] || Dinero(),
    // PAS wins, then PASL. (Previously the PASL fallback was wrapped in
    // Dinero(...), which takes an options object, not a Dinero instance.)
    SubletTotalCost:
      billTotalsByCostCenters[defaultCosts.PAS] ||
      billTotalsByCostCenters[defaultCosts.PASL] ||
      Dinero(),
    AluminumLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAA] || Dinero(),
    AluminumLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAA] || 0,
    BodyLabourTotalCost: ticketTotalsByCostCenter[defaultCosts.LAB] || Dinero(),
    BodyLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAB] || 0,
    DiagnosticLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAD] || Dinero(),
    DiagnosticLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAD] || 0,
    ElectricalLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAE] || Dinero(),
    ElectricalLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAE] || 0,
    FrameLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAF] || Dinero(),
    FrameLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAF] || 0,
    GlassLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAG] || Dinero(),
    GlassLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAG] || 0,
    // LA1-LA4 + LAU. (Previously LA2 was added twice, inflating the total.)
    LabourMiscTotalCost: (
      ticketTotalsByCostCenter[defaultCosts.LA1] || Dinero()
    )
      .add(ticketTotalsByCostCenter[defaultCosts.LA2] || Dinero())
      .add(ticketTotalsByCostCenter[defaultCosts.LA3] || Dinero())
      .add(ticketTotalsByCostCenter[defaultCosts.LA4] || Dinero())
      .add(ticketTotalsByCostCenter[defaultCosts.LAU] || Dinero()),
    LabourMiscTotalHrs:
      (ticketHrsByCostCenter[defaultCosts.LA1] || 0) +
      (ticketHrsByCostCenter[defaultCosts.LA2] || 0) +
      (ticketHrsByCostCenter[defaultCosts.LA3] || 0) +
      (ticketHrsByCostCenter[defaultCosts.LA4] || 0) +
      (ticketHrsByCostCenter[defaultCosts.LAU] || 0),
    MechanicalLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAM] || Dinero(),
    MechanicalLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAM] || 0,
    RefinishLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAR] || Dinero(),
    RefinishLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAR] || 0,
    StructuralLabourTotalCost:
      ticketTotalsByCostCenter[defaultCosts.LAS] || Dinero(),
    StructuralLabourTotalHrs: ticketHrsByCostCenter[defaultCosts.LAS] || 0,
    PMTotalCost: billTotalsByCostCenters[defaultCosts.MAPA] || Dinero(),
    BMTotalCost: billTotalsByCostCenters[defaultCosts.MASH] || Dinero(),
    MiscTotalCost: billTotalsByCostCenters[defaultCosts.PAO] || Dinero(),
    TowingTotalCost: billTotalsByCostCenters[defaultCosts.TOW] || Dinero(),
    // No data sources for these yet; exported as zero.
    StorageTotalCost: Dinero(),
    DetailTotal: Dinero(),
    DetailTotalCost: Dinero(),
    SalesTaxTotalCost: Dinero(),
    LabourTotalCost: Object.keys(ticketTotalsByCostCenter).reduce(
      (acc, key) => {
        return acc.add(ticketTotalsByCostCenter[key]);
      },
      Dinero()
    ),
    TotalCost: Object.keys(billTotalsByCostCenters).reduce((acc, key) => {
      return acc.add(billTotalsByCostCenters[key]);
    }, Dinero()),
    // Parenthesized for the same precedence reason as ticketHrsByCostCenter.
    TotalHrs: job.timetickets.reduce((acc, ticket_val) => {
      return (
        acc +
        ((ticket_val.flat_rate
          ? ticket_val.productivehrs
          : ticket_val.actualhrs) || 0)
      );
    }, 0),
  };
};

View File

@@ -1,14 +1,14 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
const axios = require("axios");
let nodemailer = require("nodemailer");
let aws = require("@aws-sdk/client-ses");
let { defaultProvider } = require("@aws-sdk/credential-provider-node");
let {defaultProvider} = require("@aws-sdk/credential-provider-node");
const logger = require("../utils/logger");
const client = require("../graphql-client/graphql-client").client;
@@ -18,251 +18,252 @@ const ses = new aws.SES({
// The key apiVersion is no longer supported in v3, and can be removed.
// @deprecated The client uses the "latest" apiVersion.
apiVersion: "latest",
defaultProvider,
region: "us-east-2",
});
let transporter = nodemailer.createTransport({
SES: { ses, aws },
SES: {ses, aws},
});
exports.sendServerEmail = async function ({ subject, text }) {
if (process.env.NODE_ENV === undefined) return;
try {
transporter.sendMail(
{
from: `Rome Online API - ${process.env.NODE_ENV} <noreply@romeonline.io>`,
to: ["patrick@imexsystems.ca"],
subject: subject,
text: text,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
exports.sendServerEmail = async function ({subject, text}) {
if (process.env.NODE_ENV === undefined) return;
try {
transporter.sendMail(
{
Name: "tag_name",
Value: "tag_value",
from: `Rome Online API - ${process.env.NODE_ENV} <noreply@romeonline.io>`,
to: ["patrick@imexsystems.ca"],
subject: subject,
text: text,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
Name: "tag_name",
Value: "tag_value",
},
],
},
},
],
},
},
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
};
exports.sendTaskEmail = async function ({ to, subject, text, attachments }) {
try {
transporter.sendMail(
{
from: `Rome Online <noreply@romeonline.io>`,
to: to,
subject: subject,
text: text,
attachments: attachments || null,
},
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
exports.sendTaskEmail = async function ({to, subject, text, attachments}) {
try {
transporter.sendMail(
{
from: `Rome Online <noreply@romeonline.io>`,
to: to,
subject: subject,
text: text,
attachments: attachments || null,
},
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
};
exports.sendEmail = async (req, res) => {
logger.log("send-email", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
});
logger.log("send-email", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
});
let downloadedMedia = [];
if (req.body.media && req.body.media.length > 0) {
downloadedMedia = await Promise.all(
req.body.media.map((m) => {
try {
return getImage(m);
} catch (error) {
logger.log("send-email-error", "ERROR", req.user.email, null, {
let downloadedMedia = [];
if (req.body.media && req.body.media.length > 0) {
downloadedMedia = await Promise.all(
req.body.media.map((m) => {
try {
return getImage(m);
} catch (error) {
logger.log("send-email-error", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error,
});
}
})
);
}
transporter.sendMail(
{
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error,
});
attachments:
[
...((req.body.attachments &&
req.body.attachments.map((a) => {
return {
filename: a.filename,
path: a.path,
};
})) ||
[]),
...downloadedMedia.map((a) => {
return {
path: a,
};
}),
] || null,
html: req.body.html,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
Name: "tag_name",
Value: "tag_value",
},
],
},
},
(err, info) => {
console.log(err || info);
if (info) {
logger.log("send-email-success", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
// info,
});
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
messageId: info.response,
});
res.json({
success: true, //response: info
});
} else {
logger.log("send-email-failure", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error: err,
});
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
bodyshopid: req.body.bodyshopid,
});
res.status(500).json({success: false, error: err});
}
}
})
);
}
transporter.sendMail(
{
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
attachments:
[
...((req.body.attachments &&
req.body.attachments.map((a) => {
return {
filename: a.filename,
path: a.path,
};
})) ||
[]),
...downloadedMedia.map((a) => {
return {
path: a,
};
}),
] || null,
html: req.body.html,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
Name: "tag_name",
Value: "tag_value",
},
],
},
},
(err, info) => {
console.log(err || info);
if (info) {
logger.log("send-email-success", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
// info,
});
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
messageId: info.response,
});
res.json({
success: true, //response: info
});
} else {
logger.log("send-email-failure", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error: err,
});
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
bodyshopid: req.body.bodyshopid,
});
res.status(500).json({ success: false, error: err });
}
}
);
};
async function getImage(imageUrl) {
let image = await axios.get(imageUrl, { responseType: "arraybuffer" });
let raw = Buffer.from(image.data).toString("base64");
return "data:" + image.headers["content-type"] + ";base64," + raw;
let image = await axios.get(imageUrl, {responseType: "arraybuffer"});
let raw = Buffer.from(image.data).toString("base64");
return "data:" + image.headers["content-type"] + ";base64," + raw;
}
async function logEmail(req, email) {
try {
const insertresult = await client.request(queries.INSERT_EMAIL_AUDIT, {
email: {
to: email.to,
cc: email.cc,
subject: email.subject,
bodyshopid: req.body.bodyshopid,
useremail: req.user.email,
contents: req.body.html,
jobid: req.body.jobid,
sesmessageid: email.messageId,
status: "Sent",
},
});
console.log(insertresult);
} catch (error) {
logger.log("email-log-error", "error", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
// info,
});
}
try {
const insertresult = await client.request(queries.INSERT_EMAIL_AUDIT, {
email: {
to: email.to,
cc: email.cc,
subject: email.subject,
bodyshopid: req.body.bodyshopid,
useremail: req.user.email,
contents: req.body.html,
jobid: req.body.jobid,
sesmessageid: email.messageId,
status: "Sent",
},
});
console.log(insertresult);
} catch (error) {
logger.log("email-log-error", "error", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
// info,
});
}
}
exports.emailBounce = async function (req, res, next) {
try {
const body = JSON.parse(req.body);
if (body.Type === "SubscriptionConfirmation") {
logger.log("SNS-message", "DEBUG", "api", null, {
body: req.body,
});
}
const message = JSON.parse(body.Message);
if (message.notificationType === "Bounce") {
let replyTo, subject, messageId;
message.mail.headers.forEach((header) => {
if (header.name === "Reply-To") {
replyTo = header.value;
} else if (header.name === "Subject") {
subject = header.value;
exports.emailBounce = async function (req, res) {
try {
const body = JSON.parse(req.body);
if (body.Type === "SubscriptionConfirmation") {
logger.log("SNS-message", "DEBUG", "api", null, {
body: req.body,
});
}
});
messageId = message.mail.messageId;
if (replyTo === "noreply@romeonline.io") {
res.sendStatus(200);
return;
}
//If it's bounced, log it as bounced in audit log. Send an email to the user.
const result = await client.request(queries.UPDATE_EMAIL_AUDIT, {
sesid: messageId,
status: "Bounced",
context: message.bounce?.bouncedRecipients,
});
transporter.sendMail(
{
from: `Rome Online <noreply@romeonline.io>`,
to: replyTo,
subject: `Rome Online Bounced Email - RE: ${subject}`,
text: `Rome Online has tried to deliver an email with the subject: ${subject} to the intended recipients but encountered an error.
const message = JSON.parse(body.Message);
if (message.notificationType === "Bounce") {
let replyTo, subject, messageId;
message.mail.headers.forEach((header) => {
if (header.name === "Reply-To") {
replyTo = header.value;
} else if (header.name === "Subject") {
subject = header.value;
}
});
messageId = message.mail.messageId;
if (replyTo === "noreply@romeonline.io") {
res.sendStatus(200);
return;
}
//If it's bounced, log it as bounced in audit log. Send an email to the user.
const result = await client.request(queries.UPDATE_EMAIL_AUDIT, {
sesid: messageId,
status: "Bounced",
context: message.bounce?.bouncedRecipients,
});
transporter.sendMail(
{
from: `Rome Online <noreply@romeonline.io>`,
to: replyTo,
//bcc: "patrick@snapt.ca",
subject: `Rome Online Bounced Email - RE: ${subject}`,
text: `Rome Online has tried to deliver an email with the subject: ${subject} to the intended recipients but encountered an error.
${body.bounce?.bouncedRecipients.map(
(r) =>
`Recipient: ${r.emailAddress} | Status: ${r.action} | Code: ${r.diagnosticCode}
(r) =>
`Recipient: ${r.emailAddress} | Status: ${r.action} | Code: ${r.diagnosticCode}
`
)}
)}
`,
},
(err, info) => {
console.log("***", err || info);
},
(err, info) => {
console.log("***", err || info);
}
);
}
);
} catch (error) {
logger.log("sns-error", "ERROR", "api", null, {
error: JSON.stringify(error),
});
}
} catch (error) {
logger.log("sns-error", "ERROR", "api", null, {
error: JSON.stringify(error),
});
}
res.sendStatus(200);
res.sendStatus(200);
};

View File

@@ -1,287 +1,215 @@
var admin = require("firebase-admin");
const admin = require("firebase-admin");
const logger = require("../utils/logger");
const path = require("path");
const { auth } = require("firebase-admin");
const {auth} = require("firebase-admin");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
const client = require("../graphql-client/graphql-client").client;
var serviceAccount = require(process.env.FIREBASE_ADMINSDK_JSON);
const serviceAccount = require(process.env.FIREBASE_ADMINSDK_JSON);
const adminEmail = require("../utils/adminEmail");
admin.initializeApp({
credential: admin.credential.cert(serviceAccount),
databaseURL: process.env.FIREBASE_DATABASE_URL,
credential: admin.credential.cert(serviceAccount),
databaseURL: process.env.FIREBASE_DATABASE_URL,
});
exports.admin = admin;
const adminEmail = [
"patrick@imex.dev",
//"patrick@imex.test",
"patrick@imex.prod",
"patrick@imexsystems.ca",
"patrick@thinkimex.com",
];
exports.createUser = async (req, res) => {
logger.log("admin-create-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
logger.log("admin-create-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
const { email, displayName, password, shopid, authlevel } = req.body;
try {
const userRecord = await admin
.auth()
.createUser({ email, displayName, password });
const {email, displayName, password, shopid, authlevel} = req.body;
try {
const userRecord = await admin
.auth()
.createUser({email, displayName, password});
// See the UserRecord reference doc for the contents of userRecord.
// See the UserRecord reference doc for the contents of userRecord.
const result = await client.request(
`
const result = await client.request(
`
mutation INSERT_USER($user: users_insert_input!) {
insert_users_one(object: $user) {
email
}
}
`,
{
user: {
email: email.toLowerCase(),
authid: userRecord.uid,
associations: {
data: [{ shopid, authlevel, active: true }],
},
},
}
);
{
user: {
email: email.toLowerCase(),
authid: userRecord.uid,
associations: {
data: [{shopid, authlevel, active: true}],
},
},
}
);
res.json({ userRecord, result });
} catch (error) {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
}
res.json({userRecord, result});
} catch (error) {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
}
};
exports.updateUser = (req, res) => {
logger.log("admin-update-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
logger.log("admin-update-user", "ADMIN", req.user.email, null, {
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
return;
}
admin
.auth()
.updateUser(
req.body.uid,
req.body.user
// {
// email: "modifiedUser@example.com",
// phoneNumber: "+11234567890",
// emailVerified: true,
// password: "newPassword",
// displayName: "Jane Doe",
// photoURL: "http://www.example.com/12345678/photo.png",
// disabled: true,
// }
)
.then((userRecord) => {
// See the UserRecord reference doc for the contents of userRecord.
logger.log("admin-update-user-success", "ADMIN", req.user.email, null, {
userRecord,
ioadmin: true,
});
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
});
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
return;
}
admin
.auth()
.updateUser(
req.body.uid,
req.body.user
// {
// email: "modifiedUser@example.com",
// phoneNumber: "+11234567890",
// emailVerified: true,
// password: "newPassword",
// displayName: "Jane Doe",
// photoURL: "http://www.example.com/12345678/photo.png",
// disabled: true,
// }
)
.then((userRecord) => {
// See the UserRecord reference doc for the contents of userRecord.
logger.log("admin-update-user-success", "ADMIN", req.user.email, null, {
userRecord,
ioadmin: true,
});
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
});
};
exports.getUser = (req, res) => {
logger.log("admin-get-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
logger.log("admin-get-user", "ADMIN", req.user.email, null, {
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
return;
}
admin
.auth()
.getUser(req.body.uid)
.then((userRecord) => {
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-get-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
ioadmin: true,
});
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
return;
}
admin
.auth()
.getUser(req.body.uid)
.then((userRecord) => {
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-get-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
});
};
exports.sendNotification = async (req, res) => {
setTimeout(() => {
// Send a message to the device corresponding to the provided
// registration token.
admin
.messaging()
.send({
topic: "PRD_PATRICK-messaging",
notification: {
title: `ImEX Online Message - +16049992002`,
body: "Test Noti.",
//imageUrl: "https://thinkimex.com/img/io-fcm.png",
},
data: {
type: "messaging-inbound",
conversationid: "e0eb17c3-3a78-4e3f-b932-55ef35aa2297",
text: "Hello. ",
image_path: "",
phone_num: "+16049992002",
},
})
.then((response) => {
// Response is a message ID string.
console.log("Successfully sent message:", response);
})
.catch((error) => {
console.log("Error sending message:", error);
});
setTimeout(() => {
// Send a message to the device corresponding to the provided
// registration token.
admin
.messaging()
.send({
topic: "PRD_PATRICK-messaging",
notification: {
title: `ImEX Online Message - +16049992002`,
body: "Test Noti.",
//imageUrl: "https://thinkimex.com/img/io-fcm.png",
},
data: {
type: "messaging-inbound",
conversationid: "e0eb17c3-3a78-4e3f-b932-55ef35aa2297",
text: "Hello. ",
image_path: "",
phone_num: "+16049992002",
},
})
.then((response) => {
// Response is a message ID string.
console.log("Successfully sent message:", response);
})
.catch((error) => {
console.log("Error sending message:", error);
});
res.sendStatus(200);
}, 500);
res.sendStatus(200);
}, 500);
};
exports.subscribe = async (req, res) => {
const result = await admin
.messaging()
.subscribeToTopic(
req.body.fcm_tokens,
`${req.body.imexshopid}-${req.body.type}`
);
const result = await admin
.messaging()
.subscribeToTopic(
req.body.fcm_tokens,
`${req.body.imexshopid}-${req.body.type}`
);
res.json(result);
res.json(result);
};
exports.unsubscribe = async (req, res) => {
try {
const result = await admin
.messaging()
.unsubscribeFromTopic(
req.body.fcm_tokens,
`${req.body.imexshopid}-${req.body.type}`
);
try {
const result = await admin
.messaging()
.unsubscribeFromTopic(
req.body.fcm_tokens,
`${req.body.imexshopid}-${req.body.type}`
);
res.json(result);
} catch (error) {
res.sendStatus(500);
}
res.json(result);
} catch (error) {
res.sendStatus(500);
}
};
exports.validateFirebaseIdToken = async (req, res, next) => {
if (
(!req.headers.authorization ||
!req.headers.authorization.startsWith("Bearer ")) &&
!(req.cookies && req.cookies.__session)
) {
console.error("Unauthorized attempt. No authorization provided.");
res.status(403).send("Unauthorized");
return;
}
let idToken;
if (
req.headers.authorization &&
req.headers.authorization.startsWith("Bearer ")
) {
// console.log('Found "Authorization" header');
// Read the ID Token from the Authorization header.
idToken = req.headers.authorization.split("Bearer ")[1];
} else if (req.cookies) {
//console.log('Found "__session" cookie');
// Read the ID Token from cookie.
idToken = req.cookies.__session;
} else {
// No cookie
console.error("Unauthorized attempt. No cookie provided.");
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "no-cookie",
});
res.status(403).send("Unauthorized");
return;
}
try {
const decodedIdToken = await admin.auth().verifyIdToken(idToken);
//console.log("ID Token correctly decoded", decodedIdToken);
req.user = decodedIdToken;
next();
return;
} catch (error) {
logger.log("api-unauthorized-call", "WARN", null, null, {
path: req.path,
body: req.body,
type: "unauthroized",
...error,
});
res.status(401).send("Unauthorized");
return;
}
};
exports.validateAdmin = async (req, res, next) => {
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log("admin-validation-failed", "ERROR", req.user.email, null, {
request: req.body,
user: req.user,
});
res.sendStatus(404);
return;
} else {
next();
return;
}
};
//Admin claims code.
// const uid = "JEqqYlsadwPEXIiyRBR55fflfko1";

View File

@@ -521,6 +521,21 @@ exports.QUERY_PAYMENTS_FOR_EXPORT = `
}
}`;
// Fetches every recorded transition for the given job ids, most recent first
// (ordered by `end` desc). Consumed by the job-lifecycle endpoint, which
// groups the rows by jobid and derives human-readable durations.
exports.QUERY_TRANSITIONS_BY_JOBID = `query QUERY_TRANSITIONS_BY_JOBID($jobids: [uuid!]!) {
transitions(where: {jobid: {_in: $jobids}}, order_by: {end: desc}) {
start
end
value
prev_value
next_value
duration
type
created_at
updated_at
jobid
}
}`;
exports.QUERY_UPCOMING_APPOINTMENTS = `query QUERY_UPCOMING_APPOINTMENTS($now: timestamptz!, $jobId: uuid!) {
jobs_by_pk(id: $jobId) {
bodyshop {
@@ -1073,6 +1088,183 @@ query ENTEGRAL_EXPORT($bodyshopid: uuid!) {
}
}`;
exports.KAIZEN_QUERY = `query KAIZEN_EXPORT($start: timestamptz, $bodyshopid: uuid!, $end: timestamptz) {
bodyshops_by_pk(id: $bodyshopid){
id
shopname
address1
city
state
zip_post
country
phone
last_name_first
md_ro_statuses
md_order_statuses
md_responsibility_centers
jc_hourly_rates
cdk_dealerid
pbs_serialnumber
use_paint_scale_data
timezone
}
jobs(where: {_and: [{updated_at: {_gt: $start}}, {updated_at: {_lte: $end}}, {shopid: {_eq: $bodyshopid}}]}) {
actual_completion
actual_delivery
actual_in
asgn_date
bills {
billlines {
actual_cost
cost_center
id
quantity
}
federal_tax_rate
id
is_credit_memo
local_tax_rate
state_tax_rate
}
created_at
clm_no
date_estimated
date_exported
date_invoiced
date_open
date_repairstarted
employee_body_rel {
first_name
last_name
employee_number
id
}
employee_csr_rel {
first_name
last_name
employee_number
id
}
employee_prep_rel {
first_name
last_name
employee_number
id
}
employee_refinish_rel {
first_name
last_name
employee_number
id
}
est_ct_fn
est_ct_ln
id
ins_co_nm
joblines(where: {removed: {_eq: false}}) {
act_price
billlines(order_by: {bill: {date: desc_nulls_last}} limit: 1) {
actual_cost
actual_price
quantity
bill {
vendor {
name
}
invoice_number
date
}
}
db_price
id
lbr_op
line_desc
line_ind
line_no
mod_lb_hrs
mod_lbr_ty
parts_order_lines(order_by: {parts_order: {order_date: desc_nulls_last}} limit: 1){
parts_order{
id
order_date
}
}
part_qty
part_type
profitcenter_part
profitcenter_labor
prt_dsmk_m
prt_dsmk_p
oem_partno
status
}
job_totals
loss_date
mixdata(limit: 1, order_by: {updated_at: desc}) {
jobid
totalliquidcost
}
ownr_addr1
ownr_addr2
ownr_city
ownr_co_nm
ownr_fn
ownr_ln
ownr_st
ownr_zip
parts_orders(limit: 1, order_by: {created_at: desc}) {
created_at
}
parts_tax_rates
plate_no
rate_la1
rate_la2
rate_la3
rate_la4
rate_laa
rate_lab
rate_lad
rate_lae
rate_laf
rate_lag
rate_lam
rate_lar
rate_las
rate_lau
rate_ma2s
rate_ma2t
rate_ma3s
rate_mabl
rate_macs
rate_mahw
rate_matd
rate_mapa
rate_mash
ro_number
scheduled_completion
scheduled_delivery
scheduled_in
status
timetickets {
id
rate
cost_center
actualhrs
productivehrs
flat_rate
}
tlos_ind
v_color
v_model_yr
v_model_desc
v_make_desc
v_vin
vehicle {
v_bstyle
}
}
}`;
exports.UPDATE_JOB = `
mutation UPDATE_JOB($jobId: uuid!, $job: jobs_set_input!) {
update_jobs(where: { id: { _eq: $jobId } }, _set: $job) {
@@ -1538,7 +1730,7 @@ exports.GET_CLAIMSCORP_SHOPS = `query GET_CLAIMSCORP_SHOPS {
}
}`;
exports.GET_ENTEGRAL_SHOPS = `query GET_AUTOHOUSE_SHOPS {
exports.GET_ENTEGRAL_SHOPS = `query GET_ENTEGRAL_SHOPS {
bodyshops(where: {entegral_id: {_is_null: false}, _or: {entegral_id: {_neq: ""}}}){
id
shopname
@@ -1558,6 +1750,26 @@ exports.GET_ENTEGRAL_SHOPS = `query GET_AUTOHOUSE_SHOPS {
}
}`;
// Looks up bodyshop configuration (address, md_* status mappings, rates,
// timezone, …) for the given `imexshopid` values. Used by the Kaizen export
// integration to resolve which shops to process.
exports.GET_KAIZEN_SHOPS = `query GET_KAIZEN_SHOPS($imexshopid: [String]) {
bodyshops(where: {imexshopid: {_in: $imexshopid}}){
id
shopname
address1
city
state
zip_post
country
phone
md_ro_statuses
md_order_statuses
autohouseid
md_responsibility_centers
jc_hourly_rates
imexshopid
timezone
}
}`;
exports.DELETE_ALL_DMS_VEHICLES = `mutation DELETE_ALL_DMS_VEHICLES{
delete_dms_vehicles(where: {}) {
affected_rows

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,68 @@
const _ = require("lodash");
const queries = require("../graphql-client/queries");
const moment = require("moment");
const durationToHumanReadable = require("../utils/durationToHumanReadable");
const calculateStatusDuration = require("../utils/calculateStatusDuration");
/**
 * Express handler: returns the status-transition lifecycle for one or more jobs.
 *
 * Request body:
 * - jobids   {string|string[]} required - job id(s) to look up
 * - statuses {string[]}        optional - statuses forwarded to calculateStatusDuration
 *
 * Responds 400 when jobids is missing, 500 when the GraphQL query fails,
 * otherwise 200 with the transitions grouped per job, each annotated with
 * human-readable start/end/duration fields.
 */
const jobLifecycle = async (req, res) => {
  const { jobids, statuses } = req.body;
  if (!jobids) {
    return res.status(400).json({
      error: "Missing jobids",
    });
  }

  // Accept either a single id or an array of ids.
  const jobIDs = _.isArray(jobids) ? jobids : [jobids];
  const client = req.userGraphQLClient;

  // Guard the remote call: an unhandled rejection here would previously leave
  // the HTTP request hanging with no response.
  let transitions;
  try {
    const resp = await client.request(queries.QUERY_TRANSITIONS_BY_JOBID, {
      jobids: jobIDs,
    });
    transitions = resp.transitions;
  } catch (error) {
    return res.status(500).json({
      error: "Failed to query job transitions",
    });
  }

  if (!transitions) {
    return res.status(200).json({
      jobIDs,
      transitions: [],
    });
  }

  const transitionsByJobId = _.groupBy(transitions, "jobid");
  const groupedTransitions = {};
  for (const jobId of Object.keys(transitionsByJobId)) {
    // Annotate each transition in place with readable timestamps/durations.
    const lifecycle = transitionsByJobId[jobId].map((transition) => {
      transition.start_readable = transition.start
        ? moment(transition.start).fromNow()
        : "N/A";
      transition.end_readable = transition.end
        ? moment(transition.end).fromNow()
        : "N/A";
      if (transition.duration) {
        // duration is stored in milliseconds.
        transition.duration_seconds = Math.round(transition.duration / 1000);
        transition.duration_minutes = Math.round(
          transition.duration_seconds / 60
        );
        transition.duration_readable = durationToHumanReadable(
          moment.duration(transition.duration)
        );
      } else {
        transition.duration_seconds = 0;
        transition.duration_minutes = 0;
        transition.duration_readable = "N/A";
      }
      return transition;
    });
    groupedTransitions[jobId] = {
      lifecycle,
      durations: calculateStatusDuration(lifecycle, statuses),
    };
  }

  // NOTE(review): the key here is "transition" (singular) while the
  // empty-result branch above uses "transitions" — kept as-is so existing
  // consumers are not broken; confirm before unifying.
  return res.status(200).json({
    jobIDs,
    transition: groupedTransitions,
  });
};

module.exports = jobLifecycle;

View File

@@ -9,83 +9,84 @@ const logger = require("../utils/logger");
Dinero.globalRoundingMode = "HALF_EVEN";
const path = require("path");
const client = require("../graphql-client/graphql-client").client;
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
async function StatusTransition(req, res) {
if (req.headers["event-secret"] !== process.env.EVENT_SECRET) {
res.status(401).send("Unauthorized");
return;
}
res.sendStatus(200);
return;
const {
id: jobid,
status: value,
shopid: bodyshopid,
} = req.body.event.data.new;
try {
const { update_transitions } = await client.request(
queries.UPDATE_OLD_TRANSITION,
{
jobid: jobid,
existingTransition: {
end: new Date(),
next_value: value,
const {
id: jobid,
status: value,
shopid: bodyshopid,
} = req.body.event.data.new;
//duration
},
}
);
// Create record OPEN on new item, enter state
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record, create a new record saying we came from previous status going to previous status
// (Timeline)
// Final status is exported, there is no end date as there is no further transition (has no end date)
try {
const {update_transitions} = await client.request(
queries.UPDATE_OLD_TRANSITION,
{
jobid: jobid,
existingTransition: {
end: new Date(),
next_value: value,
let duration =
update_transitions.affected_rows === 0
? 0
: new Date(update_transitions.returning[0].end) -
new Date(update_transitions.returning[0].start);
//duration
},
}
);
const resp2 = await client.request(queries.INSERT_NEW_TRANSITION, {
oldTransitionId:
update_transitions.affected_rows === 0
? null
: update_transitions.returning[0].id,
duration,
newTransition: {
bodyshopid: bodyshopid,
jobid: jobid,
start:
update_transitions.affected_rows === 0
? new Date()
: update_transitions.returning[0].end,
prev_value:
update_transitions.affected_rows === 0
? null
: update_transitions.returning[0].value,
value: value,
type: "status",
},
});
let duration =
update_transitions.affected_rows === 0
? 0
: new Date(update_transitions.returning[0].end) -
new Date(update_transitions.returning[0].start);
//Check to see if there is an existing status transition record.
//Query using Job ID, start is not null, end is null.
const resp2 = await client.request(queries.INSERT_NEW_TRANSITION, {
oldTransitionId:
update_transitions.affected_rows === 0
? null
: update_transitions.returning[0].id,
duration,
newTransition: {
bodyshopid: bodyshopid,
jobid: jobid,
start:
update_transitions.affected_rows === 0
? new Date()
: update_transitions.returning[0].end,
prev_value:
update_transitions.affected_rows === 0
? null
: update_transitions.returning[0].value,
value: value,
type: "status",
},
});
//If there is no existing record, this is the start of the transition life cycle.
// Create the initial transition record.
//Check to see if there is an existing status transition record.
//Query using Job ID, start is not null, end is null.
//If there is a current status transition record, update it with the end date, duration, and next value.
//If there is no existing record, this is the start of the transition life cycle.
// Create the initial transition record.
res.sendStatus(200); //.json(ret);
} catch (error) {
logger.log("job-status-transition-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack,
});
//If there is a current status transition record, update it with the end date, duration, and next value.
res.status(400).send(JSON.stringify(error));
}
res.sendStatus(200); //.json(ret);
} catch (error) {
logger.log("job-status-transition-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack,
});
res.status(400).send(JSON.stringify(error));
}
}
exports.statustransition = StatusTransition;

View File

@@ -1,22 +1,20 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const adminClient = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
exports.totalsSsu = async function (req, res) {
const BearerToken = req.headers.authorization;
const { id } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("job-totals-ssu", "DEBUG", req.user.email, id, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
try {
const job = await client
@@ -77,21 +75,19 @@ async function TotalsServerSide(req, res) {
}
async function Totals(req, res) {
const { job } = req.body;
const { job, id } = req.body;
const logger = req.logger;
const client = req.userGraphQLClient;
logger.log("job-totals", "DEBUG", req.user.email, job.id, {
jobid: job.id,
});
const BearerToken = req.headers.authorization;
const { id } = req.body;
logger.log("job-totals-ssu", "DEBUG", req.user.email, id, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
await AutoAddAtsIfRequired({ job, client });
try {
let ret = {
rates: await CalculateRatesTotals({ job, client }),

View File

@@ -3,3 +3,4 @@ exports.totalsSsu = require("./job-totals").totalsSsu;
exports.costing = require("./job-costing").JobCosting;
exports.costingmulti = require("./job-costing").JobCostingMulti;
exports.statustransition = require("./job-status-transition").statustransition;
exports.lifecycle = require('./job-lifecycle');

View File

@@ -0,0 +1,20 @@
const path = require("path");
/**
 * Express middleware guarding event webhooks behind a shared secret.
 * Compares the `event-secret` request header with process.env.EVENT_SECRET.
 * On a match it sets req.isEventAuthorized = true and forwards to next();
 * otherwise it responds 401 "Unauthorized" without calling next().
 * @param req
 * @param res
 * @param next
 */
function eventAuthorizationMiddleware(req, res, next) {
  const providedSecret = req.headers["event-secret"];
  if (providedSecret === process.env.EVENT_SECRET) {
    req.isEventAuthorized = true;
    next();
    return;
  }
  return res.status(401).send("Unauthorized");
}
module.exports = eventAuthorizationMiddleware;

View File

@@ -0,0 +1,26 @@
const logger = require("../utils/logger");
const adminEmail = require("../utils/adminEmail");
/**
 * Validate admin middleware.
 * Grants access when the requesting user's email is in the adminEmail list or
 * the user carries the `ioadmin` claim; otherwise logs the failed attempt and
 * responds 404 (deliberately not 403, so the endpoint's existence is hidden).
 * It adds the following properties to the request object:
 * - req.isAdmin - true when the user passed the admin check
 * @param req
 * @param res
 * @param next
 * @returns {*}
 */
const validateAdminMiddleware = (req, res, next) => {
  const isKnownAdmin =
    adminEmail.includes(req.user.email) || req.user.ioadmin;
  if (isKnownAdmin) {
    req.isAdmin = true;
    next();
    return;
  }
  logger.log("admin-validation-failed", "ERROR", req.user.email, null, {
    request: req.body,
    user: req.user,
  });
  return res.sendStatus(404);
};
module.exports = validateAdminMiddleware;

View File

@@ -0,0 +1,69 @@
const logger = require("../utils/logger");
const admin = require("firebase-admin");
/**
 * Middleware to validate Firebase ID Tokens.
 * This middleware is used to protect API endpoints from unauthorized access.
 * The token is read from the `Authorization: Bearer <token>` header, falling
 * back to the `__session` cookie; requests carrying neither are rejected with
 * 403, and requests whose token fails verification are rejected with 401.
 * It adds the following properties to the request object:
 * - req.user - the decoded Firebase ID Token
 * @param req
 * @param res
 * @param next
 * @returns {Promise<void>}
 */
const validateFirebaseIdTokenMiddleware = async (req, res, next) => {
  const authHeader = req.headers.authorization;
  const hasBearer = Boolean(authHeader && authHeader.startsWith("Bearer "));
  const hasSessionCookie = Boolean(req.cookies && req.cookies.__session);

  if (!hasBearer && !hasSessionCookie) {
    console.error("Unauthorized attempt. No authorization provided.");
    return res.status(403).send("Unauthorized");
  }

  // Prefer the Authorization header; otherwise the guard above guarantees a
  // __session cookie exists. (The original "no cookie" else-branch was
  // unreachable after that guard and has been removed.)
  const idToken = hasBearer
    ? authHeader.split("Bearer ")[1]
    : req.cookies.__session;

  try {
    // Verify with the Firebase Admin SDK and expose the decoded claims to
    // downstream handlers.
    req.user = await admin.auth().verifyIdToken(idToken);
    next();
  } catch (error) {
    logger.log("api-unauthorized-call", "WARN", null, null, {
      path: req.path,
      body: req.body,
      type: "unauthroized", // (sic) preserved so existing log queries keep matching
      ...error,
    });
    return res.status(401).send("Unauthorized");
  }
};
module.exports = validateFirebaseIdTokenMiddleware;

View File

@@ -0,0 +1,24 @@
const {GraphQLClient} = require("graphql-request");
/**
 * Express middleware that attaches a user-scoped GraphQL client.
 * Adds to the request object:
 *   - req.userGraphQLClient: GraphQLClient carrying the caller's Bearer token
 *   - req.BearerToken: the raw Authorization header value
 * @param req
 * @param res
 * @param next
 */
const withUserGraphQLClientMiddleware = (req, res, next) => {
  const token = req.headers.authorization;
  req.BearerToken = token;
  req.userGraphQLClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
    headers: { Authorization: token },
  });
  next();
};
module.exports = withUserGraphQLClientMiddleware;

View File

@@ -1,9 +1,8 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const xml2js = require("xml2js");
const GraphQLClient = require("graphql-request").GraphQLClient;
const queries = require("../graphql-client/queries");
const logger = require('../utils/logger');
require("dotenv").config({
path: path.resolve(
@@ -15,13 +14,10 @@ require("dotenv").config({
exports.mixdataUpload = async (req, res) => {
const { bodyshopid } = req.body;
const BearerToken = req.headers.authorization;
const client = req.userGraphQLClient;
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
try {
for (const element of req.files) {

View File

@@ -5,7 +5,6 @@ require("dotenv").config({
),
});
const GraphQLClient = require("graphql-request").GraphQLClient;
//const client = require("../graphql-client/graphql-client").client;
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
@@ -15,10 +14,6 @@ const {getClient} = require('../../libs/awsUtils');
async function OpenSearchUpdateHandler(req, res) {
if (req.headers["event-secret"] !== process.env.EVENT_SECRET) {
res.status(401).send("Unauthorized");
return;
}
try {
const osClient = await getClient();
@@ -186,12 +181,8 @@ async function OpenSearchSearchHandler(req, res) {
search,
});
const BearerToken = req.headers.authorization;
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const assocs = await client
.setHeaders({Authorization: BearerToken})

View File

@@ -1,21 +1,19 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const logger = require('../utils/logger');
const { job } = require("../scheduling/scheduling-job");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const _ = require("lodash");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
exports.partsScan = async function (req, res) {
const BearerToken = req.headers.authorization;
const { jobid } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("job-parts-scan", "DEBUG", req.user?.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
try {
//Query all jobline data using the user's authorization.

View File

@@ -12,14 +12,11 @@ const {
Dinero.globalRoundingMode = "HALF_EVEN";
exports.calculatelabor = async function (req, res) {
const BearerToken = req.headers.authorization;
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-calculate-labor", "DEBUG", req.user.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
const { jobs_by_pk: job } = await client

View File

@@ -12,14 +12,11 @@ const moment = require("moment");
Dinero.globalRoundingMode = "HALF_EVEN";
exports.claimtask = async function (req, res) {
const BearerToken = req.headers.authorization;
const { jobid, task, calculateOnly, employee } = req.body;
logger.log("job-payroll-pay-all", "DEBUG", req.user.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
const { jobs_by_pk: job } = await client

View File

@@ -11,14 +11,11 @@ const { json } = require("body-parser");
Dinero.globalRoundingMode = "HALF_EVEN";
exports.payall = async function (req, res) {
const BearerToken = req.headers.authorization;
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-pay-all", "DEBUG", req.user.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
const { jobs_by_pk: job } = await client

View File

@@ -0,0 +1,13 @@
// Routes for QuickBooks Desktop (qbXML) accounting exports.
// Every endpoint requires a verified Firebase ID token (router-wide), and
// each handler receives a user-scoped GraphQL client via middleware.
const express = require('express');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const {payments, payables, receivables} = require("../accounting/qbxml/qbxml");

const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);

const exportRoutes = [
  ['/qbxml/receivables', receivables],
  ['/qbxml/payables', payables],
  ['/qbxml/payments', payments],
];
for (const [path, handler] of exportRoutes) {
  router.post(path, withUserGraphQLClientMiddleware, handler);
}

module.exports = router;

View File

@@ -0,0 +1,18 @@
// Admin and user-management routes.
// Every endpoint requires a verified Firebase ID token; shop/association
// mutations are additionally restricted to admins.
const express = require('express');
const fb = require('../firebase/firebase-handler');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const validateAdminMiddleware = require("../middleware/validateAdminMiddleware");
const {createAssociation, createShop, updateShop, updateCounter} = require("../admin/adminops");

const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);

// Admin-gated operations.
const adminOps = [
  ['/createassociation', createAssociation],
  ['/createshop', createShop],
  ['/updateshop', updateShop],
  ['/updatecounter', updateCounter],
];
for (const [path, handler] of adminOps) {
  router.post(path, validateAdminMiddleware, handler);
}

// User operations — NOTE(review): these require only authentication, not the
// admin check; confirm that createuser/updateuser are intentionally open to
// any signed-in user.
router.post('/updateuser', fb.updateUser);
router.post('/getuser', fb.getUser);
router.post('/createuser', fb.createUser);

module.exports = router;

View File

@@ -0,0 +1,11 @@
// CDK DMS integration routes (vehicle make lookups).
const express = require('express');
const cdkGetMake = require('../cdk/cdk-get-makes');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");

const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);

// Handlers on this router also receive a user-scoped GraphQL client.
router.post('/getvehicles', withUserGraphQLClientMiddleware, cdkGetMake.default);

module.exports = router;

View File

@@ -0,0 +1,9 @@
// Inbound data-feed handler routes.
// NOTE(review): no authentication middleware is applied here — these look
// like external callback endpoints; confirm they are protected upstream
// (or inside the handlers themselves).
const express = require('express');
const {autohouse, claimscorp, kaizen} = require('../data/data');

const router = express.Router();

const feeds = [
  ['/ah', autohouse],
  ['/cc', claimscorp],
  ['/kaizen', kaizen],
];
for (const [path, handler] of feeds) {
  router.post(path, handler);
}

module.exports = router;

View File

@@ -0,0 +1,11 @@
// IntelliPay payment routes.
const express = require('express');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {lightbox_credentials, payment_refund, generate_payment_url, postback} = require("../intellipay/intellipay");

const router = express.Router();

// Authenticated payment operations.
router.post('/lightbox_credentials', validateFirebaseIdTokenMiddleware, lightbox_credentials);
router.post('/payment_refund', validateFirebaseIdTokenMiddleware, payment_refund);
router.post('/generate_payment_url', validateFirebaseIdTokenMiddleware, generate_payment_url);

// Gateway callback — no auth middleware, since the processor posts here.
// NOTE(review): confirm the handler verifies the postback's origin.
router.post('/postback', postback);

module.exports = router;

View File

@@ -0,0 +1,18 @@
// Job lifecycle, costing, and parts-scan routes.
const express = require('express');
// NOTE(review): `job` duplicates the destructured require of '../job/job'
// below and appears unused here; kept in case unseen code relies on it.
const job = require('../job/job');
const {partsScan} = require('../parts-scan/parts-scan');
const eventAuthorizationMiddleware = require('../middleware/eventAuthorizationMIddleware');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const {totals, statustransition, totalsSsu, costing, lifecycle, costingmulti} = require("../job/job");

const router = express.Router();

// Status transitions arrive from the internal event bus (shared-secret auth).
router.post('/statustransition', eventAuthorizationMiddleware, statustransition);

// User-facing job endpoints: Firebase auth + user-scoped GraphQL client.
const authed = [validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware];
router.post('/totals', ...authed, totals);
router.post('/totalsssu', ...authed, totalsSsu);
router.post('/costing', ...authed, costing);
router.post('/lifecycle', ...authed, lifecycle);
router.post('/costingmulti', ...authed, costingmulti);
router.post('/partsscan', ...authed, partsScan);

module.exports = router;

View File

@@ -0,0 +1,13 @@
// Media routes: signed uploads, downloads, key renames, and deletes.
// All endpoints require a verified Firebase ID token.
const express = require('express');
const {createSignedUploadURL, downloadFiles, renameKeys, deleteFiles} = require('../media/media');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");

const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);

const handlers = [
  ['/sign', createSignedUploadURL],
  ['/download', downloadFiles],
  ['/rename', renameKeys],
  ['/delete', deleteFiles],
];
for (const [path, handler] of handlers) {
  router.post(path, handler);
}

module.exports = router;

View File

@@ -0,0 +1,51 @@
// Top-level API routes: health check, search, IO events, email, and tasks.
const express = require('express');
const router = express.Router();
const logger = require("../../server/utils/logger");
const sendEmail = require("../email/sendemail");
const data = require("../data/data");
const bodyParser = require("body-parser");
const ioevent = require("../ioevent/ioevent");
const taskHandler = require("../tasks/tasks");
const os = require("../opensearch/os-handler");
const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMIddleware");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
// Health check: reports the current git commit and emails a heartbeat.
// NOTE(review): execSync blocks the event loop on every request and returns a
// Buffer that is interpolated directly into the response string (git output
// typically carries a trailing newline) — confirm this is acceptable.
router.get("/test", async function (req, res) {
  const commit = require("child_process").execSync(
    "git rev-parse --short HEAD"
  );
  // console.log(app.get('trust proxy'));
  // console.log("remoteAddress", req.socket.remoteAddress);
  // console.log("X-Forwarded-For", req.header('x-forwarded-for'));
  // NOTE(review): logger.log is called elsewhere in this codebase with five
  // arguments (tag, level, email, jobid, data); only four are passed here —
  // confirm the intended signature.
  logger.log("test-api-status", "DEBUG", "api", {commit});
  // sendEmail.sendServerEmail({
  //   subject: `API Check - ${process.env.NODE_ENV}`,
  //   text: `Server API check has come in. Remote IP: ${req.socket.remoteAddress}, X-Forwarded-For: ${req.header('x-forwarded-for')}`,
  // });
  sendEmail.sendServerEmail({
    subject: `API Check - ${process.env.NODE_ENV}`,
    text: `Server API check has come in.`,
  });
  res.status(200).send(`OK - ${commit}`);
});
// Search: user-facing search needs auth + a user-scoped GraphQL client;
// the opensearch hook is driven by the internal event bus (shared secret).
router.post("/search", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, os.search);
router.post("/opensearch", eventAuthorizationMiddleware, os.handler);
// IO Events — no auth middleware here; authorization is presumably handled
// inside the handler. TODO confirm.
router.post('/ioevent', ioevent.default);
// Email: /emailbounce receives provider bounce callbacks as raw text.
router.post('/sendemail', validateFirebaseIdTokenMiddleware, sendEmail.sendEmail);
router.post('/emailbounce', bodyParser.text(), sendEmail.emailBounce);
// Handlers
router.post('/record-handler/arms', data.arms);
router.post("/taskHandler", validateFirebaseIdTokenMiddleware, taskHandler.taskHandler);
module.exports = router;

View File

@@ -0,0 +1,11 @@
// Mixdata file-upload route (multipart bodies handled in-memory by multer;
// parsed files land on req.files for the handler).
const express = require('express');
const multer = require('multer');
const {mixdataUpload} = require('../mixdata/mixdata');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");

const router = express.Router();
const upload = multer();

router.post(
  '/upload',
  validateFirebaseIdTokenMiddleware,
  withUserGraphQLClientMiddleware,
  upload.any(),
  mixdataUpload
);

module.exports = router;

View File

@@ -0,0 +1,11 @@
// Push-notification topic subscription routes (Firebase messaging).
// Both endpoints require a verified Firebase ID token.
const express = require('express');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {subscribe, unsubscribe} = require("../firebase/firebase-handler");

const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);

router.post('/subscribe', subscribe);
router.post('/unsubscribe', unsubscribe);

module.exports = router;

View File

@@ -0,0 +1,15 @@
// Payroll routes. Every endpoint requires Firebase auth and a user-scoped
// GraphQL client, applied router-wide below.
const express = require('express');
const router = express.Router();
const payroll = require('../payroll/payroll');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");

router.use(validateFirebaseIdTokenMiddleware);
router.use(withUserGraphQLClientMiddleware);

router.post("/calculatelabor", payroll.calculatelabor);
// FIX: these two paths were registered as "payall"/"claimtask" (no leading
// slash), so Express could never match them as routes.
router.post("/payall", payroll.payall);
router.post("/claimtask", payroll.claimtask);

module.exports = router;

View File

@@ -0,0 +1,14 @@
// QuickBooks Online (QBO) accounting routes.
const express = require('express');
const {authorize, callback, receivables, payables, payments} = require('../accounting/qbo/qbo');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");

const router = express.Router();

// OAuth flow: the signed-in user kicks off authorization; Intuit then
// redirects the browser to /callback, which therefore carries no auth.
router.post('/authorize', validateFirebaseIdTokenMiddleware, authorize);
router.get('/callback', callback);

// Export endpoints: Firebase auth + user-scoped GraphQL client.
const authed = [validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware];
router.post('/receivables', ...authed, receivables);
router.post('/payables', ...authed, payables);
router.post('/payments', ...authed, payments);

module.exports = router;

View File

@@ -0,0 +1,9 @@
// Rendering helper routes.
const express = require('express');
const {inlinecss} = require('../render/inlinecss');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");

const router = express.Router();

// Inlines CSS into HTML payloads (e.g. for email rendering); auth required.
router.post('/inlinecss', validateFirebaseIdTokenMiddleware, inlinecss);

module.exports = router;

View File

@@ -0,0 +1,9 @@
const express = require('express');
const router = express.Router();
const {job} = require('../scheduling/scheduling-job');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.post('/job', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, job);
module.exports = router;

View File

@@ -0,0 +1,17 @@
const express = require('express');
const router = express.Router();
const twilio = require('twilio');
const {receive} = require('../sms/receive');
const {send} = require('../sms/send');
const {status, markConversationRead} = require('../sms/status');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
// Twilio Webhook Middleware for production
const twilioWebhookMiddleware = twilio.webhook({ validate: process.env.NODE_ENV === "PRODUCTION" });
router.post('/receive', twilioWebhookMiddleware, receive);
router.post('/send', validateFirebaseIdTokenMiddleware, send);
router.post('/status', twilioWebhookMiddleware, status);
router.post('/markConversationRead', validateFirebaseIdTokenMiddleware, markConversationRead);
module.exports = router;

View File

@@ -0,0 +1,8 @@
const express = require('express');
const router = express.Router();
const {techLogin} = require('../tech/tech');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
router.post('/login', validateFirebaseIdTokenMiddleware, techLogin);
module.exports = router;

View File

@@ -0,0 +1,9 @@
const express = require('express');
const router = express.Router();
const {servertime, jsrAuth} = require('../utils/utils');
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
router.post('/time', servertime);
router.post('/jsr', validateFirebaseIdTokenMiddleware, jsrAuth);
module.exports = router;

View File

@@ -1,4 +1,3 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
@@ -14,17 +13,14 @@ require("dotenv").config({
});
exports.job = async (req, res) => {
const BearerToken = req.headers.authorization;
const { jobId } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log("smart-scheduling-start", "DEBUG", req.user.email, jobId, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_UPCOMING_APPOINTMENTS, {

View File

@@ -0,0 +1,13 @@
/**
 * Allow-list of admin email addresses, consumed by validateAdminMiddleware
 * to gate admin-only endpoints (alongside the `ioadmin` token claim).
 * @type {string[]}
 */
const adminEmail = [
  "patrick@imex.dev",
  //"patrick@imex.test", // disabled entry — re-enable for the test environment if needed
  "patrick@imex.prod",
  "patrick@imexsystems.ca",
  "patrick@thinkimex.com",
];
module.exports = adminEmail;

View File

@@ -0,0 +1,100 @@
const durationToHumanReadable = require("./durationToHumanReadable");
const moment = require("moment");
const _ = require("lodash");
const crypto = require('crypto');
/**
 * Deterministically maps a key (e.g. a status name) to a hex color string.
 * The key is SHA-256 hashed, the hex digest parsed as a number, and the
 * result reduced modulo 16777215 to six hex digits. parseInt on a 64-char
 * digest loses precision, but the mapping stays deterministic per key,
 * which is all that is required here.
 * @param {string} key
 * @returns {string} color like "#a1b2c3"
 */
const getColor = (key) => {
  const digest = crypto.createHash('sha256').update(key).digest('hex');
  const numeric = parseInt(digest, 16);
  return '#' + (numeric % 16777215).toString(16).padStart(6, '0');
};
/**
 * Aggregates time-in-status from a list of status transitions.
 * For each transition it accumulates `transition.duration` per status value,
 * computes each status's percentage of the total (the last status absorbs
 * rounding drift so percentages sum to exactly 100), assigns a deterministic
 * color per status, and tracks how long the currently-open status has run.
 * @param {Array<Object>} transitions - transition records with `value`,
 *   `duration`, `duration_readable`, `prev_value`, `next_value`, `start`, `end`
 * @param {Array<string>} [statuses] - optional ordering for the summations
 * @returns {{summations: Array, totalStatuses: number, total: number,
 *   totalCurrentStatusDuration: ?Object, humanReadableTotal: string}}
 */
const calculateStatusDuration = (transitions, statuses) => {
  const statusDuration = {};
  let totalDuration = 0;
  let totalCurrentStatusDuration = null;
  const summations = [];

  transitions.forEach((transition) => {
    const duration = transition.duration;
    totalDuration += duration;

    // An open transition (started but not ended) is still running: measure it
    // against "now" and surface it separately from the per-status totals.
    if (transition.start && !transition.end) {
      const elapsed = moment.duration(moment().diff(moment(transition.start)));
      totalCurrentStatusDuration = {
        value: elapsed.asMilliseconds(),
        humanReadable: durationToHumanReadable(elapsed),
      };
    }

    if (!transition.prev_value) {
      // First transition (no previous status): (re)set the bucket.
      statusDuration[transition.value] = {
        value: duration,
        humanReadable: transition.duration_readable,
      };
    } else if (statusDuration[transition.value]) {
      // Re-entered status: accumulate and keep the latest readable label.
      // (The original code had two byte-identical branches here for
      // `!next_value` and the general case; they are merged.)
      statusDuration[transition.value].value += duration;
      statusDuration[transition.value].humanReadable = transition.duration_readable;
    } else {
      statusDuration[transition.value] = {
        value: duration,
        humanReadable: transition.duration_readable,
      };
    }
  });

  // Percentage of total per status; the final status gets the remainder so
  // the percentages always add up to exactly 100.
  let totalPercentage = 0;
  const statusKeys = Object.keys(statusDuration);
  statusKeys.forEach((status, index) => {
    if (index !== statusKeys.length - 1) {
      const percentage = (statusDuration[status].value / totalDuration) * 100;
      totalPercentage += percentage;
      statusDuration[status].percentage = percentage;
    } else {
      statusDuration[status].percentage = 100 - totalPercentage;
    }
  });

  for (const [status, { value, humanReadable, percentage }] of Object.entries(statusDuration)) {
    if (status !== 'total') {
      summations.push({
        status,
        value,
        humanReadable,
        percentage,
        color: getColor(status),
        roundedPercentage: `${Math.round(percentage)}%`,
      });
    }
  }

  const humanReadableTotal = durationToHumanReadable(moment.duration(totalDuration));
  return {
    // When a status ordering is supplied, sort summations to match it.
    summations: _.isArray(statuses) && !_.isEmpty(statuses)
      ? summations.sort((a, b) => statuses.indexOf(a.status) - statuses.indexOf(b.status))
      : summations,
    totalStatuses: summations.length,
    total: totalDuration,
    totalCurrentStatusDuration,
    humanReadableTotal,
  };
};
module.exports = calculateStatusDuration;

View File

@@ -0,0 +1,22 @@
/**
 * Renders a moment.Duration as e.g. "2 days, 3 hours, 5 seconds",
 * skipping zero-valued units and pluralizing where needed.
 * Returns 'N/A' for a missing/falsy duration.
 * @param duration - moment.Duration (or anything exposing years()..seconds())
 * @returns {string}
 */
const durationToHumanReadable = (duration) => {
  if (!duration) return 'N/A';
  const units = [
    ['year', duration.years()],
    ['month', duration.months()],
    ['day', duration.days()],
    ['hour', duration.hours()],
    ['minute', duration.minutes()],
    ['second', duration.seconds()],
  ];
  const parts = [];
  for (const [label, amount] of units) {
    if (amount) {
      parts.push(`${amount} ${label}${amount > 1 ? 's' : ''}`);
    }
  }
  return parts.join(', ');
}
module.exports = durationToHumanReadable;