Reformat all project files to use the prettier config file.

This commit is contained in:
Patrick Fic
2024-03-27 15:35:07 -07:00
parent b161530381
commit e1df64d592
873 changed files with 111387 additions and 125473 deletions

View File

@@ -1,9 +1,6 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const GraphQLClient = require("graphql-request").GraphQLClient;
@@ -13,279 +10,240 @@ const moment = require("moment");
const Dinero = require("dinero.js");
const AxiosLib = require("axios").default;
const axios = AxiosLib.create();
const {PBS_ENDPOINTS, PBS_CREDENTIALS} = require("./pbs-constants");
const {CheckForErrors} = require("./pbs-job-export");
const { PBS_ENDPOINTS, PBS_CREDENTIALS } = require("./pbs-constants");
const { CheckForErrors } = require("./pbs-job-export");
const uuid = require("uuid").v4;
axios.interceptors.request.use((x) => {
const socket = x.socket;
const socket = x.socket;
const headers = {
...x.headers.common,
...x.headers[x.method],
...x.headers,
};
const printable = `${new Date()} | Request: ${x.method.toUpperCase()} | ${
x.url
} | ${JSON.stringify(x.data)} | ${JSON.stringify(headers)}`;
console.log(printable);
const headers = {
...x.headers.common,
...x.headers[x.method],
...x.headers
};
const printable = `${new Date()} | Request: ${x.method.toUpperCase()} | ${
x.url
} | ${JSON.stringify(x.data)} | ${JSON.stringify(headers)}`;
console.log(printable);
CdkBase.createJsonEvent(socket, "TRACE", `Raw Request: ${printable}`, x.data);
CdkBase.createJsonEvent(socket, "TRACE", `Raw Request: ${printable}`, x.data);
return x;
return x;
});
axios.interceptors.response.use((x) => {
const socket = x.config.socket;
const socket = x.config.socket;
const printable = `${new Date()} | Response: ${x.status} | ${JSON.stringify(
x.data
)}`;
console.log(printable);
CdkBase.createJsonEvent(
socket,
"TRACE",
`Raw Response: ${printable}`,
x.data
);
const printable = `${new Date()} | Response: ${x.status} | ${JSON.stringify(x.data)}`;
console.log(printable);
CdkBase.createJsonEvent(socket, "TRACE", `Raw Response: ${printable}`, x.data);
return x;
return x;
});
async function PbsCalculateAllocationsAp(socket, billids) {
try {
CdkBase.createLogEvent(
socket,
"DEBUG",
`Received request to calculate allocations for ${billids}`
);
const {bills, bodyshops} = await QueryBillData(socket, billids);
const bodyshop = bodyshops[0];
socket.bodyshop = bodyshop;
socket.bills = bills;
try {
CdkBase.createLogEvent(socket, "DEBUG", `Received request to calculate allocations for ${billids}`);
const { bills, bodyshops } = await QueryBillData(socket, billids);
const bodyshop = bodyshops[0];
socket.bodyshop = bodyshop;
socket.bills = bills;
//Each bill will enter its own top level transaction.
//Each bill will enter its own top level transaction.
const transactionlist = [];
if (bills.length === 0) {
CdkBase.createLogEvent(
socket,
"ERROR",
`No bills found for export. Ensure they have not already been exported and try again.`
);
}
bills.forEach((bill) => {
//Keep the allocations at the bill level.
const transactionObject = {
SerialNumber: socket.bodyshop.pbs_serialnumber,
billid: bill.id,
Posting: {
Reference: bill.invoice_number,
JournalCode: socket.txEnvelope ? socket.txEnvelope.journal : null,
TransactionDate: moment().tz(socket.bodyshop.timezone).toISOString(), //"0001-01-01T00:00:00.0000000Z",
//Description: "Bulk AP posting.",
//AdditionalInfo: "String",
Source: "ImEX Online", //TODO:AIO Resolve this for rome online.
Lines: [], //socket.apAllocations,
},
};
const billHash = {
[bodyshop.md_responsibility_centers.taxes.federal_itc.name]: {
Account:
bodyshop.md_responsibility_centers.taxes.federal_itc.dms_acctnumber,
ControlNumber: bill.vendor.dmsid,
Amount: Dinero(),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString(),
},
[bodyshop.md_responsibility_centers.taxes.state.name]: {
Account:
bodyshop.md_responsibility_centers.taxes.state.dms_acctnumber,
ControlNumber: bill.vendor.dmsid,
Amount: Dinero(),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString(),
},
};
bill.billlines.forEach((bl) => {
let lineDinero = Dinero({
amount: Math.round((bl.actual_cost || 0) * 100),
})
.multiply(bl.quantity)
.multiply(bill.is_credit_memo ? -1 : 1);
const cc = getCostAccount(bl, bodyshop.md_responsibility_centers);
if (!billHash[cc.name]) {
billHash[cc.name] = {
Account:
bodyshop.pbs_configuration.appostingaccount === "wip"
? cc.dms_wip_acctnumber
: cc.dms_acctnumber,
ControlNumber:
bodyshop.pbs_configuration.apcontrol === "ro"
? bill.job.ro_number
: bill.vendor.dmsid,
Amount: Dinero(),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString(),
};
}
//Add the line amount.
billHash[cc.name] = {
...billHash[cc.name],
Amount: billHash[cc.name].Amount.add(lineDinero),
};
//Does the line have taxes?
if (bl.applicable_taxes.federal) {
billHash[bodyshop.md_responsibility_centers.taxes.federal_itc.name] =
{
...billHash[
bodyshop.md_responsibility_centers.taxes.federal_itc.name
],
Amount: billHash[
bodyshop.md_responsibility_centers.taxes.federal_itc.name
].Amount.add(lineDinero.percentage(bill.federal_tax_rate || 0)),
};
}
if (bl.applicable_taxes.state) {
billHash[bodyshop.md_responsibility_centers.taxes.state.name] = {
...billHash[bodyshop.md_responsibility_centers.taxes.state.name],
Amount: billHash[
bodyshop.md_responsibility_centers.taxes.state.name
].Amount.add(lineDinero.percentage(bill.state_tax_rate || 0)),
};
}
//End tax check
});
let APAmount = Dinero();
Object.keys(billHash).map((key) => {
if (billHash[key].Amount.getAmount() > 0 || billHash[key].Amount.getAmount() < 0) {
transactionObject.Posting.Lines.push({
...billHash[key],
Amount: billHash[key].Amount.toFormat("0.00"),
});
APAmount = APAmount.add(billHash[key].Amount); //Calculate the total expense for the bill iteratively to create the corresponding credit to AP.
}
});
transactionObject.Posting.Lines.push({
Account: bodyshop.md_responsibility_centers.ap.dms_acctnumber,
ControlNumber: bill.vendor.dmsid,
Amount: APAmount.multiply(-1).toFormat("0.00"),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString(),
});
transactionlist.push(transactionObject);
});
return transactionlist;
} catch (error) {
CdkBase.createLogEvent(
socket,
"ERROR",
`Error encountered in PbsCalculateAllocationsAp. ${error}`
);
const transactionlist = [];
if (bills.length === 0) {
CdkBase.createLogEvent(
socket,
"ERROR",
`No bills found for export. Ensure they have not already been exported and try again.`
);
}
bills.forEach((bill) => {
//Keep the allocations at the bill level.
const transactionObject = {
SerialNumber: socket.bodyshop.pbs_serialnumber,
billid: bill.id,
Posting: {
Reference: bill.invoice_number,
JournalCode: socket.txEnvelope ? socket.txEnvelope.journal : null,
TransactionDate: moment().tz(socket.bodyshop.timezone).toISOString(), //"0001-01-01T00:00:00.0000000Z",
//Description: "Bulk AP posting.",
//AdditionalInfo: "String",
Source: "ImEX Online", //TODO:AIO Resolve this for rome online.
Lines: [] //socket.apAllocations,
}
};
const billHash = {
[bodyshop.md_responsibility_centers.taxes.federal_itc.name]: {
Account: bodyshop.md_responsibility_centers.taxes.federal_itc.dms_acctnumber,
ControlNumber: bill.vendor.dmsid,
Amount: Dinero(),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString()
},
[bodyshop.md_responsibility_centers.taxes.state.name]: {
Account: bodyshop.md_responsibility_centers.taxes.state.dms_acctnumber,
ControlNumber: bill.vendor.dmsid,
Amount: Dinero(),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString()
}
};
bill.billlines.forEach((bl) => {
let lineDinero = Dinero({
amount: Math.round((bl.actual_cost || 0) * 100)
})
.multiply(bl.quantity)
.multiply(bill.is_credit_memo ? -1 : 1);
const cc = getCostAccount(bl, bodyshop.md_responsibility_centers);
if (!billHash[cc.name]) {
billHash[cc.name] = {
Account: bodyshop.pbs_configuration.appostingaccount === "wip" ? cc.dms_wip_acctnumber : cc.dms_acctnumber,
ControlNumber: bodyshop.pbs_configuration.apcontrol === "ro" ? bill.job.ro_number : bill.vendor.dmsid,
Amount: Dinero(),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString()
};
}
//Add the line amount.
billHash[cc.name] = {
...billHash[cc.name],
Amount: billHash[cc.name].Amount.add(lineDinero)
};
//Does the line have taxes?
if (bl.applicable_taxes.federal) {
billHash[bodyshop.md_responsibility_centers.taxes.federal_itc.name] = {
...billHash[bodyshop.md_responsibility_centers.taxes.federal_itc.name],
Amount: billHash[bodyshop.md_responsibility_centers.taxes.federal_itc.name].Amount.add(
lineDinero.percentage(bill.federal_tax_rate || 0)
)
};
}
if (bl.applicable_taxes.state) {
billHash[bodyshop.md_responsibility_centers.taxes.state.name] = {
...billHash[bodyshop.md_responsibility_centers.taxes.state.name],
Amount: billHash[bodyshop.md_responsibility_centers.taxes.state.name].Amount.add(
lineDinero.percentage(bill.state_tax_rate || 0)
)
};
}
//End tax check
});
let APAmount = Dinero();
Object.keys(billHash).map((key) => {
if (billHash[key].Amount.getAmount() > 0 || billHash[key].Amount.getAmount() < 0) {
transactionObject.Posting.Lines.push({
...billHash[key],
Amount: billHash[key].Amount.toFormat("0.00")
});
APAmount = APAmount.add(billHash[key].Amount); //Calculate the total expense for the bill iteratively to create the corresponding credit to AP.
}
});
transactionObject.Posting.Lines.push({
Account: bodyshop.md_responsibility_centers.ap.dms_acctnumber,
ControlNumber: bill.vendor.dmsid,
Amount: APAmount.multiply(-1).toFormat("0.00"),
// Comment: "String",
AdditionalInfo: bill.vendor.name,
InvoiceNumber: bill.invoice_number,
InvoiceDate: moment(bill.date).tz(bodyshop.timezone).toISOString()
});
transactionlist.push(transactionObject);
});
return transactionlist;
} catch (error) {
CdkBase.createLogEvent(socket, "ERROR", `Error encountered in PbsCalculateAllocationsAp. ${error}`);
}
}
exports.PbsCalculateAllocationsAp = PbsCalculateAllocationsAp;
async function QueryBillData(socket, billids) {
CdkBase.createLogEvent(
socket,
"DEBUG",
`Querying bill data for id(s) ${billids}`
);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
const result = await client
.setHeaders({Authorization: `Bearer ${socket.handshake.auth.token}`})
.request(queries.GET_PBS_AP_ALLOCATIONS, {billids: billids});
CdkBase.createLogEvent(
socket,
"TRACE",
`Bill data query result ${JSON.stringify(result, null, 2)}`
);
CdkBase.createLogEvent(socket, "DEBUG", `Querying bill data for id(s) ${billids}`);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
const result = await client
.setHeaders({ Authorization: `Bearer ${socket.handshake.auth.token}` })
.request(queries.GET_PBS_AP_ALLOCATIONS, { billids: billids });
CdkBase.createLogEvent(socket, "TRACE", `Bill data query result ${JSON.stringify(result, null, 2)}`);
return result;
return result;
}
//@returns the account object.
function getCostAccount(billline, respcenters) {
if (!billline.cost_center) return null;
if (!billline.cost_center) return null;
const acctName = respcenters.defaults.costs[billline.cost_center];
const acctName = respcenters.defaults.costs[billline.cost_center];
return respcenters.costs.find((c) => c.name === acctName);
return respcenters.costs.find((c) => c.name === acctName);
}
exports.PbsExportAp = async function (socket, {billids, txEnvelope}) {
CdkBase.createLogEvent(socket, "DEBUG", `Exporting selected AP.`);
exports.PbsExportAp = async function (socket, { billids, txEnvelope }) {
CdkBase.createLogEvent(socket, "DEBUG", `Exporting selected AP.`);
//apAllocations has the same shape as the lines key for the accounting posting to PBS.
socket.apAllocations = await PbsCalculateAllocationsAp(socket, billids);
socket.txEnvelope = txEnvelope;
for (const allocation of socket.apAllocations) {
const {billid, ...restAllocation} = allocation;
const {data: AccountPostingChange} = await axios.post(
PBS_ENDPOINTS.AccountingPostingChange,
restAllocation,
{auth: PBS_CREDENTIALS, socket}
);
//apAllocations has the same shape as the lines key for the accounting posting to PBS.
socket.apAllocations = await PbsCalculateAllocationsAp(socket, billids);
socket.txEnvelope = txEnvelope;
for (const allocation of socket.apAllocations) {
const { billid, ...restAllocation } = allocation;
const { data: AccountPostingChange } = await axios.post(PBS_ENDPOINTS.AccountingPostingChange, restAllocation, {
auth: PBS_CREDENTIALS,
socket
});
CheckForErrors(socket, AccountPostingChange);
CheckForErrors(socket, AccountPostingChange);
if (AccountPostingChange.WasSuccessful) {
CdkBase.createLogEvent(socket, "DEBUG", `Marking bill as exported.`);
await MarkApExported(socket, [billid]);
if (AccountPostingChange.WasSuccessful) {
CdkBase.createLogEvent(socket, "DEBUG", `Marking bill as exported.`);
await MarkApExported(socket, [billid]);
socket.emit("ap-export-success", billid);
} else {
CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`);
socket.emit("ap-export-failure", {
billid,
error: AccountPostingChange.Message,
});
}
socket.emit("ap-export-success", billid);
} else {
CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`);
socket.emit("ap-export-failure", {
billid,
error: AccountPostingChange.Message
});
}
socket.emit("ap-export-complete");
}
socket.emit("ap-export-complete");
};
async function MarkApExported(socket, billids) {
CdkBase.createLogEvent(
socket,
"DEBUG",
`Marking bills as exported for id ${billids}`
);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
const result = await client
.setHeaders({Authorization: `Bearer ${socket.handshake.auth.token}`})
.request(queries.MARK_BILLS_EXPORTED, {
billids,
bill: {
exported: true,
exported_at: new Date(),
},
logs: socket.bills.map((bill) => ({
bodyshopid: socket.bodyshop.id,
billid: bill.id,
successful: true,
useremail: socket.user.email,
})),
});
CdkBase.createLogEvent(socket, "DEBUG", `Marking bills as exported for id ${billids}`);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
const result = await client
.setHeaders({ Authorization: `Bearer ${socket.handshake.auth.token}` })
.request(queries.MARK_BILLS_EXPORTED, {
billids,
bill: {
exported: true,
exported_at: new Date()
},
logs: socket.bills.map((bill) => ({
bodyshopid: socket.bodyshop.id,
billid: bill.id,
successful: true,
useremail: socket.user.email
}))
});
return result;
return result;
}

View File

@@ -1,16 +1,13 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const IMEX_PBS_USER = process.env.IMEX_PBS_USER,
IMEX_PBS_PASSWORD = process.env.IMEX_PBS_PASSWORD;
IMEX_PBS_PASSWORD = process.env.IMEX_PBS_PASSWORD;
const PBS_CREDENTIALS = {
password: IMEX_PBS_PASSWORD,
username: IMEX_PBS_USER,
password: IMEX_PBS_PASSWORD,
username: IMEX_PBS_USER
};
exports.PBS_CREDENTIALS = PBS_CREDENTIALS;
@@ -21,10 +18,10 @@ exports.PBS_CREDENTIALS = PBS_CREDENTIALS;
const pbsDomain = `https://partnerhub.pbsdealers.com/json/reply`;
exports.PBS_ENDPOINTS = {
AccountGet: `${pbsDomain}/AccountGet`,
ContactGet: `${pbsDomain}/ContactGet`,
VehicleGet: `${pbsDomain}/VehicleGet`,
AccountingPostingChange: `${pbsDomain}/AccountingPostingChange`,
ContactChange: `${pbsDomain}/ContactChange`,
VehicleChange: `${pbsDomain}/VehicleChange`,
AccountGet: `${pbsDomain}/AccountGet`,
ContactGet: `${pbsDomain}/ContactGet`,
VehicleGet: `${pbsDomain}/VehicleGet`,
AccountingPostingChange: `${pbsDomain}/AccountingPostingChange`,
ContactChange: `${pbsDomain}/ContactChange`,
VehicleChange: `${pbsDomain}/VehicleChange`
};

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,32 +1,29 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const OAuthClient = require("intuit-oauth");
const logger = require("../../utils/logger");
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI
});
exports.default = async (req, res) => {
try {
logger.log("qbo-auth-uri", "DEBUG", req.user.email, null, null);
const authUri = oauthClient.authorizeUri({
scope: [OAuthClient.scopes.Accounting, OAuthClient.scopes.OpenId],
state: req.user.email,
}); // can be an array of multiple scopes ex : {scope:[OAuthClient.scopes.Accounting,OAuthClient.scopes.OpenId]}
try {
logger.log("qbo-auth-uri", "DEBUG", req.user.email, null, null);
const authUri = oauthClient.authorizeUri({
scope: [OAuthClient.scopes.Accounting, OAuthClient.scopes.OpenId],
state: req.user.email
}); // can be an array of multiple scopes ex : {scope:[OAuthClient.scopes.Accounting,OAuthClient.scopes.OpenId]}
res.send(authUri);
} catch (error) {
logger.log("qbo-auth-uri-error", "ERROR", req.user.email, null, {error});
res.send(authUri);
} catch (error) {
logger.log("qbo-auth-uri-error", "ERROR", req.user.email, null, { error });
res.status(500).json(error);
}
res.status(500).json(error);
}
};

View File

@@ -1,96 +1,82 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const OAuthClient = require("intuit-oauth");
const client = require("../../graphql-client/graphql-client").client;
const queries = require("../../graphql-client/queries");
const {parse, stringify} = require("querystring");
const { parse, stringify } = require("querystring");
const InstanceManager = require("../../utils/instanceMgr").default;
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true,
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
});
let url;
if (process.env.NODE_ENV === "production") { //TODO:AIO Add in QBO callbacks.
url = InstanceManager({imex: `https://imex.online`, rome: `https://romeonline.io`,});
if (process.env.NODE_ENV === "production") {
//TODO:AIO Add in QBO callbacks.
url = InstanceManager({ imex: `https://imex.online`, rome: `https://romeonline.io` });
} else if (process.env.NODE_ENV === "test") {
url = InstanceManager({imex: `https://test.imex.online`,rome: `https://test.romeonline.io`});
url = InstanceManager({ imex: `https://test.imex.online`, rome: `https://test.romeonline.io` });
} else {
url = `http://localhost:3000`;
url = `http://localhost:3000`;
}
exports.default = async (req, res) => {
const queryString = req.url.split("?").reverse()[0];
const params = parse(queryString);
try {
logger.log("qbo-callback-create-token", "DEBUG", params.state, null, null);
const authResponse = await oauthClient.createToken(req.url);
if (authResponse.json.error) {
logger.log("qbo-callback-error", "ERROR", params.state, null, {
error: authResponse.json,
});
res.redirect(
`${url}/manage/accounting/qbo?error=${encodeURIComponent(
JSON.stringify(authResponse.json)
)}`
);
} else {
await client.request(queries.SET_QBO_AUTH_WITH_REALM, {
email: params.state,
qbo_auth: {...authResponse.json, createdAt: Date.now()},
qbo_realmId: params.realmId,
});
logger.log(
"qbo-callback-create-token-success",
"DEBUG",
params.state,
null,
null
);
const queryString = req.url.split("?").reverse()[0];
const params = parse(queryString);
try {
logger.log("qbo-callback-create-token", "DEBUG", params.state, null, null);
const authResponse = await oauthClient.createToken(req.url);
if (authResponse.json.error) {
logger.log("qbo-callback-error", "ERROR", params.state, null, {
error: authResponse.json
});
res.redirect(`${url}/manage/accounting/qbo?error=${encodeURIComponent(JSON.stringify(authResponse.json))}`);
} else {
await client.request(queries.SET_QBO_AUTH_WITH_REALM, {
email: params.state,
qbo_auth: { ...authResponse.json, createdAt: Date.now() },
qbo_realmId: params.realmId
});
logger.log("qbo-callback-create-token-success", "DEBUG", params.state, null, null);
res.redirect(
`${url}/manage/accounting/qbo?${stringify(params)}`
);
}
} catch (e) {
logger.log("qbo-callback-error", "ERROR", params.state, null, {
error: e,
});
res.status(400).json(e);
res.redirect(`${url}/manage/accounting/qbo?${stringify(params)}`);
}
} catch (e) {
logger.log("qbo-callback-error", "ERROR", params.state, null, {
error: e
});
res.status(400).json(e);
}
};
exports.refresh = async (oauthClient, req) => {
try {
// logger.log("qbo-token-refresh", "DEBUG", req.user.email, null, null);
const authResponse = await oauthClient.refresh();
await client.request(queries.SET_QBO_AUTH, {
email: req.user.email,
qbo_auth: {...authResponse.json, createdAt: Date.now()},
});
} catch (error) {
logger.log("qbo-token-refresh-error", "ERROR", req.user.email, null, {
error,
});
}
try {
// logger.log("qbo-token-refresh", "DEBUG", req.user.email, null, null);
const authResponse = await oauthClient.refresh();
await client.request(queries.SET_QBO_AUTH, {
email: req.user.email,
qbo_auth: { ...authResponse.json, createdAt: Date.now() }
});
} catch (error) {
logger.log("qbo-token-refresh-error", "ERROR", req.user.email, null, {
error
});
}
};
exports.setNewRefreshToken = async (email, apiResponse) => {
//logger.log("qbo-token-updated", "DEBUG", email, null, null);
//logger.log("qbo-token-updated", "DEBUG", email, null, null);
await client.request(queries.SET_QBO_AUTH, {
email,
qbo_auth: {...apiResponse.token, createdAt: Date.now()},
});
await client.request(queries.SET_QBO_AUTH, {
email,
qbo_auth: { ...apiResponse.token, createdAt: Date.now() }
});
};

View File

@@ -2,336 +2,282 @@ const urlBuilder = require("./qbo").urlBuilder;
const StandardizeName = require("./qbo").StandardizeName;
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const Dinero = require("dinero.js");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const apiGqlClient = require("../../graphql-client/graphql-client").client;
const queries = require("../../graphql-client/queries");
const {
refresh: refreshOauthToken,
setNewRefreshToken,
} = require("./qbo-callback");
const { refresh: refreshOauthToken, setNewRefreshToken } = require("./qbo-callback");
const OAuthClient = require("intuit-oauth");
const moment = require("moment-timezone");
const findTaxCode = require("../qb-receivables-lines").findTaxCode;
exports.default = async (req, res) => {
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
});
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment:
process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true,
try {
//Fetch the API Access Tokens & Set them for the session.
const response = await apiGqlClient.request(queries.GET_QBO_AUTH, {
email: req.user.email
});
try {
//Fetch the API Access Tokens & Set them for the session.
const response = await apiGqlClient.request(queries.GET_QBO_AUTH, {
email: req.user.email,
const { qbo_realmId } = response.associations[0];
oauthClient.setToken(response.associations[0].qbo_auth);
if (!qbo_realmId) {
res.status(401).json({ error: "No company associated." });
return;
}
await refreshOauthToken(oauthClient, req);
const { bills: billsToQuery, elgen } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payable-create", "DEBUG", req.user.email, billsToQuery);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {
bills: billsToQuery
});
const { bills, bodyshops } = result;
const ret = [];
const bodyshop = bodyshops[0];
for (const bill of bills) {
try {
let vendorRecord;
vendorRecord = await QueryVendorRecord(oauthClient, qbo_realmId, req, bill);
if (!vendorRecord) {
vendorRecord = await InsertVendorRecord(oauthClient, qbo_realmId, req, bill);
}
const insertResults = await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
// //No error. Mark the job exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
}
ret.push({ billid: bill.id, success: true });
} catch (error) {
ret.push({
billid: bill.id,
success: false,
errorMessage: (error && error.authResponse && error.authResponse.body) || (error && error.message)
});
const {qbo_realmId} = response.associations[0];
oauthClient.setToken(response.associations[0].qbo_auth);
if (!qbo_realmId) {
res.status(401).json({error: "No company associated."});
return;
//Add the export log error.
if (elgen) {
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: false,
message: JSON.stringify([
(error && error.authResponse && error.authResponse.body) || (error && error.message)
]),
useremail: req.user.email
}
]
});
}
await refreshOauthToken(oauthClient, req);
const {bills: billsToQuery, elgen} = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payable-create", "DEBUG", req.user.email, billsToQuery);
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {
bills: billsToQuery,
});
const {bills, bodyshops} = result;
const ret = [];
const bodyshop = bodyshops[0];
for (const bill of bills) {
try {
let vendorRecord;
vendorRecord = await QueryVendorRecord(
oauthClient,
qbo_realmId,
req,
bill
);
if (!vendorRecord) {
vendorRecord = await InsertVendorRecord(
oauthClient,
qbo_realmId,
req,
bill
);
}
const insertResults = await InsertBill(
oauthClient,
qbo_realmId,
req,
bill,
vendorRecord,
bodyshop
);
// //No error. Mark the job exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone),
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email,
},
],
});
}
ret.push({billid: bill.id, success: true});
} catch (error) {
ret.push({
billid: bill.id,
success: false,
errorMessage:
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
});
//Add the export log error.
if (elgen) {
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: false,
message: JSON.stringify([
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
]),
useremail: req.user.email,
},
],
});
}
}
}
res.status(200).json(ret);
} catch (error) {
console.log(error);
logger.log("qbo-payable-create-error", "ERROR", req.user.email, {error: error.message, stack: error.stack});
res.status(400).json(error);
}
}
res.status(200).json(ret);
} catch (error) {
console.log(error);
logger.log("qbo-payable-create-error", "ERROR", req.user.email, {
error: error.message,
stack: error.stack
});
res.status(400).json(error);
}
};
async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(
bill.vendor.name
)}'`
),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
result.json.QueryResponse &&
result.json.QueryResponse.Vendor &&
result.json.QueryResponse.Vendor[0]
);
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, bill.id, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
method: "QueryVendorRecord",
});
throw error;
}
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(bill.vendor.name)}'`
),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
result.json.QueryResponse &&
result.json.QueryResponse.Vendor &&
result.json.QueryResponse.Vendor[0]
);
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, bill.id, {
error: (error && error.authResponse && error.authResponse.body) || (error && error.message),
method: "QueryVendorRecord"
});
throw error;
}
}
async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
const Vendor = {
DisplayName: bill.vendor.name,
};
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "vendor"),
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(Vendor),
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Vendor;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, bill.id, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
method: "InsertVendorRecord",
});
throw error;
}
const Vendor = {
DisplayName: bill.vendor.name
};
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "vendor"),
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(Vendor)
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Vendor;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, bill.id, {
error: (error && error.authResponse && error.authResponse.body) || (error && error.message),
method: "InsertVendorRecord"
});
throw error;
}
}
async function InsertBill(
oauthClient,
qbo_realmId,
req,
bill,
vendor,
bodyshop
) {
const {accounts, taxCodes, classes} = await QueryMetaData(
oauthClient,
qbo_realmId,
req
);
async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop) {
const { accounts, taxCodes, classes } = await QueryMetaData(oauthClient, qbo_realmId, req);
const lines = bill.billlines.map((il) =>
generateBillLine(
il,
accounts,
bill.job.class,
bodyshop.md_responsibility_centers.sales_tax_codes,
classes,
taxCodes,
bodyshop.md_responsibility_centers.costs
const lines = bill.billlines.map((il) =>
generateBillLine(
il,
accounts,
bill.job.class,
bodyshop.md_responsibility_centers.sales_tax_codes,
classes,
taxCodes,
bodyshop.md_responsibility_centers.costs
)
);
//QB USA with GST
//This was required for the No. 1 Collision Group.
if (
bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_")
) {
lines.push({
DetailType: "AccountBasedExpenseLineDetail",
AccountBasedExpenseLineDetail: {
...(bill.job.class ? { ClassRef: { value: classes[bill.job.class] } } : {}),
AccountRef: {
value: accounts[bodyshop.md_responsibility_centers.taxes.federal.accountdesc]
}
},
Amount: Dinero({
amount: Math.round(
bill.billlines.reduce((acc, val) => {
return acc + val.actual_cost * val.quantity;
}, 0) * 100
)
);
})
.percentage(bill.federal_tax_rate)
//QB USA with GST
//This was required for the No. 1 Collision Group.
if (
bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_")
) {
lines.push({
DetailType: "AccountBasedExpenseLineDetail",
AccountBasedExpenseLineDetail: {
...(bill.job.class
? {ClassRef: {value: classes[bill.job.class]}}
: {}),
AccountRef: {
value:
accounts[
bodyshop.md_responsibility_centers.taxes.federal.accountdesc
],
},
},
Amount: Dinero({
amount: Math.round(
bill.billlines.reduce((acc, val) => {
return acc + val.actual_cost * val.quantity;
}, 0) * 100
),
})
.percentage(bill.federal_tax_rate)
.toFormat(DineroQbFormat),
});
}
const billQbo = {
VendorRef: {
value: vendor.Id,
},
TxnDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD"),
}),
DocNumber: bill.invoice_number,
//...(bill.job.class ? { ClassRef: { Id: classes[bill.job.class] } } : {}),
...(!(
bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_")
)
? {GlobalTaxCalculation: "TaxExcluded"}
: {}),
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: {value: bodyshop.accountingconfig.qbo_departmentid},
}),
PrivateNote: `RO ${bill.job.ro_number || ""}`,
Line: lines,
};
logger.log("qbo-payable-objectlog", "DEBUG", req.user.email, bill.id, {
billQbo,
.toFormat(DineroQbFormat)
});
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
bill.is_credit_memo ? "vendorcredit" : "bill"
),
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(billQbo),
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, bill.id, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
method: "InsertBill",
});
throw error;
}
}
const billQbo = {
VendorRef: {
value: vendor.Id
},
TxnDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DocNumber: bill.invoice_number,
//...(bill.job.class ? { ClassRef: { Id: classes[bill.job.class] } } : {}),
...(!(
bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_")
)
? { GlobalTaxCalculation: "TaxExcluded" }
: {}),
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
PrivateNote: `RO ${bill.job.ro_number || ""}`,
Line: lines
};
logger.log("qbo-payable-objectlog", "DEBUG", req.user.email, bill.id, {
billQbo
});
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, bill.is_credit_memo ? "vendorcredit" : "bill"),
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(billQbo)
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, bill.id, {
error: (error && error.authResponse && error.authResponse.body) || (error && error.message),
method: "InsertBill"
});
throw error;
}
}
// [
@@ -347,97 +293,88 @@ async function InsertBill(
// },
// ],
const generateBillLine = (
billLine,
accounts,
jobClass,
ioSalesTaxCodes,
classes,
taxCodes,
costCenters
) => {
const account = costCenters.find((c) => c.name === billLine.cost_center);
const generateBillLine = (billLine, accounts, jobClass, ioSalesTaxCodes, classes, taxCodes, costCenters) => {
const account = costCenters.find((c) => c.name === billLine.cost_center);
return {
DetailType: "AccountBasedExpenseLineDetail",
return {
DetailType: "AccountBasedExpenseLineDetail",
AccountBasedExpenseLineDetail: {
...(jobClass ? {ClassRef: {value: classes[jobClass]}} : {}),
TaxCodeRef: {
value:
taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)],
},
AccountRef: {
value: accounts[account.accountname],
},
},
AccountBasedExpenseLineDetail: {
...(jobClass ? { ClassRef: { value: classes[jobClass] } } : {}),
TaxCodeRef: {
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
AccountRef: {
value: accounts[account.accountname]
}
},
Amount: Dinero({
amount: Math.round(billLine.actual_cost * 100),
})
.multiply(billLine.quantity || 1)
.toFormat(DineroQbFormat),
};
Amount: Dinero({
amount: Math.round(billLine.actual_cost * 100)
})
.multiply(billLine.quantity || 1)
.toFormat(DineroQbFormat)
};
};
async function QueryMetaData(oauthClient, qbo_realmId, req) {
const accounts = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From Account where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
setNewRefreshToken(req.user.email, accounts);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
const accounts = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From Account where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
setNewRefreshToken(req.user.email, accounts);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
const taxCodeMapping = {};
const taxCodeMapping = {};
taxCodes.json &&
taxCodes.json &&
taxCodes.json.QueryResponse &&
taxCodes.json.QueryResponse.TaxCode &&
taxCodes.json.QueryResponse.TaxCode.forEach((t) => {
taxCodeMapping[t.Name] = t.Id;
taxCodeMapping[t.Name] = t.Id;
});
const accountMapping = {};
const accountMapping = {};
accounts.json &&
accounts.json &&
accounts.json.QueryResponse &&
accounts.json.QueryResponse.Account &&
accounts.json.QueryResponse.Account.forEach((t) => {
accountMapping[t.FullyQualifiedName] = t.Id;
accountMapping[t.FullyQualifiedName] = t.Id;
});
const classMapping = {};
classes.json &&
const classMapping = {};
classes.json &&
classes.json.QueryResponse &&
classes.json.QueryResponse.Class &&
classes.json.QueryResponse.Class.forEach((t) => {
classMapping[t.Name] = t.Id;
classMapping[t.Name] = t.Id;
});
return {
accounts: accountMapping,
taxCodes: taxCodeMapping,
classes: classMapping,
};
return {
accounts: accountMapping,
taxCodes: taxCodeMapping,
classes: classMapping
};
}

View File

@@ -1,531 +1,439 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const Dinero = require("dinero.js");
const apiGqlClient = require("../../graphql-client/graphql-client").client;
const queries = require("../../graphql-client/queries");
const {
refresh: refreshOauthToken,
setNewRefreshToken,
} = require("./qbo-callback");
const { refresh: refreshOauthToken, setNewRefreshToken } = require("./qbo-callback");
const OAuthClient = require("intuit-oauth");
const moment = require("moment-timezone");
const GraphQLClient = require("graphql-request").GraphQLClient;
const {
QueryInsuranceCo,
InsertInsuranceCo,
InsertJob,
InsertOwner,
QueryJob,
QueryOwner,
QueryInsuranceCo,
InsertInsuranceCo,
InsertJob,
InsertOwner,
QueryJob,
QueryOwner
} = require("../qbo/qbo-receivables");
const {urlBuilder} = require("./qbo");
const {DineroQbFormat} = require("../accounting-constants");
const {findTaxCode} = require("../qb-receivables-lines");
const { urlBuilder } = require("./qbo");
const { DineroQbFormat } = require("../accounting-constants");
const { findTaxCode } = require("../qb-receivables-lines");
exports.default = async (req, res) => {
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment:
process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true,
const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
logging: true
});
try {
//Fetch the API Access Tokens & Set them for the session.
const response = await apiGqlClient.request(queries.GET_QBO_AUTH, {
email: req.user.email
});
try {
//Fetch the API Access Tokens & Set them for the session.
const response = await apiGqlClient.request(queries.GET_QBO_AUTH, {
email: req.user.email,
});
const {qbo_realmId} = response.associations[0];
oauthClient.setToken(response.associations[0].qbo_auth);
if (!qbo_realmId) {
res.status(401).json({error: "No company associated."});
return;
}
await refreshOauthToken(oauthClient, req);
const {payments: paymentsToQuery, elgen} = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payment-create", "DEBUG", req.user.email, paymentsToQuery);
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
payments: paymentsToQuery,
});
const {payments, bodyshops} = result;
const bodyshop = bodyshops[0];
const ret = [];
for (const payment of payments) {
try {
let isThreeTier = bodyshop.accountingconfig.tiers === 3;
let twoTierPref = bodyshop.accountingconfig.twotierpref;
//Replace this with a for-each loop to check every single Job that's included in the list.
//QB Multi AR - If it is in this scenario, overwrite whatever defaults are set since multi AR
//will always go Source => RO
if (payment.payer !== "Customer" && payment.payer !== "Insurance") {
payment.job.ins_co_nm = payment.payer;
twoTierPref = "source";
isThreeTier = false;
}
let insCoCustomerTier, ownerCustomerTier, jobTier;
if (isThreeTier || (!isThreeTier && twoTierPref === "source")) {
//Insert the insurance company tier.
//Query for top level customer, the insurance company name.
insCoCustomerTier = await QueryInsuranceCo(
oauthClient,
qbo_realmId,
req,
payment.job
);
if (!insCoCustomerTier) {
//Creating the Insurance Customer.
insCoCustomerTier = await InsertInsuranceCo(
oauthClient,
qbo_realmId,
req,
payment.job,
bodyshop
);
}
}
if (isThreeTier || (!isThreeTier && twoTierPref === "name")) {
//Insert the name/owner and account for whether the source should be the ins co in 3 tier..
ownerCustomerTier = await QueryOwner(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier,
insCoCustomerTier
);
//Query for the owner itself.
if (!ownerCustomerTier) {
ownerCustomerTier = await InsertOwner(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier,
insCoCustomerTier
);
}
}
//Query for the Job or Create it.
jobTier = await QueryJob(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier
? ownerCustomerTier
: twoTierPref === "source"
? insCoCustomerTier
: ownerCustomerTier
);
// Need to validate that the job tier is associated to the right individual?
if (!jobTier) {
jobTier = await InsertJob(
oauthClient,
qbo_realmId,
req,
payment.job,
ownerCustomerTier || insCoCustomerTier
);
}
if (payment.amount > 0) {
await InsertPayment(
oauthClient,
qbo_realmId,
req,
payment,
jobTier,
bodyshop
);
} else {
await InsertCreditMemo(
oauthClient,
qbo_realmId,
req,
payment,
jobTier,
bodyshop
);
}
// //No error. Mark the payment exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QBO_MARK_PAYMENT_EXPORTED, {
paymentId: payment.id,
payment: {
exportedat: moment().tz(bodyshop.timezone),
},
logs: [
{
bodyshopid: bodyshop.id,
paymentid: payment.id,
successful: true,
useremail: req.user.email,
},
],
});
}
ret.push({paymentid: payment.id, success: true});
} catch (error) {
logger.log("qbo-payment-create-error", "ERROR", req.user.email, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
});
//Add the export log error.
if (elgen) {
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
paymentid: payment.id,
successful: false,
message: JSON.stringify([
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
]),
useremail: req.user.email,
},
],
});
}
ret.push({
paymentid: payment.id,
success: false,
errorMessage:
(error && error.authResponse && error.authResponse.body) ||
(error && error.message),
});
}
}
res.status(200).json(ret);
} catch (error) {
console.log(error);
logger.log("qbo-payment-create-error", "ERROR", req.user.email, {
error: error.message,
stack: error.stack,
});
res.status(400).json(error);
const { qbo_realmId } = response.associations[0];
oauthClient.setToken(response.associations[0].qbo_auth);
if (!qbo_realmId) {
res.status(401).json({ error: "No company associated." });
return;
}
await refreshOauthToken(oauthClient, req);
const { payments: paymentsToQuery, elgen } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payment-create", "DEBUG", req.user.email, paymentsToQuery);
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
payments: paymentsToQuery
});
const { payments, bodyshops } = result;
const bodyshop = bodyshops[0];
const ret = [];
for (const payment of payments) {
try {
let isThreeTier = bodyshop.accountingconfig.tiers === 3;
let twoTierPref = bodyshop.accountingconfig.twotierpref;
//Replace this with a for-each loop to check every single Job that's included in the list.
//QB Multi AR - If it is in this scenario, overwrite whatever defaults are set since multi AR
//will always go Source => RO
if (payment.payer !== "Customer" && payment.payer !== "Insurance") {
payment.job.ins_co_nm = payment.payer;
twoTierPref = "source";
isThreeTier = false;
}
let insCoCustomerTier, ownerCustomerTier, jobTier;
if (isThreeTier || (!isThreeTier && twoTierPref === "source")) {
//Insert the insurance company tier.
//Query for top level customer, the insurance company name.
insCoCustomerTier = await QueryInsuranceCo(oauthClient, qbo_realmId, req, payment.job);
if (!insCoCustomerTier) {
//Creating the Insurance Customer.
insCoCustomerTier = await InsertInsuranceCo(oauthClient, qbo_realmId, req, payment.job, bodyshop);
}
}
if (isThreeTier || (!isThreeTier && twoTierPref === "name")) {
//Insert the name/owner and account for whether the source should be the ins co in 3 tier..
ownerCustomerTier = await QueryOwner(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier,
insCoCustomerTier
);
//Query for the owner itself.
if (!ownerCustomerTier) {
ownerCustomerTier = await InsertOwner(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier,
insCoCustomerTier
);
}
}
//Query for the Job or Create it.
jobTier = await QueryJob(
oauthClient,
qbo_realmId,
req,
payment.job,
isThreeTier ? ownerCustomerTier : twoTierPref === "source" ? insCoCustomerTier : ownerCustomerTier
);
// Need to validate that the job tier is associated to the right individual?
if (!jobTier) {
jobTier = await InsertJob(oauthClient, qbo_realmId, req, payment.job, ownerCustomerTier || insCoCustomerTier);
}
if (payment.amount > 0) {
await InsertPayment(oauthClient, qbo_realmId, req, payment, jobTier, bodyshop);
} else {
await InsertCreditMemo(oauthClient, qbo_realmId, req, payment, jobTier, bodyshop);
}
// //No error. Mark the payment exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QBO_MARK_PAYMENT_EXPORTED, {
paymentId: payment.id,
payment: {
exportedat: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
paymentid: payment.id,
successful: true,
useremail: req.user.email
}
]
});
}
ret.push({ paymentid: payment.id, success: true });
} catch (error) {
logger.log("qbo-payment-create-error", "ERROR", req.user.email, {
error: (error && error.authResponse && error.authResponse.body) || (error && error.message)
});
//Add the export log error.
if (elgen) {
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
paymentid: payment.id,
successful: false,
message: JSON.stringify([
(error && error.authResponse && error.authResponse.body) || (error && error.message)
]),
useremail: req.user.email
}
]
});
}
ret.push({
paymentid: payment.id,
success: false,
errorMessage: (error && error.authResponse && error.authResponse.body) || (error && error.message)
});
}
}
res.status(200).json(ret);
} catch (error) {
console.log(error);
logger.log("qbo-payment-create-error", "ERROR", req.user.email, {
error: error.message,
stack: error.stack
});
res.status(400).json(error);
}
};
async function InsertPayment(
async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef, bodyshop) {
const { paymentMethods, invoices } = await QueryMetaData(
oauthClient,
qbo_realmId,
req,
payment,
parentRef,
bodyshop
) {
const {paymentMethods, invoices} = await QueryMetaData(
oauthClient,
qbo_realmId,
req,
payment.job.ro_number,
false,
parentRef
);
payment.job.ro_number,
false,
parentRef
);
if (invoices && invoices.length !== 1) {
throw new Error(
`More than 1 invoice with DocNumber ${payment.job.ro_number} found.`
);
}
if (invoices && invoices.length !== 1) {
throw new Error(`More than 1 invoice with DocNumber ${payment.job.ro_number} found.`);
}
const paymentQbo = {
CustomerRef: {
value: parentRef.Id,
},
TxnDate: moment(payment.date) //.tz(bodyshop.timezone)
.format("YYYY-MM-DD"),
//DueDate: bill.due_date && moment(bill.due_date).format("YYYY-MM-DD"),
DocNumber: payment.paymentnum,
TotalAmt: Dinero({
amount: Math.round(payment.amount * 100),
}).toFormat(DineroQbFormat),
PaymentMethodRef: {
value: paymentMethods[payment.type],
},
PaymentRefNum: payment.transactionid,
...(invoices && invoices.length === 1 && invoices[0]
? {
Line: [
{
Amount: Dinero({
amount: Math.round(payment.amount * 100),
}).toFormat(DineroQbFormat),
LinkedTxn: [
{
TxnId: invoices[0].Id,
TxnType: "Invoice",
},
],
},
],
const paymentQbo = {
CustomerRef: {
value: parentRef.Id
},
TxnDate: moment(payment.date) //.tz(bodyshop.timezone)
.format("YYYY-MM-DD"),
//DueDate: bill.due_date && moment(bill.due_date).format("YYYY-MM-DD"),
DocNumber: payment.paymentnum,
TotalAmt: Dinero({
amount: Math.round(payment.amount * 100)
}).toFormat(DineroQbFormat),
PaymentMethodRef: {
value: paymentMethods[payment.type]
},
PaymentRefNum: payment.transactionid,
...(invoices && invoices.length === 1 && invoices[0]
? {
Line: [
{
Amount: Dinero({
amount: Math.round(payment.amount * 100)
}).toFormat(DineroQbFormat),
LinkedTxn: [
{
TxnId: invoices[0].Id,
TxnType: "Invoice"
}
]
}
: {}),
};
logger.log("qbo-payments-objectlog", "DEBUG", req.user.email, payment.id, {
paymentQbo,
]
}
: {})
};
logger.log("qbo-payments-objectlog", "DEBUG", req.user.email, payment.id, {
paymentQbo
});
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "payment"),
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(paymentQbo)
});
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "payment"),
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(paymentQbo),
});
setNewRefreshToken(req.user.email, result);
return result && result.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, payment.id, {
error: error && error.message,
method: "InsertPayment",
});
throw error;
}
setNewRefreshToken(req.user.email, result);
return result && result.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, payment.id, {
error: error && error.message,
method: "InsertPayment"
});
throw error;
}
}
async function QueryMetaData(
oauthClient,
qbo_realmId,
req,
ro_number,
isCreditMemo,
parentTierRef
) {
const invoice = await oauthClient.makeApiCall({
url: urlBuilder(
qbo_realmId,
"query",
`select * From Invoice where DocNumber like '${ro_number}%'`
),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditMemo, parentTierRef) {
const invoice = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Invoice where DocNumber like '${ro_number}%'`),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
const paymentMethods = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From PaymentMethod`),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
setNewRefreshToken(req.user.email, paymentMethods);
const paymentMethods = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From PaymentMethod`),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
setNewRefreshToken(req.user.email, paymentMethods);
// const classes = await oauthClient.makeApiCall({
// url: urlBuilder(qbo_realmId, "query", `select * From Class`),
// method: "POST",
// headers: {
// "Content-Type": "application/json",
// },
// });
// const classes = await oauthClient.makeApiCall({
// url: urlBuilder(qbo_realmId, "query", `select * From Class`),
// method: "POST",
// headers: {
// "Content-Type": "application/json",
// },
// });
const paymentMethodMapping = {};
const paymentMethodMapping = {};
paymentMethods.json &&
paymentMethods.json &&
paymentMethods.json.QueryResponse &&
paymentMethods.json.QueryResponse.PaymentMethod &&
paymentMethods.json.QueryResponse.PaymentMethod.forEach((t) => {
paymentMethodMapping[t.Name] = t.Id;
paymentMethodMapping[t.Name] = t.Id;
});
// const accountMapping = {};
// const accountMapping = {};
// accounts.json &&
// accounts.json.QueryResponse &&
// accounts.json.QueryResponse.Account.forEach((t) => {
// accountMapping[t.Name] = t.Id;
// });
// accounts.json &&
// accounts.json.QueryResponse &&
// accounts.json.QueryResponse.Account.forEach((t) => {
// accountMapping[t.Name] = t.Id;
// });
// const classMapping = {};
// classes.json &&
// classes.json.QueryResponse &&
// classes.json.QueryResponse.Class.forEach((t) => {
// accountMapping[t.Name] = t.Id;
// });
let ret = {};
// const classMapping = {};
// classes.json &&
// classes.json.QueryResponse &&
// classes.json.QueryResponse.Class.forEach((t) => {
// accountMapping[t.Name] = t.Id;
// });
let ret = {};
if (isCreditMemo) {
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item`),
method: "POST",
headers: {
"Content-Type": "application/json",
},
});
setNewRefreshToken(req.user.email, items);
if (isCreditMemo) {
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item`),
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
setNewRefreshToken(req.user.email, items);
const itemMapping = {};
const itemMapping = {};
items.json &&
items.json.QueryResponse &&
items.json.QueryResponse.Item &&
items.json.QueryResponse.Item.forEach((t) => {
itemMapping[t.Name] = t.Id;
});
const taxCodeMapping = {};
items.json &&
items.json.QueryResponse &&
items.json.QueryResponse.Item &&
items.json.QueryResponse.Item.forEach((t) => {
itemMapping[t.Name] = t.Id;
});
const taxCodeMapping = {};
taxCodes.json &&
taxCodes.json.QueryResponse &&
taxCodes.json.QueryResponse.TaxCode &&
taxCodes.json.QueryResponse.TaxCode.forEach((t) => {
taxCodeMapping[t.Name] = t.Id;
});
ret = {
...ret,
items: itemMapping,
taxCodes: taxCodeMapping,
};
}
return {
...ret,
paymentMethods: paymentMethodMapping,
invoices:
invoice.json &&
invoice.json.QueryResponse &&
invoice.json.QueryResponse.Invoice &&
(parentTierRef
? [
invoice.json.QueryResponse.Invoice.find(
(x) => x.CustomerRef.value === parentTierRef.Id
),
]
: [invoice.json.QueryResponse.Invoice[0]]),
taxCodes.json &&
taxCodes.json.QueryResponse &&
taxCodes.json.QueryResponse.TaxCode &&
taxCodes.json.QueryResponse.TaxCode.forEach((t) => {
taxCodeMapping[t.Name] = t.Id;
});
ret = {
...ret,
items: itemMapping,
taxCodes: taxCodeMapping
};
}
return {
...ret,
paymentMethods: paymentMethodMapping,
invoices:
invoice.json &&
invoice.json.QueryResponse &&
invoice.json.QueryResponse.Invoice &&
(parentTierRef
? [invoice.json.QueryResponse.Invoice.find((x) => x.CustomerRef.value === parentTierRef.Id)]
: [invoice.json.QueryResponse.Invoice[0]])
};
}
async function InsertCreditMemo(
async function InsertCreditMemo(oauthClient, qbo_realmId, req, payment, parentRef, bodyshop) {
const { paymentMethods, invoices, items, taxCodes } = await QueryMetaData(
oauthClient,
qbo_realmId,
req,
payment,
parentRef,
bodyshop
) {
const {paymentMethods, invoices, items, taxCodes} = await QueryMetaData(
oauthClient,
qbo_realmId,
req,
payment.job.ro_number,
true,
parentRef
);
payment.job.ro_number,
true,
parentRef
);
if (invoices && invoices.length !== 1) {
throw new Error(
`More than 1 invoice with DocNumber ${payment.ro_number} found.`
);
}
if (invoices && invoices.length !== 1) {
throw new Error(`More than 1 invoice with DocNumber ${payment.ro_number} found.`);
}
const paymentQbo = {
CustomerRef: {
value: parentRef.Id,
},
TxnDate: moment(payment.date)
//.tz(bodyshop.timezone)
.format("YYYY-MM-DD"),
DocNumber: payment.paymentnum,
...(invoices && invoices[0]
? {InvoiceRef: {value: invoices[0].Id}}
: {}),
PaymentRefNum: payment.transactionid,
Line: [
{
DetailType: "SalesItemLineDetail",
Amount: Dinero({amount: Math.round(payment.amount * -100)}).toFormat(
DineroQbFormat
),
SalesItemLineDetail: {
ItemRef: {
value:
items[
payment.job.bodyshop.md_responsibility_centers.refund
.accountitem
],
},
Qty: 1,
TaxCodeRef: {
value:
taxCodes[
findTaxCode(
{
local: false,
federal: false,
state: false,
},
payment.job.bodyshop.md_responsibility_centers.sales_tax_codes
)
],
},
},
},
],
};
logger.log("qbo-payments-objectlog", "DEBUG", req.user.email, payment.id, {
paymentQbo,
const paymentQbo = {
CustomerRef: {
value: parentRef.Id
},
TxnDate: moment(payment.date)
//.tz(bodyshop.timezone)
.format("YYYY-MM-DD"),
DocNumber: payment.paymentnum,
...(invoices && invoices[0] ? { InvoiceRef: { value: invoices[0].Id } } : {}),
PaymentRefNum: payment.transactionid,
Line: [
{
DetailType: "SalesItemLineDetail",
Amount: Dinero({ amount: Math.round(payment.amount * -100) }).toFormat(DineroQbFormat),
SalesItemLineDetail: {
ItemRef: {
value: items[payment.job.bodyshop.md_responsibility_centers.refund.accountitem]
},
Qty: 1,
TaxCodeRef: {
value:
taxCodes[
findTaxCode(
{
local: false,
federal: false,
state: false
},
payment.job.bodyshop.md_responsibility_centers.sales_tax_codes
)
]
}
}
}
]
};
logger.log("qbo-payments-objectlog", "DEBUG", req.user.email, payment.id, {
paymentQbo
});
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "creditmemo"),
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(paymentQbo)
});
try {
const result = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "creditmemo"),
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(paymentQbo),
});
setNewRefreshToken(req.user.email, result);
return result && result.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, payment.id, {
error: error && error.message,
method: "InsertCreditMemo",
});
throw error;
}
setNewRefreshToken(req.user.email, result);
return result && result.Bill;
} catch (error) {
logger.log("qbo-payables-error", "DEBUG", req.user.email, payment.id, {
error: error && error.message,
method: "InsertCreditMemo"
});
throw error;
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,21 +1,16 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
function urlBuilder(realmId, object, query = null) {
return `https://${
process.env.NODE_ENV === "production" ? "" : "sandbox-"
}quickbooks.api.intuit.com/v3/company/${realmId}/${object}${
query ? `?query=${encodeURIComponent(query)}` : ""
}`;
return `https://${
process.env.NODE_ENV === "production" ? "" : "sandbox-"
}quickbooks.api.intuit.com/v3/company/${realmId}/${object}${query ? `?query=${encodeURIComponent(query)}` : ""}`;
}
function StandardizeName(str) {
return str.replace(new RegExp(/'/g), "\\'");
return str.replace(new RegExp(/'/g), "\\'");
}
exports.urlBuilder = urlBuilder;

View File

@@ -6,154 +6,131 @@ const Dinero = require("dinero.js");
const builder = require("xmlbuilder2");
const QbXmlUtils = require("./qbxml-utils");
const moment = require("moment-timezone");
const logger = require('../../utils/logger');
const logger = require("../../utils/logger");
const InstanceManager = require("../../utils/instanceMgr").default;
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.default = async (req, res) => {
const {bills: billsToQuery} = req.body;
const { bills: billsToQuery } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log(
"qbxml-payable-create",
"DEBUG",
req.user.email,
req.body.billsToQuery
);
try {
logger.log("qbxml-payable-create", "DEBUG", req.user.email, req.body.billsToQuery);
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {
bills: billsToQuery,
});
const {bills, bodyshops} = result;
const bodyshop = bodyshops[0];
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {
bills: billsToQuery
});
const { bills, bodyshops } = result;
const bodyshop = bodyshops[0];
const QbXmlToExecute = [];
bills.map((i) => {
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0"],
qbxml: generateBill(i, bodyshop),
});
});
const QbXmlToExecute = [];
bills.map((i) => {
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0"],
qbxml: generateBill(i, bodyshop)
});
});
//For each invoice.
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log(
"qbxml-payable-error",
"ERROR",
req.user.email,
req.body.billsToQuery,
{error: error.message, stack: error.stack}
);
res.status(400).send(JSON.stringify(error));
}
//For each invoice.
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log("qbxml-payable-error", "ERROR", req.user.email, req.body.billsToQuery, {
error: error.message,
stack: error.stack
});
res.status(400).send(JSON.stringify(error));
}
};
const generateBill = (bill, bodyshop) => {
const billQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
[`${bill.is_credit_memo ? "VendorCreditAddRq" : "BillAddRq"}`]: {
[`${bill.is_credit_memo ? "VendorCreditAdd" : "BillAdd"}`]: {
VendorRef: {
FullName: bill.vendor.name,
},
TxnDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
// .tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD"),
}),
RefNumber: bill.invoice_number,
Memo: `RO ${bill.job.ro_number || ""}`,
ExpenseLineAdd: bill.billlines.map((il) =>
generateBillLine(
il,
bodyshop.md_responsibility_centers,
bill.job.class
)
),
},
},
const billQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
[`${bill.is_credit_memo ? "VendorCreditAddRq" : "BillAddRq"}`]: {
[`${bill.is_credit_memo ? "VendorCreditAdd" : "BillAdd"}`]: {
VendorRef: {
FullName: bill.vendor.name
},
},
};
TxnDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
// .tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
RefNumber: bill.invoice_number,
Memo: `RO ${bill.job.ro_number || ""}`,
ExpenseLineAdd: bill.billlines.map((il) =>
generateBillLine(il, bodyshop.md_responsibility_centers, bill.job.class)
)
}
}
}
}
};
var billQbxml_partial = builder
.create(billQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true,
})
.end({pretty: true});
var billQbxml_partial = builder
.create(billQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true
})
.end({ pretty: true });
const billQbxml_Full = QbXmlUtils.addQbxmlHeader(billQbxml_partial);
const billQbxml_Full = QbXmlUtils.addQbxmlHeader(billQbxml_partial);
return billQbxml_Full;
return billQbxml_Full;
};
const generateBillLine = (billLine, responsibilityCenters, jobClass) => {
return {
AccountRef: {
FullName: responsibilityCenters.costs.find(
(c) => c.name === billLine.cost_center
).accountname,
},
Amount: Dinero({
amount: Math.round(billLine.actual_cost * 100),
})
.multiply(billLine.quantity || 1)
.toFormat(DineroQbFormat),
...(jobClass ? {ClassRef: {FullName: jobClass}} : {}),
...InstanceManager({imex:{
SalesTaxCodeRef: {
FullName: findTaxCode(
billLine,
responsibilityCenters.sales_tax_codes
),
},
} })
};
return {
AccountRef: {
FullName: responsibilityCenters.costs.find((c) => c.name === billLine.cost_center).accountname
},
Amount: Dinero({
amount: Math.round(billLine.actual_cost * 100)
})
.multiply(billLine.quantity || 1)
.toFormat(DineroQbFormat),
...(jobClass ? { ClassRef: { FullName: jobClass } } : {}),
...InstanceManager({
imex: {
SalesTaxCodeRef: {
FullName: findTaxCode(billLine, responsibilityCenters.sales_tax_codes)
}
}
})
};
};
const findTaxCode = (billLine, taxcode) => {
const {
applicable_taxes: {local, state, federal},
} =
billLine.applicable_taxes === null
? {
...billLine,
applicable_taxes: {local: false, state: false, federal: false},
}
: billLine;
const t = taxcode.filter(
(t) =>
!!t.local === !!local &&
!!t.state === !!state &&
!!t.federal === !!federal
);
if (t.length === 1) {
return t[0].code;
} else if (t.length > 1) {
return "Multiple Tax Codes Match";
} else {
return "No Tax Code Matches";
}
const {
applicable_taxes: { local, state, federal }
} =
billLine.applicable_taxes === null
? {
...billLine,
applicable_taxes: { local: false, state: false, federal: false }
}
: billLine;
const t = taxcode.filter((t) => !!t.local === !!local && !!t.state === !!state && !!t.federal === !!federal);
if (t.length === 1) {
return t[0].code;
} else if (t.length > 1) {
return "Multiple Tax Codes Match";
} else {
return "No Tax Code Matches";
}
};

View File

@@ -6,205 +6,166 @@ const builder = require("xmlbuilder2");
const moment = require("moment-timezone");
const QbXmlUtils = require("./qbxml-utils");
const QbxmlReceivables = require("./qbxml-receivables");
const logger = require('../../utils/logger');
const logger = require("../../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const {generateJobTier, generateOwnerTier, generateSourceTier} = QbXmlUtils;
const { generateJobTier, generateOwnerTier, generateSourceTier } = QbXmlUtils;
exports.default = async (req, res) => {
const {payments: paymentsToQuery} = req.body;
const { payments: paymentsToQuery } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log(
"qbxml-payments-create",
"DEBUG",
req.user.email,
req.body.paymentsToQuery,
null
);
try {
logger.log("qbxml-payments-create", "DEBUG", req.user.email, req.body.paymentsToQuery, null);
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
payments: paymentsToQuery,
});
const {payments, bodyshops} = result;
const bodyshop = bodyshops[0];
const isThreeTier = bodyshop.accountingconfig.tiers === 3;
const twoTierPref = bodyshop.accountingconfig.twotierpref;
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
payments: paymentsToQuery
});
const { payments, bodyshops } = result;
const bodyshop = bodyshops[0];
const isThreeTier = bodyshop.accountingconfig.tiers === 3;
const twoTierPref = bodyshop.accountingconfig.twotierpref;
const QbXmlToExecute = [];
payments.map((i) => {
if (isThreeTier) {
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0", "3100"],
qbxml: QbxmlReceivables.generateSourceCustomerQbxml(i.job, bodyshop), // Create the source customer.
});
}
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0", "3100"],
qbxml: QbxmlReceivables.generateJobQbxml(
i.job,
bodyshop,
isThreeTier,
2,
twoTierPref
),
});
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0", "3100"],
qbxml: QbxmlReceivables.generateJobQbxml(
i.job,
bodyshop,
isThreeTier,
3,
twoTierPref
),
});
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0"],
qbxml: generatePayment(i, isThreeTier, twoTierPref, bodyshop),
});
const QbXmlToExecute = [];
payments.map((i) => {
if (isThreeTier) {
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0", "3100"],
qbxml: QbxmlReceivables.generateSourceCustomerQbxml(i.job, bodyshop) // Create the source customer.
});
}
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log(
"qbxml-payments-error",
"error",
req.user.email,
req.body.paymentsToQuery,
{error: error.message, stack: error.stack}
);
res.status(400).send(JSON.stringify(error));
}
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0", "3100"],
qbxml: QbxmlReceivables.generateJobQbxml(i.job, bodyshop, isThreeTier, 2, twoTierPref)
});
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0", "3100"],
qbxml: QbxmlReceivables.generateJobQbxml(i.job, bodyshop, isThreeTier, 3, twoTierPref)
});
QbXmlToExecute.push({
id: i.id,
okStatusCodes: ["0"],
qbxml: generatePayment(i, isThreeTier, twoTierPref, bodyshop)
});
});
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log("qbxml-payments-error", "error", req.user.email, req.body.paymentsToQuery, {
error: error.message,
stack: error.stack
});
res.status(400).send(JSON.stringify(error));
}
};
const generatePayment = (payment, isThreeTier, twoTierPref, bodyshop) => {
let paymentQbxmlObj;
if (payment.amount > 0) {
paymentQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
ReceivePaymentAddRq: {
ReceivePaymentAdd: {
CustomerRef: {
FullName: (payment.job.bodyshop.accountingconfig.tiers === 3
? `${generateSourceTier(payment.job)}:${generateOwnerTier(
payment.job,
isThreeTier,
twoTierPref
)}:${generateJobTier(payment.job)}`
: `${generateOwnerTier(
payment.job,
isThreeTier,
twoTierPref
)}:${generateJobTier(payment.job)}`
).trim(),
},
ARAccountRef: {
FullName:
payment.job.bodyshop.md_responsibility_centers.ar.accountname,
},
TxnDate: moment(payment.date)
// .tz(bodyshop.timezone)
.format("YYYY-MM-DD"), //Trim String
RefNumber: payment.paymentnum || payment.transactionid,
TotalAmount: Dinero({
amount: Math.round(payment.amount * 100),
}).toFormat(DineroQbFormat),
PaymentMethodRef: {
FullName: payment.type,
},
Memo: `RO ${payment.job.ro_number || ""} OWNER ${
payment.job.ownr_fn || ""
} ${payment.job.ownr_ln || ""} ${payment.job.ownr_co_nm || ""} ${
payment.stripeid || ""
} ${payment.payer ? ` PAID BY ${payment.payer}` : ""}`,
IsAutoApply: true,
},
},
},
},
};
} else {
paymentQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
CreditMemoAddRq: {
CreditMemoAdd: {
CustomerRef: {
FullName: (payment.job.bodyshop.accountingconfig.tiers === 3
? `${generateSourceTier(payment.job)}:${generateOwnerTier(
payment.job,
isThreeTier,
twoTierPref
)}:${generateJobTier(payment.job)}`
: `${generateOwnerTier(
payment.job,
isThreeTier,
twoTierPref
)}:${generateJobTier(payment.job)}`
).trim(),
},
ARAccountRef: {
FullName:
payment.job.bodyshop.md_responsibility_centers.ar.accountname,
},
TxnDate: moment(payment.date)
//.tz(bodyshop.timezone)
.format("YYYY-MM-DD"), //Trim String
RefNumber:
payment.paymentnum || payment.stripeid || payment.transactionid,
let paymentQbxmlObj;
if (payment.amount > 0) {
paymentQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
ReceivePaymentAddRq: {
ReceivePaymentAdd: {
CustomerRef: {
FullName: (payment.job.bodyshop.accountingconfig.tiers === 3
? `${generateSourceTier(payment.job)}:${generateOwnerTier(
payment.job,
isThreeTier,
twoTierPref
)}:${generateJobTier(payment.job)}`
: `${generateOwnerTier(payment.job, isThreeTier, twoTierPref)}:${generateJobTier(payment.job)}`
).trim()
},
ARAccountRef: {
FullName: payment.job.bodyshop.md_responsibility_centers.ar.accountname
},
TxnDate: moment(payment.date)
// .tz(bodyshop.timezone)
.format("YYYY-MM-DD"), //Trim String
RefNumber: payment.paymentnum || payment.transactionid,
TotalAmount: Dinero({
amount: Math.round(payment.amount * 100)
}).toFormat(DineroQbFormat),
PaymentMethodRef: {
FullName: payment.type
},
Memo: `RO ${payment.job.ro_number || ""} OWNER ${
payment.job.ownr_fn || ""
} ${payment.job.ownr_ln || ""} ${payment.job.ownr_co_nm || ""} ${
payment.stripeid || ""
} ${payment.payer ? ` PAID BY ${payment.payer}` : ""}`,
IsAutoApply: true
}
}
}
}
};
} else {
paymentQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
CreditMemoAddRq: {
CreditMemoAdd: {
CustomerRef: {
FullName: (payment.job.bodyshop.accountingconfig.tiers === 3
? `${generateSourceTier(payment.job)}:${generateOwnerTier(
payment.job,
isThreeTier,
twoTierPref
)}:${generateJobTier(payment.job)}`
: `${generateOwnerTier(payment.job, isThreeTier, twoTierPref)}:${generateJobTier(payment.job)}`
).trim()
},
ARAccountRef: {
FullName: payment.job.bodyshop.md_responsibility_centers.ar.accountname
},
TxnDate: moment(payment.date)
//.tz(bodyshop.timezone)
.format("YYYY-MM-DD"), //Trim String
RefNumber: payment.paymentnum || payment.stripeid || payment.transactionid,
CreditMemoLineAdd: [
{
ItemRef: {
FullName:
payment.job.bodyshop.md_responsibility_centers.refund
.accountitem,
},
Desc: payment.memo,
Amount: Dinero({
amount: Math.round(payment.amount * 100 * -1),
}).toFormat(DineroQbFormat),
SalesTaxCodeRef: {FullName: "E"},
},
],
},
},
},
},
};
}
CreditMemoLineAdd: [
{
ItemRef: {
FullName: payment.job.bodyshop.md_responsibility_centers.refund.accountitem
},
Desc: payment.memo,
Amount: Dinero({
amount: Math.round(payment.amount * 100 * -1)
}).toFormat(DineroQbFormat),
SalesTaxCodeRef: { FullName: "E" }
}
]
}
}
}
}
};
}
var paymentQbxmlPartial = builder
.create(paymentQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true,
})
.end({pretty: true});
var paymentQbxmlPartial = builder
.create(paymentQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true
})
.end({ pretty: true });
const paymentQbxmlFull = QbXmlUtils.addQbxmlHeader(paymentQbxmlPartial);
const paymentQbxmlFull = QbXmlUtils.addQbxmlHeader(paymentQbxmlPartial);
return paymentQbxmlFull;
return paymentQbxmlFull;
};

View File

@@ -6,310 +6,255 @@ const moment = require("moment-timezone");
const builder = require("xmlbuilder2");
const QbXmlUtils = require("./qbxml-utils");
const CreateInvoiceLines = require("../qb-receivables-lines").default;
const logger = require('../../utils/logger');
const InstanceManager = require('../../utils/instanceMgr').default;
const logger = require("../../utils/logger");
const InstanceManager = require("../../utils/instanceMgr").default;
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
Dinero.globalRoundingMode = "HALF_EVEN";
const {generateJobTier, generateOwnerTier, generateSourceTier} = QbXmlUtils;
const { generateJobTier, generateOwnerTier, generateSourceTier } = QbXmlUtils;
exports.default = async (req, res) => {
const {jobIds} = req.body;
const { jobIds } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log(
"qbxml-receivables-create",
"DEBUG",
req.user.email,
req.body.jobIds,
null
);
try {
logger.log("qbxml-receivables-create", "DEBUG", req.user.email, req.body.jobIds, null);
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, {ids: jobIds});
const {jobs, bodyshops} = result;
const QbXmlToExecute = [];
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, { ids: jobIds });
const { jobs, bodyshops } = result;
const QbXmlToExecute = [];
const bodyshop = bodyshops[0];
const bodyshop = bodyshops[0];
jobs.map((jobs_by_pk) => {
//Is this a two tier, or 3 tier setup?
const isThreeTier = bodyshop.accountingconfig.tiers === 3;
const twoTierPref = bodyshop.accountingconfig.twotierpref;
jobs.map((jobs_by_pk) => {
//Is this a two tier, or 3 tier setup?
const isThreeTier = bodyshop.accountingconfig.tiers === 3;
const twoTierPref = bodyshop.accountingconfig.twotierpref;
//This is the Insurance Company tier IF 3 tier is selected.
if (isThreeTier) {
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0", "3100"],
qbxml: generateSourceCustomerQbxml(jobs_by_pk, bodyshop), // Create the source customer.
});
}
//If 3 tier, this should be the customer.
//If 2 tier, this should be based on the pref.
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0", "3100"],
qbxml: generateJobQbxml(
jobs_by_pk,
bodyshop,
isThreeTier,
2,
twoTierPref
),
});
//This is always going to be the job.
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0", "3100"],
qbxml: generateJobQbxml(
jobs_by_pk,
bodyshop,
isThreeTier,
3,
twoTierPref
),
});
if (!req.body.custDataOnly) {
//Generate the actual invoice.
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0"],
qbxml: generateInvoiceQbxml(
jobs_by_pk,
bodyshop,
isThreeTier,
twoTierPref
),
});
}
//This is the Insurance Company tier IF 3 tier is selected.
if (isThreeTier) {
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0", "3100"],
qbxml: generateSourceCustomerQbxml(jobs_by_pk, bodyshop) // Create the source customer.
});
}
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log(
"qbxml-receivables-error",
"error",
req.user.email,
req.body.jobIds,
{error: error.message, stack: error.stack}
);
res.status(400).send(JSON.stringify(error));
}
//If 3 tier, this should be the customer.
//If 2 tier, this should be based on the pref.
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0", "3100"],
qbxml: generateJobQbxml(jobs_by_pk, bodyshop, isThreeTier, 2, twoTierPref)
});
//This is always going to be the job.
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0", "3100"],
qbxml: generateJobQbxml(jobs_by_pk, bodyshop, isThreeTier, 3, twoTierPref)
});
if (!req.body.custDataOnly) {
//Generate the actual invoice.
QbXmlToExecute.push({
id: jobs_by_pk.id,
okStatusCodes: ["0"],
qbxml: generateInvoiceQbxml(jobs_by_pk, bodyshop, isThreeTier, twoTierPref)
});
}
});
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log("qbxml-receivables-error", "error", req.user.email, req.body.jobIds, {
error: error.message,
stack: error.stack
});
res.status(400).send(JSON.stringify(error));
}
};
const generateSourceCustomerQbxml = (jobs_by_pk, bodyshop) => {
const customerQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
CustomerAddRq: {
CustomerAdd: {
Name: jobs_by_pk.ins_co_nm.trim(),
// BillAddress: {
// Addr1: jobs_by_pk.ownr_addr1,
// Addr2: jobs_by_pk.ownr_addr2,
// City: jobs_by_pk.ownr_city,
// State: jobs_by_pk.ownr_st,
// PostalCode: jobs_by_pk.ownr_zip,
// },
},
},
},
},
};
const customerQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
CustomerAddRq: {
CustomerAdd: {
Name: jobs_by_pk.ins_co_nm.trim()
// BillAddress: {
// Addr1: jobs_by_pk.ownr_addr1,
// Addr2: jobs_by_pk.ownr_addr2,
// City: jobs_by_pk.ownr_city,
// State: jobs_by_pk.ownr_st,
// PostalCode: jobs_by_pk.ownr_zip,
// },
}
}
}
}
};
var customerQbxml_partial = builder
.create(customerQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true,
})
.end({pretty: true});
var customerQbxml_partial = builder
.create(customerQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true
})
.end({ pretty: true });
const customerQbxml_Full = QbXmlUtils.addQbxmlHeader(customerQbxml_partial);
const customerQbxml_Full = QbXmlUtils.addQbxmlHeader(customerQbxml_partial);
return customerQbxml_Full;
return customerQbxml_Full;
};
exports.generateSourceCustomerQbxml = generateSourceCustomerQbxml;
const generateJobQbxml = (
jobs_by_pk,
bodyshop,
isThreeTier,
tierLevel,
twoTierPref
) => {
let Name;
let ParentRefName;
const generateJobQbxml = (jobs_by_pk, bodyshop, isThreeTier, tierLevel, twoTierPref) => {
let Name;
let ParentRefName;
if (tierLevel === 2) {
Name = generateOwnerTier(jobs_by_pk, isThreeTier, twoTierPref);
ParentRefName = isThreeTier ? generateSourceTier(jobs_by_pk) : null;
} else if (tierLevel === 3) {
Name = generateJobTier(jobs_by_pk);
ParentRefName = isThreeTier
? `${generateSourceTier(jobs_by_pk)}:${generateOwnerTier(jobs_by_pk)}`
: generateOwnerTier(jobs_by_pk, isThreeTier, twoTierPref);
if (tierLevel === 2) {
Name = generateOwnerTier(jobs_by_pk, isThreeTier, twoTierPref);
ParentRefName = isThreeTier ? generateSourceTier(jobs_by_pk) : null;
} else if (tierLevel === 3) {
Name = generateJobTier(jobs_by_pk);
ParentRefName = isThreeTier
? `${generateSourceTier(jobs_by_pk)}:${generateOwnerTier(jobs_by_pk)}`
: generateOwnerTier(jobs_by_pk, isThreeTier, twoTierPref);
}
const jobQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
CustomerAddRq: {
CustomerAdd: {
Name: Name,
ParentRef: ParentRefName
? {
FullName: ParentRefName
}
: null,
...(tierLevel === 3
? {
BillAddress: {
Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip
},
ShipAddress: {
Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip
},
Email: jobs_by_pk.ownr_ea
}
: {})
}
}
}
}
};
const jobQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "continueOnError",
var jobQbxml_partial = builder
.create(jobQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true
})
.end({ pretty: true });
CustomerAddRq: {
CustomerAdd: {
Name: Name,
ParentRef: ParentRefName
? {
FullName: ParentRefName,
}
: null,
...(tierLevel === 3
? {
BillAddress: {
Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
ShipAddress: {
Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
Email: jobs_by_pk.ownr_ea,
}
: {}),
},
},
},
},
};
const jobQbxml_Full = QbXmlUtils.addQbxmlHeader(jobQbxml_partial);
var jobQbxml_partial = builder
.create(jobQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true,
})
.end({pretty: true});
const jobQbxml_Full = QbXmlUtils.addQbxmlHeader(jobQbxml_partial);
return jobQbxml_Full;
return jobQbxml_Full;
};
exports.generateJobQbxml = generateJobQbxml;
const generateInvoiceQbxml = (
jobs_by_pk,
bodyshop,
isThreeTier,
twoTierPref
) => {
//Build the Invoice XML file.
const generateInvoiceQbxml = (jobs_by_pk, bodyshop, isThreeTier, twoTierPref) => {
//Build the Invoice XML file.
const InvoiceLineAdd = CreateInvoiceLines({bodyshop, jobs_by_pk});
const InvoiceLineAdd = CreateInvoiceLines({ bodyshop, jobs_by_pk });
const invoiceQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "stopOnError",
InvoiceAddRq: {
InvoiceAdd: {
CustomerRef: {
FullName: (bodyshop.accountingconfig.tiers === 3
? `${generateSourceTier(jobs_by_pk)}:${generateOwnerTier(
jobs_by_pk
)}:${generateJobTier(jobs_by_pk)}`
: `${generateOwnerTier(
jobs_by_pk,
isThreeTier,
twoTierPref
)}:${generateJobTier(jobs_by_pk)}`
).trim(),
},
...(jobs_by_pk.class
? {ClassRef: {FullName: jobs_by_pk.class}}
: {}),
ARAccountRef: {
FullName: bodyshop.md_responsibility_centers.ar.accountname,
},
TxnDate: moment(jobs_by_pk.date_invoiced)
.tz(bodyshop.timezone)
.format("YYYY-MM-DD"),
RefNumber: jobs_by_pk.ro_number,
BillAddress: {
Addr1: jobs_by_pk.ownr_co_nm
? jobs_by_pk.ownr_co_nm.substring(0, 30).trim()
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`
.substring(0, 30)
.trim()}`,
Addr2: jobs_by_pk.ownr_addr1,
Addr3: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
ShipAddress: {
Addr1: jobs_by_pk.ownr_co_nm
? jobs_by_pk.ownr_co_nm.substring(0, 30).trim()
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`
.substring(0, 30)
.trim()}`,
Addr2: jobs_by_pk.ownr_addr1,
Addr3: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
PONumber: jobs_by_pk.clm_no,
...InstanceManager({rome: {
ItemSalesTaxRef: {
FullName:
bodyshop.md_responsibility_centers.taxes.invoiceexemptcode,
},
}}),
IsToBePrinted: bodyshop.accountingconfig.printlater,
...(jobs_by_pk.ownr_ea
? {IsToBeEmailed: bodyshop.accountingconfig.emaillater}
: {}),
InvoiceLineAdd: InvoiceLineAdd,
},
},
const invoiceQbxmlObj = {
QBXML: {
QBXMLMsgsRq: {
"@onError": "stopOnError",
InvoiceAddRq: {
InvoiceAdd: {
CustomerRef: {
FullName: (bodyshop.accountingconfig.tiers === 3
? `${generateSourceTier(jobs_by_pk)}:${generateOwnerTier(jobs_by_pk)}:${generateJobTier(jobs_by_pk)}`
: `${generateOwnerTier(jobs_by_pk, isThreeTier, twoTierPref)}:${generateJobTier(jobs_by_pk)}`
).trim()
},
},
};
...(jobs_by_pk.class ? { ClassRef: { FullName: jobs_by_pk.class } } : {}),
var invoiceQbxml_partial = builder
.create(invoiceQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true,
})
.end({pretty: true});
ARAccountRef: {
FullName: bodyshop.md_responsibility_centers.ar.accountname
},
TxnDate: moment(jobs_by_pk.date_invoiced).tz(bodyshop.timezone).format("YYYY-MM-DD"),
RefNumber: jobs_by_pk.ro_number,
BillAddress: {
Addr1: jobs_by_pk.ownr_co_nm
? jobs_by_pk.ownr_co_nm.substring(0, 30).trim()
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`.substring(0, 30).trim()}`,
Addr2: jobs_by_pk.ownr_addr1,
Addr3: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip
},
ShipAddress: {
Addr1: jobs_by_pk.ownr_co_nm
? jobs_by_pk.ownr_co_nm.substring(0, 30).trim()
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`.substring(0, 30).trim()}`,
Addr2: jobs_by_pk.ownr_addr1,
Addr3: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip
},
PONumber: jobs_by_pk.clm_no,
...InstanceManager({
rome: {
ItemSalesTaxRef: {
FullName: bodyshop.md_responsibility_centers.taxes.invoiceexemptcode
}
}
}),
IsToBePrinted: bodyshop.accountingconfig.printlater,
...(jobs_by_pk.ownr_ea ? { IsToBeEmailed: bodyshop.accountingconfig.emaillater } : {}),
const invoiceQbxml_Full = QbXmlUtils.addQbxmlHeader(invoiceQbxml_partial);
InvoiceLineAdd: InvoiceLineAdd
}
}
}
}
};
return invoiceQbxml_Full;
var invoiceQbxml_partial = builder
.create(invoiceQbxmlObj, {
version: "1.30",
encoding: "UTF-8",
headless: true
})
.end({ pretty: true });
const invoiceQbxml_Full = QbXmlUtils.addQbxmlHeader(invoiceQbxml_partial);
return invoiceQbxml_Full;
};
// const generateInvoiceLine = (job, allocation, responsibilityCenters) => {

View File

@@ -1,50 +1,46 @@
exports.addQbxmlHeader = addQbxmlHeader = (xml) => {
return `<?xml version="1.0" encoding="utf-8"?>
return `<?xml version="1.0" encoding="utf-8"?>
<?qbxml version="13.0"?>
${xml}
`;
};
exports.generateSourceTier = (jobs_by_pk) => {
return jobs_by_pk.ins_co_nm && jobs_by_pk.ins_co_nm.trim().replace(":", " ");
return jobs_by_pk.ins_co_nm && jobs_by_pk.ins_co_nm.trim().replace(":", " ");
};
exports.generateJobTier = (jobs_by_pk) => {
return jobs_by_pk.ro_number && jobs_by_pk.ro_number.trim().replace(":", " ");
return jobs_by_pk.ro_number && jobs_by_pk.ro_number.trim().replace(":", " ");
};
exports.generateOwnerTier = (jobs_by_pk, isThreeTier, twotierpref) => {
if (isThreeTier) {
//It's always gonna be the owner now. Same as 2 tier by name
return (
jobs_by_pk.ownr_co_nm
? `${jobs_by_pk.ownr_co_nm.substring(0, 30)} #${
jobs_by_pk.owner.accountingid || ""
}`
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`
.substring(0, 30)
.trim()} #${jobs_by_pk.owner.accountingid || ""}`
)
.trim()
.replace(":", " ");
if (isThreeTier) {
//It's always gonna be the owner now. Same as 2 tier by name
return (
jobs_by_pk.ownr_co_nm
? `${jobs_by_pk.ownr_co_nm.substring(0, 30)} #${jobs_by_pk.owner.accountingid || ""}`
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`
.substring(0, 30)
.trim()} #${jobs_by_pk.owner.accountingid || ""}`
)
.trim()
.replace(":", " ");
} else {
//What's the 2 tier pref?
if (twotierpref === "source") {
return this.generateSourceTier(jobs_by_pk);
//It should be the insurance co.
} else {
//What's the 2 tier pref?
if (twotierpref === "source") {
return this.generateSourceTier(jobs_by_pk);
//It should be the insurance co.
} else {
//Same as 3 tier
return (
jobs_by_pk.ownr_co_nm
? `${jobs_by_pk.ownr_co_nm.substring(0, 30)} #${
jobs_by_pk.owner.accountingid || ""
}`
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`
.substring(0, 30)
.trim()} #${jobs_by_pk.owner.accountingid || ""}`
)
.trim()
.replace(":", " ");
}
//Same as 3 tier
return (
jobs_by_pk.ownr_co_nm
? `${jobs_by_pk.ownr_co_nm.substring(0, 30)} #${jobs_by_pk.owner.accountingid || ""}`
: `${`${jobs_by_pk.ownr_ln || ""} ${jobs_by_pk.ownr_fn || ""}`
.substring(0, 30)
.trim()} #${jobs_by_pk.owner.accountingid || ""}`
)
.trim()
.replace(":", " ");
}
}
};

View File

@@ -1,8 +1,5 @@
{
"id": "12345",
"okStatusCodes": [
"0",
"31400"
],
"okStatusCodes": ["0", "31400"],
"qbxml": "the qbxml string"
}

View File

@@ -3,23 +3,20 @@ const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
exports.createAssociation = async (req, res) => {
logger.log("admin-create-association", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
const {shopid, authlevel, useremail} = req.body;
logger.log("admin-create-association", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
const { shopid, authlevel, useremail } = req.body;
const result = await client.request(
`mutation INSERT_ASSOCIATION($assoc: associations_insert_input!){
const result = await client.request(
`mutation INSERT_ASSOCIATION($assoc: associations_insert_input!){
insert_associations_one(object:$assoc){
id
authlevel
@@ -27,55 +24,55 @@ exports.createAssociation = async (req, res) => {
active
}
}`,
{
assoc: {shopid, authlevel, useremail, active: false},
}
);
res.json(result);
{
assoc: { shopid, authlevel, useremail, active: false }
}
);
res.json(result);
};
exports.createShop = async (req, res) => {
logger.log("admin-create-shop", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
const {bodyshop, ronum} = req.body;
logger.log("admin-create-shop", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
const { bodyshop, ronum } = req.body;
try {
const result = await client.request(
`mutation INSERT_BODYSHOPS($bs: bodyshops_insert_input!){
try {
const result = await client.request(
`mutation INSERT_BODYSHOPS($bs: bodyshops_insert_input!){
insert_bodyshops_one(object:$bs){
id
}
}`,
{
bs: {
...bodyshop,
counters: {
data: [
{countertype: "ronum", count: ronum},
{countertype: "ihbnum", count: 1},
{countertype: "paymentnum", count: 1},
],
},
},
}
);
res.json(result);
} catch (error) {
res.status(500).json(error);
}
{
bs: {
...bodyshop,
counters: {
data: [
{ countertype: "ronum", count: ronum },
{ countertype: "ihbnum", count: 1 },
{ countertype: "paymentnum", count: 1 }
]
}
}
}
);
res.json(result);
} catch (error) {
res.status(500).json(error);
}
};
exports.updateCounter = async (req, res) => {
logger.log("admin-update-counter", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
const {id, counter} = req.body;
logger.log("admin-update-counter", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
const { id, counter } = req.body;
try {
const result = await client.request(
`mutation UPDATE_COUNTER($id: uuid!, $counter: counters_set_input!) {
try {
const result = await client.request(
`mutation UPDATE_COUNTER($id: uuid!, $counter: counters_set_input!) {
update_counters_by_pk(pk_columns: { id: $id }, _set: $counter) {
id
countertype
@@ -83,38 +80,38 @@ exports.updateCounter = async (req, res) => {
prefix
}
}`,
{
id,
counter,
}
);
res.json(result);
} catch (error) {
res.status(500).json(error);
}
{
id,
counter
}
);
res.json(result);
} catch (error) {
res.status(500).json(error);
}
};
exports.updateShop = async (req, res) => {
logger.log("admin-update-shop", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
const {id, bodyshop} = req.body;
logger.log("admin-update-shop", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
const { id, bodyshop } = req.body;
try {
const result = await client.request(
`mutation UPDATE_BODYSHOP($id: uuid!, $bodyshop: bodyshops_set_input!) {
try {
const result = await client.request(
`mutation UPDATE_BODYSHOP($id: uuid!, $bodyshop: bodyshops_set_input!) {
update_bodyshops_by_pk(pk_columns: { id: $id }, _set: $bodyshop) {
id
}
}`,
{
id,
bodyshop,
}
);
res.json(result);
} catch (error) {
res.status(500).json(error);
}
{
id,
bodyshop
}
);
res.json(result);
} catch (error) {
res.status(500).json(error);
}
};

View File

@@ -6,40 +6,37 @@ const GraphQLClient = require("graphql-request").GraphQLClient;
const moment = require("moment-timezone");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.generatePpc = async (req, res) => {
const {jobid} = req.body;
const BearerToken = req.headers.authorization;
logger.log("generate-ppc", "DEBUG", req.user.email, jobid, null);
const { jobid } = req.body;
const BearerToken = req.headers.authorization;
logger.log("generate-ppc", "DEBUG", req.user.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
try {
const {jobs_by_pk: job} = await client
.setHeaders({Authorization: BearerToken})
.request(queries.GET_JOB_FOR_PPC, {
jobid: jobid,
});
const ReturnVal = {
...job,
trans_type: "P",
create_dt: moment().tz(job.bodyshop.timezone).format("yyyyMMDD"),
create_tm: moment().tz(job.bodyshop.timezone).format("HHmmSS"),
incl_est: true,
joblines: job.joblines.map((jl) => ({...jl, tran_code: 2})),
};
res.json(ReturnVal);
} catch (error) {
res.status(400).json(error);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken
}
});
try {
const { jobs_by_pk: job } = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.GET_JOB_FOR_PPC, {
jobid: jobid
});
const ReturnVal = {
...job,
trans_type: "P",
create_dt: moment().tz(job.bodyshop.timezone).format("yyyyMMDD"),
create_tm: moment().tz(job.bodyshop.timezone).format("HHmmSS"),
incl_est: true,
joblines: job.joblines.map((jl) => ({ ...jl, tran_code: 2 }))
};
res.json(ReturnVal);
} catch (error) {
res.status(400).json(error);
}
};

View File

@@ -1,9 +1,6 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const GraphQLClient = require("graphql-request").GraphQLClient;
@@ -12,456 +9,380 @@ const CdkBase = require("../web-sockets/web-socket");
const Dinero = require("dinero.js");
const _ = require("lodash");
const {DiscountNotAlreadyCounted} = require("../job/job-totals");
const InstanceManager = require('../utils/instanceMgr').default;
const { DiscountNotAlreadyCounted } = require("../job/job-totals");
const InstanceManager = require("../utils/instanceMgr").default;
exports.default = async function (socket, jobid) {
try {
CdkBase.createLogEvent(
socket,
"DEBUG",
`Received request to calculate allocations for ${jobid}`
);
const job = await QueryJobData(socket, jobid);
const {bodyshop} = job;
try {
CdkBase.createLogEvent(socket, "DEBUG", `Received request to calculate allocations for ${jobid}`);
const job = await QueryJobData(socket, jobid);
const { bodyshop } = job;
const taxAllocations =
InstanceManager({
executeFunction:true,
deubg:true,
args: [],
imex: () => ({
local: {
center: bodyshop.md_responsibility_centers.taxes.local.name,
sale: Dinero(job.job_totals.totals.local_tax),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes.local,
costCenter: bodyshop.md_responsibility_centers.taxes.local,
},
state: {
center: bodyshop.md_responsibility_centers.taxes.state.name,
sale: Dinero(job.job_totals.totals.state_tax),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes.state,
costCenter: bodyshop.md_responsibility_centers.taxes.state,
},
federal: {
center: bodyshop.md_responsibility_centers.taxes.federal.name,
sale: Dinero(job.job_totals.totals.federal_tax),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes.federal,
costCenter: bodyshop.md_responsibility_centers.taxes.federal,
},
}), rome: () => ({
tax_ty1: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty1`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty1Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty1`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty1`],
},
tax_ty2: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty2`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty2Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty2`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty2`],
},
tax_ty3: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty3`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty3Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty3`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty3`],
},
tax_ty4: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty4`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty4Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty4`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty4`],
},
tax_ty5: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty5`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty5Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty5`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty5`],
},
}) })
//Determine if there are MAPA and MASH lines already on the estimate.
//If there are, don't do anything extra (mitchell estimate)
//Otherwise, calculate them and add them to the default MAPA and MASH centers.
let hasMapaLine = false;
let hasMashLine = false;
const profitCenterHash = job.joblines.reduce((acc, val) => {
//Check the Parts Assignment
if (val.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
}
if (val.db_ref === "936007") {
hasMashLine = true;
}
if (val.profitcenter_part) {
if (!acc[val.profitcenter_part]) acc[val.profitcenter_part] = Dinero();
let DineroAmount = Dinero({
amount: Math.round(val.act_price * 100),
}).multiply(val.part_qty || 1);
DineroAmount = DineroAmount.add(
((val.prt_dsmk_m && val.prt_dsmk_m !== 0) ||
(val.prt_dsmk_p && val.prt_dsmk_p !== 0)) &&
DiscountNotAlreadyCounted(val, job.joblines)
? val.prt_dsmk_m
? Dinero({amount: Math.round(val.prt_dsmk_m * 100)})
: Dinero({
amount: Math.round(val.act_price * 100),
})
.multiply(val.part_qty || 0)
.percentage(Math.abs(val.prt_dsmk_p || 0))
.multiply(val.prt_dsmk_p > 0 ? 1 : -1)
: Dinero()
);
acc[val.profitcenter_part] =
acc[val.profitcenter_part].add(DineroAmount);
}
if (val.profitcenter_labor && val.mod_lbr_ty) {
//Check the Labor Assignment.
if (!acc[val.profitcenter_labor])
acc[val.profitcenter_labor] = Dinero();
acc[val.profitcenter_labor] = acc[val.profitcenter_labor].add(
Dinero({
amount: Math.round(
job[`rate_${val.mod_lbr_ty.toLowerCase()}`] * 100
),
}).multiply(val.mod_lb_hrs)
);
}
return acc;
}, {});
const selectedDmsAllocationConfig =
bodyshop.md_responsibility_centers.dms_defaults.find(
(d) => d.name === job.dms_allocation
);
CdkBase.createLogEvent(
socket,
"DEBUG",
`Using DMS Allocation ${
selectedDmsAllocationConfig && selectedDmsAllocationConfig.name
} for cost export.`
);
let costCenterHash = {};
//Check whether to skip this if PBS and using AP module.
const disablebillwip = !!bodyshop?.pbs_configuration?.disablebillwip;
if (!disablebillwip) {
costCenterHash = job.bills.reduce((bill_acc, bill_val) => {
bill_val.billlines.map((line_val) => {
if (
!bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]]
)
bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]] =
Dinero();
let lineDinero = Dinero({
amount: Math.round((line_val.actual_cost || 0) * 100),
})
.multiply(line_val.quantity)
.multiply(bill_val.is_credit_memo ? -1 : 1);
bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]] =
bill_acc[
selectedDmsAllocationConfig.costs[line_val.cost_center]
].add(lineDinero);
return null;
});
return bill_acc;
}, {});
const taxAllocations = InstanceManager({
executeFunction: true,
deubg: true,
args: [],
imex: () => ({
local: {
center: bodyshop.md_responsibility_centers.taxes.local.name,
sale: Dinero(job.job_totals.totals.local_tax),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes.local,
costCenter: bodyshop.md_responsibility_centers.taxes.local
},
state: {
center: bodyshop.md_responsibility_centers.taxes.state.name,
sale: Dinero(job.job_totals.totals.state_tax),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes.state,
costCenter: bodyshop.md_responsibility_centers.taxes.state
},
federal: {
center: bodyshop.md_responsibility_centers.taxes.federal.name,
sale: Dinero(job.job_totals.totals.federal_tax),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes.federal,
costCenter: bodyshop.md_responsibility_centers.taxes.federal
}
}),
rome: () => ({
tax_ty1: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty1`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty1Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty1`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty1`]
},
tax_ty2: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty2`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty2Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty2`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty2`]
},
tax_ty3: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty3`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty3Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty3`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty3`]
},
tax_ty4: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty4`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty4Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty4`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty4`]
},
tax_ty5: {
center: bodyshop.md_responsibility_centers.taxes[`tax_ty5`].name,
sale: Dinero(job.job_totals.totals.us_sales_tax_breakdown[`ty5Tax`]),
cost: Dinero(),
profitCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty5`],
costCenter: bodyshop.md_responsibility_centers.taxes[`tax_ty5`]
}
})
});
job.timetickets.forEach((ticket) => {
//Get the total amount of the ticket.
let TicketTotal = Dinero({
amount: Math.round(
ticket.rate *
(ticket.employee && ticket.employee.flat_rate
? ticket.productivehrs || 0
: ticket.actualhrs || 0) *
100
),
});
//Add it to the right cost center.
if (!costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]])
costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]] =
Dinero();
//Determine if there are MAPA and MASH lines already on the estimate.
//If there are, don't do anything extra (mitchell estimate)
//Otherwise, calculate them and add them to the default MAPA and MASH centers.
let hasMapaLine = false;
let hasMashLine = false;
costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]] =
costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]].add(
TicketTotal
);
const profitCenterHash = job.joblines.reduce((acc, val) => {
//Check the Parts Assignment
if (val.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
}
if (val.db_ref === "936007") {
hasMashLine = true;
}
if (val.profitcenter_part) {
if (!acc[val.profitcenter_part]) acc[val.profitcenter_part] = Dinero();
let DineroAmount = Dinero({
amount: Math.round(val.act_price * 100)
}).multiply(val.part_qty || 1);
DineroAmount = DineroAmount.add(
((val.prt_dsmk_m && val.prt_dsmk_m !== 0) || (val.prt_dsmk_p && val.prt_dsmk_p !== 0)) &&
DiscountNotAlreadyCounted(val, job.joblines)
? val.prt_dsmk_m
? Dinero({ amount: Math.round(val.prt_dsmk_m * 100) })
: Dinero({
amount: Math.round(val.act_price * 100)
})
.multiply(val.part_qty || 0)
.percentage(Math.abs(val.prt_dsmk_p || 0))
.multiply(val.prt_dsmk_p > 0 ? 1 : -1)
: Dinero()
);
acc[val.profitcenter_part] = acc[val.profitcenter_part].add(DineroAmount);
}
if (val.profitcenter_labor && val.mod_lbr_ty) {
//Check the Labor Assignment.
if (!acc[val.profitcenter_labor]) acc[val.profitcenter_labor] = Dinero();
acc[val.profitcenter_labor] = acc[val.profitcenter_labor].add(
Dinero({
amount: Math.round(job[`rate_${val.mod_lbr_ty.toLowerCase()}`] * 100)
}).multiply(val.mod_lb_hrs)
);
}
return acc;
}, {});
const selectedDmsAllocationConfig = bodyshop.md_responsibility_centers.dms_defaults.find(
(d) => d.name === job.dms_allocation
);
CdkBase.createLogEvent(
socket,
"DEBUG",
`Using DMS Allocation ${selectedDmsAllocationConfig && selectedDmsAllocationConfig.name} for cost export.`
);
let costCenterHash = {};
//Check whether to skip this if PBS and using AP module.
const disablebillwip = !!bodyshop?.pbs_configuration?.disablebillwip;
if (!disablebillwip) {
costCenterHash = job.bills.reduce((bill_acc, bill_val) => {
bill_val.billlines.map((line_val) => {
if (!bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]])
bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]] = Dinero();
let lineDinero = Dinero({
amount: Math.round((line_val.actual_cost || 0) * 100)
})
.multiply(line_val.quantity)
.multiply(bill_val.is_credit_memo ? -1 : 1);
bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]] =
bill_acc[selectedDmsAllocationConfig.costs[line_val.cost_center]].add(lineDinero);
return null;
});
return bill_acc;
}, {});
}
if (!hasMapaLine && job.job_totals.rates.mapa.total.amount > 0) {
// console.log("Adding MAPA Line Manually.");
const mapaAccountName = selectedDmsAllocationConfig.profits.MAPA;
job.timetickets.forEach((ticket) => {
//Get the total amount of the ticket.
let TicketTotal = Dinero({
amount: Math.round(
ticket.rate *
(ticket.employee && ticket.employee.flat_rate ? ticket.productivehrs || 0 : ticket.actualhrs || 0) *
100
)
});
//Add it to the right cost center.
if (!costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]])
costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]] = Dinero();
const mapaAccount = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === mapaAccountName
);
costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]] =
costCenterHash[selectedDmsAllocationConfig.costs[ticket.ciecacode]].add(TicketTotal);
});
if (mapaAccount) {
if (!profitCenterHash[mapaAccountName])
profitCenterHash[mapaAccountName] = Dinero();
if (!hasMapaLine && job.job_totals.rates.mapa.total.amount > 0) {
// console.log("Adding MAPA Line Manually.");
const mapaAccountName = selectedDmsAllocationConfig.profits.MAPA;
profitCenterHash[mapaAccountName] = profitCenterHash[
mapaAccountName
].add(Dinero(job.job_totals.rates.mapa.total));
} else {
//console.log("NO MAPA ACCOUNT FOUND!!");
}
}
const mapaAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === mapaAccountName);
if (!hasMashLine && job.job_totals.rates.mash.total.amount > 0) {
// console.log("Adding MASH Line Manually.");
if (mapaAccount) {
if (!profitCenterHash[mapaAccountName]) profitCenterHash[mapaAccountName] = Dinero();
const mashAccountName = selectedDmsAllocationConfig.profits.MASH;
const mashAccount = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === mashAccountName
);
if (mashAccount) {
if (!profitCenterHash[mashAccountName])
profitCenterHash[mashAccountName] = Dinero();
profitCenterHash[mashAccountName] = profitCenterHash[
mashAccountName
].add(Dinero(job.job_totals.rates.mash.total));
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
console.log(
Number.isInteger(bodyshop?.cdk_configuration?.sendmaterialscosting),
typeof Number.isInteger(bodyshop?.cdk_configuration?.sendmaterialscosting)
profitCenterHash[mapaAccountName] = profitCenterHash[mapaAccountName].add(
Dinero(job.job_totals.rates.mapa.total)
);
if (!!bodyshop?.cdk_configuration?.sendmaterialscosting) {
//Manually send the percentage of the costing.
} else {
//console.log("NO MAPA ACCOUNT FOUND!!");
}
}
//Paint Mat
const mapaAccountName = selectedDmsAllocationConfig.costs.MAPA;
const mapaAccount = bodyshop.md_responsibility_centers.costs.find(
(c) => c.name === mapaAccountName
);
if (mapaAccount) {
if (!costCenterHash[mapaAccountName])
costCenterHash[mapaAccountName] = Dinero();
costCenterHash[mapaAccountName] = costCenterHash[mapaAccountName].add(
Dinero(job.job_totals.rates.mapa.total).percentage(
bodyshop?.cdk_configuration?.sendmaterialscosting
)
);
} else {
//console.log("NO MAPA ACCOUNT FOUND!!");
}
if (!hasMashLine && job.job_totals.rates.mash.total.amount > 0) {
// console.log("Adding MASH Line Manually.");
//Shop Mat
const mashAccountName = selectedDmsAllocationConfig.costs.MASH;
const mashAccount = bodyshop.md_responsibility_centers.costs.find(
(c) => c.name === mashAccountName
);
if (mashAccount) {
if (!costCenterHash[mashAccountName])
costCenterHash[mashAccountName] = Dinero();
costCenterHash[mashAccountName] = costCenterHash[mashAccountName].add(
Dinero(job.job_totals.rates.mash.total).percentage(
bodyshop?.cdk_configuration?.sendmaterialscosting
)
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
const mashAccountName = selectedDmsAllocationConfig.profits.MASH;
const {ca_bc_pvrt} = job;
if (ca_bc_pvrt) {
// const pvrtAccount = bodyshop.md_responsibility_centers.profits.find(
// (c) => c.name === mashAccountName
// );
const mashAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === mashAccountName);
taxAllocations.state.sale = taxAllocations.state.sale.add(
Dinero({amount: Math.round((ca_bc_pvrt || 0) * 100)})
);
}
if (mashAccount) {
if (!profitCenterHash[mashAccountName]) profitCenterHash[mashAccountName] = Dinero();
if (job.towing_payable && job.towing_payable !== 0) {
const towAccountName = selectedDmsAllocationConfig.profits.TOW;
profitCenterHash[mashAccountName] = profitCenterHash[mashAccountName].add(
Dinero(job.job_totals.rates.mash.total)
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
console.log(
Number.isInteger(bodyshop?.cdk_configuration?.sendmaterialscosting),
typeof Number.isInteger(bodyshop?.cdk_configuration?.sendmaterialscosting)
);
if (!!bodyshop?.cdk_configuration?.sendmaterialscosting) {
//Manually send the percentage of the costing.
const towAccount = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === towAccountName
);
//Paint Mat
const mapaAccountName = selectedDmsAllocationConfig.costs.MAPA;
const mapaAccount = bodyshop.md_responsibility_centers.costs.find((c) => c.name === mapaAccountName);
if (mapaAccount) {
if (!costCenterHash[mapaAccountName]) costCenterHash[mapaAccountName] = Dinero();
costCenterHash[mapaAccountName] = costCenterHash[mapaAccountName].add(
Dinero(job.job_totals.rates.mapa.total).percentage(bodyshop?.cdk_configuration?.sendmaterialscosting)
);
} else {
//console.log("NO MAPA ACCOUNT FOUND!!");
}
if (towAccount) {
if (!profitCenterHash[towAccountName])
profitCenterHash[towAccountName] = Dinero();
//Shop Mat
const mashAccountName = selectedDmsAllocationConfig.costs.MASH;
const mashAccount = bodyshop.md_responsibility_centers.costs.find((c) => c.name === mashAccountName);
if (mashAccount) {
if (!costCenterHash[mashAccountName]) costCenterHash[mashAccountName] = Dinero();
costCenterHash[mashAccountName] = costCenterHash[mashAccountName].add(
Dinero(job.job_totals.rates.mash.total).percentage(bodyshop?.cdk_configuration?.sendmaterialscosting)
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
profitCenterHash[towAccountName] = profitCenterHash[towAccountName].add(
Dinero({
amount: Math.round((job.towing_payable || 0) * 100),
})
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
if (job.storage_payable && job.storage_payable !== 0) {
const storageAccountName = selectedDmsAllocationConfig.profits.TOW;
const { ca_bc_pvrt } = job;
if (ca_bc_pvrt) {
// const pvrtAccount = bodyshop.md_responsibility_centers.profits.find(
// (c) => c.name === mashAccountName
// );
const towAccount = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === storageAccountName
);
taxAllocations.state.sale = taxAllocations.state.sale.add(
Dinero({ amount: Math.round((ca_bc_pvrt || 0) * 100) })
);
}
if (towAccount) {
if (!profitCenterHash[storageAccountName])
profitCenterHash[storageAccountName] = Dinero();
if (job.towing_payable && job.towing_payable !== 0) {
const towAccountName = selectedDmsAllocationConfig.profits.TOW;
profitCenterHash[storageAccountName] = profitCenterHash[
storageAccountName
].add(
Dinero({
amount: Math.round((job.storage_payable || 0) * 100),
})
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
const towAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === towAccountName);
if (job.adjustment_bottom_line && job.adjustment_bottom_line !== 0) {
const otherAccountName = selectedDmsAllocationConfig.profits.PAO;
if (towAccount) {
if (!profitCenterHash[towAccountName]) profitCenterHash[towAccountName] = Dinero();
const otherAccount = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === otherAccountName
);
profitCenterHash[towAccountName] = profitCenterHash[towAccountName].add(
Dinero({
amount: Math.round((job.towing_payable || 0) * 100)
})
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
if (job.storage_payable && job.storage_payable !== 0) {
const storageAccountName = selectedDmsAllocationConfig.profits.TOW;
if (otherAccount) {
if (!profitCenterHash[otherAccountName])
profitCenterHash[otherAccountName] = Dinero();
const towAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === storageAccountName);
profitCenterHash[otherAccountName] = profitCenterHash[
otherAccountName
].add(
Dinero({
amount: Math.round((job.adjustment_bottom_line || 0) * 100),
})
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
if(InstanceManager({rome:true})){
if (towAccount) {
if (!profitCenterHash[storageAccountName]) profitCenterHash[storageAccountName] = Dinero();
profitCenterHash[storageAccountName] = profitCenterHash[storageAccountName].add(
Dinero({
amount: Math.round((job.storage_payable || 0) * 100)
})
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
if (job.adjustment_bottom_line && job.adjustment_bottom_line !== 0) {
const otherAccountName = selectedDmsAllocationConfig.profits.PAO;
const otherAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === otherAccountName);
if (otherAccount) {
if (!profitCenterHash[otherAccountName]) profitCenterHash[otherAccountName] = Dinero();
profitCenterHash[otherAccountName] = profitCenterHash[otherAccountName].add(
Dinero({
amount: Math.round((job.adjustment_bottom_line || 0) * 100)
})
);
} else {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
if (InstanceManager({ rome: true })) {
//profile level adjustments
Object.keys(job.job_totals.parts.adjustments).forEach((key) => {
const accountName = selectedDmsAllocationConfig.profits[key];
Object.keys(job.job_totals.parts.adjustments).forEach((key) => {
const accountName = selectedDmsAllocationConfig.profits[key];
const otherAccount = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === accountName
);
const otherAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === accountName);
if (otherAccount) {
if (!profitCenterHash[accountName])
profitCenterHash[accountName] = Dinero();
if (otherAccount) {
if (!profitCenterHash[accountName]) profitCenterHash[accountName] = Dinero();
profitCenterHash[accountName] = profitCenterHash[accountName].add(
Dinero(job.job_totals.parts.adjustments[key])
);
} else {
CdkBase.createLogEvent(
socket,
"ERROR",
`Error encountered in CdkCalculateAllocations. Unable to find adjustment account. ${error}`
);
}
});
}
const jobAllocations = _.union(
Object.keys(profitCenterHash),
Object.keys(costCenterHash)
).map((key) => {
const profitCenter = bodyshop.md_responsibility_centers.profits.find(
(c) => c.name === key
);
const costCenter = bodyshop.md_responsibility_centers.costs.find(
(c) => c.name === key
);
return {
center: key,
sale: profitCenterHash[key] ? profitCenterHash[key] : Dinero(),
cost: costCenterHash[key] ? costCenterHash[key] : Dinero(),
profitCenter,
costCenter,
};
});
return [
...jobAllocations,
...Object.keys(taxAllocations)
.filter(
(key) =>
taxAllocations[key].sale.getAmount() > 0 ||
taxAllocations[key].cost.getAmount() > 0
)
.map((key) => {
if (
key === "federal" &&
selectedDmsAllocationConfig.gst_override &&
selectedDmsAllocationConfig.gst_override !== ""
) {
const ret = {...taxAllocations[key], tax: key};
ret.costCenter.dms_acctnumber =
selectedDmsAllocationConfig.gst_override;
ret.profitCenter.dms_acctnumber =
selectedDmsAllocationConfig.gst_override;
return ret;
} else {
return {...taxAllocations[key], tax: key};
}
}),
];
} catch (error) {
console.log(error)
CdkBase.createLogEvent(
profitCenterHash[accountName] = profitCenterHash[accountName].add(
Dinero(job.job_totals.parts.adjustments[key])
);
} else {
CdkBase.createLogEvent(
socket,
"ERROR",
`Error encountered in CdkCalculateAllocations. ${error}`
);
`Error encountered in CdkCalculateAllocations. Unable to find adjustment account. ${error}`
);
}
});
}
const jobAllocations = _.union(Object.keys(profitCenterHash), Object.keys(costCenterHash)).map((key) => {
const profitCenter = bodyshop.md_responsibility_centers.profits.find((c) => c.name === key);
const costCenter = bodyshop.md_responsibility_centers.costs.find((c) => c.name === key);
return {
center: key,
sale: profitCenterHash[key] ? profitCenterHash[key] : Dinero(),
cost: costCenterHash[key] ? costCenterHash[key] : Dinero(),
profitCenter,
costCenter
};
});
return [
...jobAllocations,
...Object.keys(taxAllocations)
.filter((key) => taxAllocations[key].sale.getAmount() > 0 || taxAllocations[key].cost.getAmount() > 0)
.map((key) => {
if (
key === "federal" &&
selectedDmsAllocationConfig.gst_override &&
selectedDmsAllocationConfig.gst_override !== ""
) {
const ret = { ...taxAllocations[key], tax: key };
ret.costCenter.dms_acctnumber = selectedDmsAllocationConfig.gst_override;
ret.profitCenter.dms_acctnumber = selectedDmsAllocationConfig.gst_override;
return ret;
} else {
return { ...taxAllocations[key], tax: key };
}
})
];
} catch (error) {
console.log(error);
CdkBase.createLogEvent(socket, "ERROR", `Error encountered in CdkCalculateAllocations. ${error}`);
}
};
async function QueryJobData(socket, jobid) {
CdkBase.createLogEvent(socket, "DEBUG", `Querying job data for id ${jobid}`);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
const result = await client
.setHeaders({Authorization: `Bearer ${socket.handshake.auth.token}`})
.request(queries.GET_CDK_ALLOCATIONS, {id: jobid});
CdkBase.createLogEvent(
socket,
"TRACE",
`Job data query result ${JSON.stringify(result, null, 2)}`
);
return result.jobs_by_pk;
CdkBase.createLogEvent(socket, "DEBUG", `Querying job data for id ${jobid}`);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
const result = await client
.setHeaders({ Authorization: `Bearer ${socket.handshake.auth.token}` })
.request(queries.GET_CDK_ALLOCATIONS, { id: jobid });
CdkBase.createLogEvent(socket, "TRACE", `Job data query result ${JSON.stringify(result, null, 2)}`);
return result.jobs_by_pk;
}

View File

@@ -1,9 +1,6 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const soap = require("soap");
const queries = require("../graphql-client/queries");
@@ -11,7 +8,7 @@ const queries = require("../graphql-client/queries");
const CdkWsdl = require("./cdk-wsdl").default;
const logger = require("../utils/logger");
const {CDK_CREDENTIALS, CheckCdkResponseForError} = require("./cdk-wsdl");
const { CDK_CREDENTIALS, CheckCdkResponseForError } = require("./cdk-wsdl");
// exports.default = async function (socket, cdk_dealerid) {
// try {
@@ -31,121 +28,83 @@ const {CDK_CREDENTIALS, CheckCdkResponseForError} = require("./cdk-wsdl");
// };
exports.default = async function ReloadCdkMakes(req, res) {
const {bodyshopid, cdk_dealerid} = req.body;
try {
//Query all CDK Models
const newList = await GetCdkMakes(req, cdk_dealerid);
const { bodyshopid, cdk_dealerid } = req.body;
try {
//Query all CDK Models
const newList = await GetCdkMakes(req, cdk_dealerid);
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const deleteResult = await client
.setHeaders({Authorization: BearerToken})
.request(queries.DELETE_ALL_DMS_VEHICLES, {});
console.log(
"🚀 ~ file: cdk-get-makes.js ~ line 53 ~ deleteResult",
deleteResult
);
const deleteResult = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.DELETE_ALL_DMS_VEHICLES, {});
console.log("🚀 ~ file: cdk-get-makes.js ~ line 53 ~ deleteResult", deleteResult);
//Insert the new ones.
//Insert the new ones.
const insertResult = await client
.setHeaders({Authorization: BearerToken})
.request(queries.INSERT_DMS_VEHICLES, {
vehicles: newList.map((i) => {
return {
bodyshopid,
makecode: i.makeCode,
modelcode: i.modelCode,
make: i.makeFullName,
model: i.modelFullName,
};
}),
});
const insertResult = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_DMS_VEHICLES, {
vehicles: newList.map((i) => {
return {
bodyshopid,
makecode: i.makeCode,
modelcode: i.modelCode,
make: i.makeFullName,
model: i.modelFullName
};
})
});
logger.log(
"cdk-replace-makes-models-success",
"DEBUG",
req.user.email,
null,
{
cdk_dealerid,
count: newList.length,
}
);
res.sendStatus(200);
} catch (error) {
logger.log(
"cdk-replace-makes-models-error",
"ERROR",
req.user.email,
null,
{
cdk_dealerid,
error,
}
);
res.status(500).json(error);
}
logger.log("cdk-replace-makes-models-success", "DEBUG", req.user.email, null, {
cdk_dealerid,
count: newList.length
});
res.sendStatus(200);
} catch (error) {
logger.log("cdk-replace-makes-models-error", "ERROR", req.user.email, null, {
cdk_dealerid,
error
});
res.status(500).json(error);
}
};
async function GetCdkMakes(req, cdk_dealerid) {
logger.log("cdk-replace-makes-models", "DEBUG", req.user.email, null, {
cdk_dealerid,
logger.log("cdk-replace-makes-models", "DEBUG", req.user.email, null, {
cdk_dealerid
});
try {
const soapClientVehicleInsert = await soap.createClientAsync(CdkWsdl.VehicleInsertUpdate);
const soapResponseVehicleSearch = await soapClientVehicleInsert.getMakeModelAsync(
{
arg0: CDK_CREDENTIALS,
arg1: { id: cdk_dealerid }
},
{}
);
CheckCdkResponseForError(null, soapResponseVehicleSearch);
const [result, rawResponse, , rawRequest] = soapResponseVehicleSearch;
logger.log("cdk-replace-makes-models-request", "ERROR", req.user.email, null, {
cdk_dealerid,
xml: rawRequest
});
try {
const soapClientVehicleInsert = await soap.createClientAsync(
CdkWsdl.VehicleInsertUpdate
);
logger.log("cdk-replace-makes-models-response", "ERROR", req.user.email, null, {
cdk_dealerid,
xml: rawResponse
});
const soapResponseVehicleSearch =
await soapClientVehicleInsert.getMakeModelAsync(
{
arg0: CDK_CREDENTIALS,
arg1: {id: cdk_dealerid},
},
return result.return;
} catch (error) {
logger.log("cdk-replace-makes-models-error", "ERROR", req.user.email, null, {
cdk_dealerid,
error
});
{}
);
CheckCdkResponseForError(null, soapResponseVehicleSearch);
const [result, rawResponse, , rawRequest] = soapResponseVehicleSearch;
logger.log(
"cdk-replace-makes-models-request",
"ERROR",
req.user.email,
null,
{
cdk_dealerid,
xml: rawRequest,
}
);
logger.log(
"cdk-replace-makes-models-response",
"ERROR",
req.user.email,
null,
{
cdk_dealerid,
xml: rawResponse,
}
);
return result.return;
} catch (error) {
logger.log(
"cdk-replace-makes-models-error",
"ERROR",
req.user.email,
null,
{
cdk_dealerid,
error,
}
);
throw new Error(error);
}
throw new Error(error);
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,91 +1,74 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const CdkBase = require("../web-sockets/web-socket");
const IMEX_CDK_USER = process.env.IMEX_CDK_USER,
IMEX_CDK_PASSWORD = process.env.IMEX_CDK_PASSWORD;
IMEX_CDK_PASSWORD = process.env.IMEX_CDK_PASSWORD;
const CDK_CREDENTIALS = {
password: IMEX_CDK_PASSWORD,
username: IMEX_CDK_USER,
password: IMEX_CDK_PASSWORD,
username: IMEX_CDK_USER
};
exports.CDK_CREDENTIALS = CDK_CREDENTIALS;
const cdkDomain =
process.env.NODE_ENV === "production"
? "https://3pa.dmotorworks.com"
: "https://uat-3pa.dmotorworks.com";
process.env.NODE_ENV === "production" ? "https://3pa.dmotorworks.com" : "https://uat-3pa.dmotorworks.com";
function CheckCdkResponseForError(socket, soapResponse) {
if (!soapResponse[0]) {
//The response was null, this might be ok, it might not.
CdkBase.createLogEvent(
socket,
"WARNING",
`Warning detected in CDK Response - it appears to be null. Stack: ${
new Error().stack
}`
);
return;
}
if (!soapResponse[0]) {
//The response was null, this might be ok, it might not.
CdkBase.createLogEvent(
socket,
"WARNING",
`Warning detected in CDK Response - it appears to be null. Stack: ${new Error().stack}`
);
return;
}
const ResultToCheck = soapResponse[0].return;
const ResultToCheck = soapResponse[0].return;
if (Array.isArray(ResultToCheck)) {
ResultToCheck.forEach((result) => checkIndividualResult(socket, result));
} else {
checkIndividualResult(socket, ResultToCheck);
}
if (Array.isArray(ResultToCheck)) {
ResultToCheck.forEach((result) => checkIndividualResult(socket, result));
} else {
checkIndividualResult(socket, ResultToCheck);
}
}
exports.CheckCdkResponseForError = CheckCdkResponseForError;
function checkIndividualResult(socket, ResultToCheck) {
if (
ResultToCheck.errorLevel === 0 ||
ResultToCheck.errorLevel === "0" ||
ResultToCheck.code === "success" ||
(!ResultToCheck.code && !ResultToCheck.errorLevel)
)
//TODO: Verify that this is the best way to detect errors.
return;
else {
CdkBase.createLogEvent(
socket,
"ERROR",
`Error detected in CDK Response - ${JSON.stringify(
ResultToCheck,
null,
2
)}`
);
if (
ResultToCheck.errorLevel === 0 ||
ResultToCheck.errorLevel === "0" ||
ResultToCheck.code === "success" ||
(!ResultToCheck.code && !ResultToCheck.errorLevel)
)
//TODO: Verify that this is the best way to detect errors.
return;
else {
CdkBase.createLogEvent(
socket,
"ERROR",
`Error detected in CDK Response - ${JSON.stringify(ResultToCheck, null, 2)}`
);
throw new Error(
`Error found while validating CDK response for ${JSON.stringify(
ResultToCheck,
null,
2
)}:`
);
}
throw new Error(`Error found while validating CDK response for ${JSON.stringify(ResultToCheck, null, 2)}:`);
}
}
exports.checkIndividualResult = checkIndividualResult;
//const cdkDomain = "https://uat-3pa.dmotorworks.com";
exports.default = {
// VehicleSearch: `${cdkDomain}/pip-vehicle/services/VehicleSearch?wsdl`,
HelpDataBase: `${cdkDomain}/pip-help-database-location/services/HelpDatabaseLocation?wsdl`,
AccountingGLInsertUpdate: `${cdkDomain}/pip-accounting-gl/services/AccountingGLInsertUpdate?wsdl`,
VehicleInsertUpdate: `${cdkDomain}/pip-vehicle/services/VehicleInsertUpdate?wsdl`,
CustomerInsertUpdate: `${cdkDomain}/pip-customer/services/CustomerInsertUpdate?wsdl`,
CustomerSearch: `${cdkDomain}/pip-customer/services/CustomerSearch?wsdl`,
VehicleSearch: `${cdkDomain}/pip-vehicle/services/VehicleSearch?wsdl`,
ServiceHistoryInsert: `${cdkDomain}/pip-service-history-insert/services/ServiceHistoryInsert?wsdl`,
// VehicleSearch: `${cdkDomain}/pip-vehicle/services/VehicleSearch?wsdl`,
HelpDataBase: `${cdkDomain}/pip-help-database-location/services/HelpDatabaseLocation?wsdl`,
AccountingGLInsertUpdate: `${cdkDomain}/pip-accounting-gl/services/AccountingGLInsertUpdate?wsdl`,
VehicleInsertUpdate: `${cdkDomain}/pip-vehicle/services/VehicleInsertUpdate?wsdl`,
CustomerInsertUpdate: `${cdkDomain}/pip-customer/services/CustomerInsertUpdate?wsdl`,
CustomerSearch: `${cdkDomain}/pip-customer/services/CustomerSearch?wsdl`,
VehicleSearch: `${cdkDomain}/pip-vehicle/services/VehicleSearch?wsdl`,
ServiceHistoryInsert: `${cdkDomain}/pip-service-history-insert/services/ServiceHistoryInsert?wsdl`
};
// The following login credentials will be used for all PIPs and all environments (User Acceptance Testing and Production).

View File

@@ -1,2 +1,2 @@
exports.lookup = require("./lookup").default;
exports.submit = require("./submit").default;
exports.submit = require("./submit").default;

View File

@@ -2,23 +2,20 @@ const path = require("path");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
exports.default = async (req, res) => {
try {
logger.log("csi-surveyID-lookup", "DEBUG", "csi", req.body.surveyId, null);
const gql_response = await client.request(queries.QUERY_SURVEY, {
surveyId: req.body.surveyId,
});
res.status(200).json(gql_response);
} catch (error) {
logger.log("csi-surveyID-lookup", "ERROR", "csi", req.body.surveyId, error);
res.status(400).json(error);
}
try {
logger.log("csi-surveyID-lookup", "DEBUG", "csi", req.body.surveyId, null);
const gql_response = await client.request(queries.QUERY_SURVEY, {
surveyId: req.body.surveyId
});
res.status(200).json(gql_response);
} catch (error) {
logger.log("csi-surveyID-lookup", "ERROR", "csi", req.body.surveyId, error);
res.status(400).json(error);
}
};

View File

@@ -2,28 +2,25 @@ const path = require("path");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
exports.default = async (req, res) => {
try {
logger.log("csi-surveyID-submit", "DEBUG", "csi", req.body.surveyId, null);
const gql_response = await client.request(queries.COMPLETE_SURVEY, {
surveyId: req.body.surveyId,
survey: {
response: req.body.values,
valid: false,
completedon: new Date(),
},
});
res.status(200).json(gql_response);
} catch (error) {
logger.log("csi-surveyID-submit", "ERROR", "csi", req.body.surveyId, error);
res.status(400).json(error);
}
try {
logger.log("csi-surveyID-submit", "DEBUG", "csi", req.body.surveyId, null);
const gql_response = await client.request(queries.COMPLETE_SURVEY, {
surveyId: req.body.surveyId,
survey: {
response: req.body.values,
valid: false,
completedon: new Date()
}
});
res.status(200).json(gql_response);
} catch (error) {
logger.log("csi-surveyID-submit", "ERROR", "csi", req.body.surveyId, error);
res.status(400).json(error);
}
};

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
exports.arms = require("./arms").default;
exports.autohouse = require("./autohouse").default;
exports.claimscorp = require("./claimscorp").default;
exports.kaizen = require("./kaizen").default;
exports.kaizen = require("./kaizen").default;

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,6 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const axios = require("axios");
let nodemailer = require("nodemailer");
@@ -15,302 +12,284 @@ const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const ses = new aws.SES({
// The key apiVersion is no longer supported in v3, and can be removed.
// @deprecated The client uses the "latest" apiVersion.
apiVersion: "latest",
defaultProvider,
region: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2",
}),
// The key apiVersion is no longer supported in v3, and can be removed.
// @deprecated The client uses the "latest" apiVersion.
apiVersion: "latest",
defaultProvider,
region: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
})
});
let transporter = nodemailer.createTransport({
SES: { ses, aws },
SES: { ses, aws }
});
exports.sendServerEmail = async function ({ subject, text }) {
if (process.env.NODE_ENV === undefined) return;
try {
transporter.sendMail(
if (process.env.NODE_ENV === undefined) return;
try {
transporter.sendMail(
{
from: InstanceManager({
imex: `ImEX Online API - ${process.env.NODE_ENV} <noreply@imex.online>`,
rome: `Rome Online API - ${process.env.NODE_ENV} <noreply@romeonline.io>`,
promanager: `ProManager API - ${process.env.NODE_ENV} <noreply@promanager.web-est.com>`
}),
to: ["patrick@imexsystems.ca"],
subject: subject,
text: text,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
from: InstanceManager({
imex: `ImEX Online API - ${process.env.NODE_ENV} <noreply@imex.online>`,
rome: `Rome Online API - ${process.env.NODE_ENV} <noreply@romeonline.io>`,
promanager: `ProManager API - ${process.env.NODE_ENV} <noreply@promanager.web-est.com>`,
}),
to: ["patrick@imexsystems.ca"],
subject: subject,
text: text,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
Name: "tag_name",
Value: "tag_value",
},
],
},
},
(err, info) => {
console.log(err || info);
Name: "tag_name",
Value: "tag_value"
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
]
}
},
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
};
exports.sendTaskEmail = async function ({ to, subject, text, attachments }) {
try {
transporter.sendMail(
{
from: InstanceManager({
imex: `ImEX Online <noreply@imex.online>`,
rome: `Rome Online <noreply@romeonline.io>`,
promanager: `ProManager <noreply@promanager.web-est.com>`,
}),
to: to,
subject: subject,
text: text,
attachments: attachments || null,
},
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
try {
transporter.sendMail(
{
from: InstanceManager({
imex: `ImEX Online <noreply@imex.online>`,
rome: `Rome Online <noreply@romeonline.io>`,
promanager: `ProManager <noreply@promanager.web-est.com>`
}),
to: to,
subject: subject,
text: text,
attachments: attachments || null
},
(err, info) => {
console.log(err || info);
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
}
};
exports.sendEmail = async (req, res) => {
logger.log("send-email", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
});
logger.log("send-email", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject
});
let downloadedMedia = [];
if (req.body.media && req.body.media.length > 0) {
downloadedMedia = await Promise.all(
req.body.media.map((m) => {
try {
return getImage(m);
} catch (error) {
logger.log(
"send-email-error",
"ERROR",
req.user.email,
null,
{
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error,
}
);
}
})
);
}
transporter.sendMail(
{
let downloadedMedia = [];
if (req.body.media && req.body.media.length > 0) {
downloadedMedia = await Promise.all(
req.body.media.map((m) => {
try {
return getImage(m);
} catch (error) {
logger.log("send-email-error", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
attachments:
[
...((req.body.attachments &&
req.body.attachments.map((a) => {
return {
filename: a.filename,
path: a.path,
};
})) ||
[]),
...downloadedMedia.map((a) => {
return {
path: a,
};
}),
] || null,
html: req.body.html,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
Name: "tag_name",
Value: "tag_value",
},
],
},
},
(err, info) => {
console.log(err || info);
if (info) {
logger.log(
"send-email-success",
"DEBUG",
req.user.email,
null,
{
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
// info,
}
);
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
messageId: info.response,
});
res.json({
success: true, //response: info
});
} else {
logger.log(
"send-email-failure",
"ERROR",
req.user.email,
null,
{
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error: err,
}
);
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
bodyshopid: req.body.bodyshopid,
});
res.status(500).json({ success: false, error: err });
}
error
});
}
})
);
}
transporter.sendMail(
{
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
attachments:
[
...((req.body.attachments &&
req.body.attachments.map((a) => {
return {
filename: a.filename,
path: a.path
};
})) ||
[]),
...downloadedMedia.map((a) => {
return {
path: a
};
})
] || null,
html: req.body.html,
ses: {
// optional extra arguments for SendRawEmail
Tags: [
{
Name: "tag_name",
Value: "tag_value"
}
]
}
},
(err, info) => {
console.log(err || info);
if (info) {
logger.log("send-email-success", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject
// info,
});
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
messageId: info.response
});
res.json({
success: true //response: info
});
} else {
logger.log("send-email-failure", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error: err
});
logEmail(req, {
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
bodyshopid: req.body.bodyshopid
});
res.status(500).json({ success: false, error: err });
}
}
);
};
async function getImage(imageUrl) {
let image = await axios.get(imageUrl, { responseType: "arraybuffer" });
let raw = Buffer.from(image.data).toString("base64");
return "data:" + image.headers["content-type"] + ";base64," + raw;
let image = await axios.get(imageUrl, { responseType: "arraybuffer" });
let raw = Buffer.from(image.data).toString("base64");
return "data:" + image.headers["content-type"] + ";base64," + raw;
}
async function logEmail(req, email) {
try {
const insertresult = await client.request(queries.INSERT_EMAIL_AUDIT, {
email: {
to: email.to,
cc: email.cc,
subject: email.subject,
bodyshopid: req.body.bodyshopid,
useremail: req.user.email,
contents: req.body.html,
jobid: req.body.jobid,
sesmessageid: email.messageId,
status: "Sent",
},
});
console.log(insertresult);
} catch (error) {
logger.log("email-log-error", "error", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
// info,
});
}
try {
const insertresult = await client.request(queries.INSERT_EMAIL_AUDIT, {
email: {
to: email.to,
cc: email.cc,
subject: email.subject,
bodyshopid: req.body.bodyshopid,
useremail: req.user.email,
contents: req.body.html,
jobid: req.body.jobid,
sesmessageid: email.messageId,
status: "Sent"
}
});
console.log(insertresult);
} catch (error) {
logger.log("email-log-error", "error", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject
// info,
});
}
}
exports.emailBounce = async function (req, res) {
try {
const body = JSON.parse(req.body);
if (body.Type === "SubscriptionConfirmation") {
logger.log("SNS-message", "DEBUG", "api", null, {
body: req.body,
});
try {
const body = JSON.parse(req.body);
if (body.Type === "SubscriptionConfirmation") {
logger.log("SNS-message", "DEBUG", "api", null, {
body: req.body
});
}
const message = JSON.parse(body.Message);
if (message.notificationType === "Bounce") {
let replyTo, subject, messageId;
message.mail.headers.forEach((header) => {
if (header.name === "Reply-To") {
replyTo = header.value;
} else if (header.name === "Subject") {
subject = header.value;
}
const message = JSON.parse(body.Message);
if (message.notificationType === "Bounce") {
let replyTo, subject, messageId;
message.mail.headers.forEach((header) => {
if (header.name === "Reply-To") {
replyTo = header.value;
} else if (header.name === "Subject") {
subject = header.value;
}
});
messageId = message.mail.messageId;
if (
replyTo ===
InstanceManager({
imex: "noreply@imex.online",
rome: "noreply@romeonline.io",
promanager: "noreply@promanager.web-est.com",
})
) {
res.sendStatus(200);
return;
}
//If it's bounced, log it as bounced in audit log. Send an email to the user.
const result = await client.request(queries.UPDATE_EMAIL_AUDIT, {
sesid: messageId,
status: "Bounced",
context: message.bounce?.bouncedRecipients,
});
transporter.sendMail(
{
from: InstanceMgr({
imex: `ImEX Online <noreply@imex.online>`,
rome: `Rome Online <noreply@romeonline.io>`,
}),
to: replyTo,
//bcc: "patrick@snapt.ca",
subject: `${InstanceMgr({
imex: "ImEX Online",
rome: "Rome Online",
promanager: "ProManager",
})} Bounced Email - RE: ${subject}`,
text: `${InstanceMgr({
imex: "ImEX Online",
rome: "Rome Online",
promanager: "ProManager",
})} has tried to deliver an email with the subject: ${subject} to the intended recipients but encountered an error.
});
messageId = message.mail.messageId;
if (
replyTo ===
InstanceManager({
imex: "noreply@imex.online",
rome: "noreply@romeonline.io",
promanager: "noreply@promanager.web-est.com"
})
) {
res.sendStatus(200);
return;
}
//If it's bounced, log it as bounced in audit log. Send an email to the user.
const result = await client.request(queries.UPDATE_EMAIL_AUDIT, {
sesid: messageId,
status: "Bounced",
context: message.bounce?.bouncedRecipients
});
transporter.sendMail(
{
from: InstanceMgr({
imex: `ImEX Online <noreply@imex.online>`,
rome: `Rome Online <noreply@romeonline.io>`
}),
to: replyTo,
//bcc: "patrick@snapt.ca",
subject: `${InstanceMgr({
imex: "ImEX Online",
rome: "Rome Online",
promanager: "ProManager"
})} Bounced Email - RE: ${subject}`,
text: `${InstanceMgr({
imex: "ImEX Online",
rome: "Rome Online",
promanager: "ProManager"
})} has tried to deliver an email with the subject: ${subject} to the intended recipients but encountered an error.
${body.bounce?.bouncedRecipients.map(
(r) =>
`Recipient: ${r.emailAddress} | Status: ${r.action} | Code: ${r.diagnosticCode}
(r) =>
`Recipient: ${r.emailAddress} | Status: ${r.action} | Code: ${r.diagnosticCode}
`
)}
`,
},
(err, info) => {
console.log("***", err || info);
}
);
`
},
(err, info) => {
console.log("***", err || info);
}
} catch (error) {
logger.log("sns-error", "ERROR", "api", null, {
error: JSON.stringify(error),
});
);
}
res.sendStatus(200);
} catch (error) {
logger.log("sns-error", "ERROR", "api", null, {
error: JSON.stringify(error)
});
}
res.sendStatus(200);
};

View File

@@ -1,13 +1,10 @@
const admin = require("firebase-admin");
const logger = require("../utils/logger");
const path = require("path");
const {auth} = require("firebase-admin");
const { auth } = require("firebase-admin");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
@@ -15,202 +12,181 @@ const serviceAccount = require(process.env.FIREBASE_ADMINSDK_JSON);
const adminEmail = require("../utils/adminEmail");
admin.initializeApp({
credential: admin.credential.cert(serviceAccount),
databaseURL: process.env.FIREBASE_DATABASE_URL,
credential: admin.credential.cert(serviceAccount),
databaseURL: process.env.FIREBASE_DATABASE_URL
});
exports.admin = admin;
exports.createUser = async (req, res) => {
logger.log("admin-create-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
});
logger.log("admin-create-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
const {email, displayName, password, shopid, authlevel} = req.body;
try {
const userRecord = await admin
.auth()
.createUser({email, displayName, password});
const { email, displayName, password, shopid, authlevel } = req.body;
try {
const userRecord = await admin.auth().createUser({ email, displayName, password });
// See the UserRecord reference doc for the contents of userRecord.
// See the UserRecord reference doc for the contents of userRecord.
const result = await client.request(
`
const result = await client.request(
`
mutation INSERT_USER($user: users_insert_input!) {
insert_users_one(object: $user) {
email
}
}
`,
{
user: {
email: email.toLowerCase(),
authid: userRecord.uid,
associations: {
data: [{shopid, authlevel, active: true}],
},
},
}
);
{
user: {
email: email.toLowerCase(),
authid: userRecord.uid,
associations: {
data: [{ shopid, authlevel, active: true }]
}
}
}
);
res.json({userRecord, result});
} catch (error) {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
}
res.json({ userRecord, result });
} catch (error) {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error
});
res.status(500).json(error);
}
};
exports.updateUser = (req, res) => {
logger.log("admin-update-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
logger.log("admin-update-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log("admin-update-user-unauthorized", "ERROR", req.user.email, null, {
request: req.body,
user: req.user
});
res.sendStatus(404);
return;
}
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
return;
}
admin
.auth()
.updateUser(
req.body.uid,
req.body.user
// {
// email: "modifiedUser@example.com",
// phoneNumber: "+11234567890",
// emailVerified: true,
// password: "newPassword",
// displayName: "Jane Doe",
// photoURL: "http://www.example.com/12345678/photo.png",
// disabled: true,
// }
)
.then((userRecord) => {
// See the UserRecord reference doc for the contents of userRecord.
admin
.auth()
.updateUser(
req.body.uid,
req.body.user
// {
// email: "modifiedUser@example.com",
// phoneNumber: "+11234567890",
// emailVerified: true,
// password: "newPassword",
// displayName: "Jane Doe",
// photoURL: "http://www.example.com/12345678/photo.png",
// disabled: true,
// }
)
.then((userRecord) => {
// See the UserRecord reference doc for the contents of userRecord.
logger.log("admin-update-user-success", "ADMIN", req.user.email, null, {
userRecord,
ioadmin: true,
});
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
});
logger.log("admin-update-user-success", "ADMIN", req.user.email, null, {
userRecord,
ioadmin: true
});
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error
});
res.status(500).json(error);
});
};
exports.getUser = (req, res) => {
logger.log("admin-get-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true,
logger.log("admin-get-user", "ADMIN", req.user.email, null, {
request: req.body,
ioadmin: true
});
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log("admin-update-user-unauthorized", "ERROR", req.user.email, null, {
request: req.body,
user: req.user
});
res.sendStatus(404);
return;
}
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
return;
}
admin
.auth()
.getUser(req.body.uid)
.then((userRecord) => {
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-get-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
});
admin
.auth()
.getUser(req.body.uid)
.then((userRecord) => {
res.json(userRecord);
})
.catch((error) => {
logger.log("admin-get-user-error", "ERROR", req.user.email, null, {
error
});
res.status(500).json(error);
});
};
exports.sendNotification = async (req, res) => {
setTimeout(() => {
// Send a message to the device corresponding to the provided
// registration token.
admin
.messaging()
.send({
topic: "PRD_PATRICK-messaging",
notification: {
title: `ImEX Online Message - `,
body: "Test Noti.",
//imageUrl: "https://thinkimex.com/img/io-fcm.png",
},
data: {
type: "messaging-inbound",
conversationid: "e0eb17c3-3a78-4e3f-b932-55ef35aa2297",
text: "Hello. ",
image_path: "",
phone_num: "+16049992002",
},
})
.then((response) => {
// Response is a message ID string.
console.log("Successfully sent message:", response);
})
.catch((error) => {
console.log("Error sending message:", error);
});
setTimeout(() => {
// Send a message to the device corresponding to the provided
// registration token.
admin
.messaging()
.send({
topic: "PRD_PATRICK-messaging",
notification: {
title: `ImEX Online Message - `,
body: "Test Noti."
//imageUrl: "https://thinkimex.com/img/io-fcm.png",
},
data: {
type: "messaging-inbound",
conversationid: "e0eb17c3-3a78-4e3f-b932-55ef35aa2297",
text: "Hello. ",
image_path: "",
phone_num: "+16049992002"
}
})
.then((response) => {
// Response is a message ID string.
console.log("Successfully sent message:", response);
})
.catch((error) => {
console.log("Error sending message:", error);
});
res.sendStatus(200);
}, 500);
res.sendStatus(200);
}, 500);
};
exports.subscribe = async (req, res) => {
const result = await admin
.messaging()
.subscribeToTopic(
req.body.fcm_tokens,
`${req.body.imexshopid}-${req.body.type}`
);
const result = await admin
.messaging()
.subscribeToTopic(req.body.fcm_tokens, `${req.body.imexshopid}-${req.body.type}`);
res.json(result);
res.json(result);
};
exports.unsubscribe = async (req, res) => {
try {
const result = await admin
.messaging()
.unsubscribeFromTopic(
req.body.fcm_tokens,
`${req.body.imexshopid}-${req.body.type}`
);
try {
const result = await admin
.messaging()
.unsubscribeFromTopic(req.body.fcm_tokens, `${req.body.imexshopid}-${req.body.type}`);
res.json(result);
} catch (error) {
res.sendStatus(500);
}
res.json(result);
} catch (error) {
res.sendStatus(500);
}
};
//Admin claims code.
// const uid = "JEqqYlsadwPEXIiyRBR55fflfko1";

View File

@@ -1,10 +1,7 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
//New bug introduced with Graphql Request.
// https://github.com/prisma-labs/graphql-request/issues/206
@@ -12,9 +9,9 @@ require("dotenv").config({
// global.Headers = global.Headers || Headers;
exports.client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
"x-hasura-admin-secret": process.env.HASURA_ADMIN_SECRET,
},
headers: {
"x-hasura-admin-secret": process.env.HASURA_ADMIN_SECRET
}
});
exports.unauthclient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT);

View File

@@ -1987,20 +1987,20 @@ exports.UPDATE_OLD_TRANSITION = `mutation UPDATE_OLD_TRANSITION($jobid: uuid!, $
}`;
exports.INSERT_NEW_TRANSITION = (
includeOldTransition
includeOldTransition
) => `mutation INSERT_NEW_TRANSITION($newTransition: transitions_insert_input!, ${
includeOldTransition ? `$oldTransitionId: uuid!, $duration: numeric` : ""
includeOldTransition ? `$oldTransitionId: uuid!, $duration: numeric` : ""
}) {
insert_transitions_one(object: $newTransition) {
id
}
${
includeOldTransition
? `update_transitions(where: {id: {_eq: $oldTransitionId}}, _set: {duration: $duration}) {
? `update_transitions(where: {id: {_eq: $oldTransitionId}}, _set: {duration: $duration}) {
affected_rows
}`
: ""
}
: ""
}
}`;
exports.QUERY_JOB_ID_MIXDATA = `query QUERY_JOB_ID_MIXDATA($roNumbers: [String!]!) {

View File

@@ -3,44 +3,43 @@
const awsSecretManager = require("@aws-sdk/client-secrets-manager");
class SecretsManager {
/**
* Uses AWS Secrets Manager to retrieve a secret
*/
static async getSecret(secretName, region) {
const config = {region: region};
let secretsManager = new awsSecretManager.SecretsManager(config);
try {
let secretValue = await secretsManager
.getSecretValue({SecretId: secretName});
if ("SecretString" in secretValue) {
return secretValue.SecretString;
} else {
let buff = new Buffer(secretValue.SecretBinary, "base64");
return buff.toString("ascii");
}
} catch (err) {
if (err.code === "DecryptionFailureException")
// Secrets Manager can't decrypt the protected secret text using the provided KMS key.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "InternalServiceErrorException")
// An error occurred on the server side.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "InvalidParameterException")
// You provided an invalid value for a parameter.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "InvalidRequestException")
// You provided a parameter value that is not valid for the current state of the resource.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "ResourceNotFoundException")
// We can't find the resource that you asked for.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
}
/**
* Uses AWS Secrets Manager to retrieve a secret
*/
static async getSecret(secretName, region) {
const config = { region: region };
let secretsManager = new awsSecretManager.SecretsManager(config);
try {
let secretValue = await secretsManager.getSecretValue({ SecretId: secretName });
if ("SecretString" in secretValue) {
return secretValue.SecretString;
} else {
let buff = new Buffer(secretValue.SecretBinary, "base64");
return buff.toString("ascii");
}
} catch (err) {
if (err.code === "DecryptionFailureException")
// Secrets Manager can't decrypt the protected secret text using the provided KMS key.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "InternalServiceErrorException")
// An error occurred on the server side.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "InvalidParameterException")
// You provided an invalid value for a parameter.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "InvalidRequestException")
// You provided a parameter value that is not valid for the current state of the resource.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
else if (err.code === "ResourceNotFoundException")
// We can't find the resource that you asked for.
// Deal with the exception here, and/or rethrow at your discretion.
throw err;
}
}
}
module.exports = SecretsManager;

View File

@@ -8,207 +8,187 @@ const moment = require("moment");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const domain = process.env.NODE_ENV ? "secure" : "test";
const {
SecretsManagerClient,
GetSecretValueCommand,
} = require("@aws-sdk/client-secrets-manager");
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
const client = new SecretsManagerClient({
region: "ca-central-1",
region: "ca-central-1"
});
const gqlClient = require("../graphql-client/graphql-client").client;
const getShopCredentials = async (bodyshop) => {
// Development only
if (process.env.NODE_ENV === undefined) {
return {
merchantkey: process.env.INTELLIPAY_MERCHANTKEY,
apikey: process.env.INTELLIPAY_APIKEY,
};
}
// Development only
if (process.env.NODE_ENV === undefined) {
return {
merchantkey: process.env.INTELLIPAY_MERCHANTKEY,
apikey: process.env.INTELLIPAY_APIKEY
};
}
// Production code
if (bodyshop?.imexshopid) {
try {
const secret = await client.send(
new GetSecretValueCommand({
SecretId: `intellipay-credentials-${bodyshop.imexshopid}`,
VersionStage: "AWSCURRENT", // VersionStage defaults to AWSCURRENT if unspecified
})
);
return JSON.parse(secret.SecretString);
} catch (error) {
return {
error: error.message,
};
}
// Production code
if (bodyshop?.imexshopid) {
try {
const secret = await client.send(
new GetSecretValueCommand({
SecretId: `intellipay-credentials-${bodyshop.imexshopid}`,
VersionStage: "AWSCURRENT" // VersionStage defaults to AWSCURRENT if unspecified
})
);
return JSON.parse(secret.SecretString);
} catch (error) {
return {
error: error.message
};
}
}
};
exports.lightbox_credentials = async (req, res) => {
logger.log(
"intellipay-lightbox-credentials",
"DEBUG",
req.user?.email,
null,
null
);
logger.log("intellipay-lightbox-credentials", "DEBUG", req.user?.email, null, null);
const shopCredentials = await getShopCredentials(req.body.bodyshop);
const shopCredentials = await getShopCredentials(req.body.bodyshop);
if (shopCredentials.error) {
res.json(shopCredentials);
return;
}
try {
const options = {
method: "POST",
headers: {"content-type": "application/x-www-form-urlencoded"},
data: qs.stringify({
...shopCredentials,
operatingenv: "businessattended",
}),
url: `https://${domain}.cpteller.com/api/custapi.cfc?method=autoterminal${
req.body.refresh ? "_refresh" : ""
}`, //autoterminal_refresh
};
if (shopCredentials.error) {
res.json(shopCredentials);
return;
}
try {
const options = {
method: "POST",
headers: { "content-type": "application/x-www-form-urlencoded" },
data: qs.stringify({
...shopCredentials,
operatingenv: "businessattended"
}),
url: `https://${domain}.cpteller.com/api/custapi.cfc?method=autoterminal${req.body.refresh ? "_refresh" : ""}` //autoterminal_refresh
};
const response = await axios(options);
const response = await axios(options);
res.send(response.data);
} catch (error) {
console.log(error);
logger.log(
"intellipay-lightbox-credentials-error",
"ERROR",
req.user?.email,
null,
{error: JSON.stringify(error)}
);
res.json({error});
}
res.send(response.data);
} catch (error) {
console.log(error);
logger.log("intellipay-lightbox-credentials-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error)
});
res.json({ error });
}
};
exports.payment_refund = async (req, res) => {
logger.log("intellipay-refund", "DEBUG", req.user?.email, null, null);
logger.log("intellipay-refund", "DEBUG", req.user?.email, null, null);
const shopCredentials = await getShopCredentials(req.body.bodyshop);
const shopCredentials = await getShopCredentials(req.body.bodyshop);
try {
const options = {
method: "POST",
headers: {"content-type": "application/x-www-form-urlencoded"},
try {
const options = {
method: "POST",
headers: { "content-type": "application/x-www-form-urlencoded" },
data: qs.stringify({
method: "payment_refund",
...shopCredentials,
paymentid: req.body.paymentid,
amount: req.body.amount,
}),
url: `https://${domain}.cpteller.com/api/26/webapi.cfc?method=payment_refund`,
};
data: qs.stringify({
method: "payment_refund",
...shopCredentials,
paymentid: req.body.paymentid,
amount: req.body.amount
}),
url: `https://${domain}.cpteller.com/api/26/webapi.cfc?method=payment_refund`
};
const response = await axios(options);
const response = await axios(options);
res.send(response.data);
} catch (error) {
console.log(error);
logger.log("intellipay-refund-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error),
});
res.json({error});
}
res.send(response.data);
} catch (error) {
console.log(error);
logger.log("intellipay-refund-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error)
});
res.json({ error });
}
};
exports.generate_payment_url = async (req, res) => {
logger.log("intellipay-payment-url", "DEBUG", req.user?.email, null, null);
const shopCredentials = await getShopCredentials(req.body.bodyshop);
logger.log("intellipay-payment-url", "DEBUG", req.user?.email, null, null);
const shopCredentials = await getShopCredentials(req.body.bodyshop);
try {
const options = {
method: "POST",
headers: {"content-type": "application/x-www-form-urlencoded"},
//TODO: Move these to environment variables/database.
data: qs.stringify({
...shopCredentials,
//...req.body,
amount: Dinero({amount: Math.round(req.body.amount * 100)}).toFormat(
"0.00"
),
account: req.body.account,
invoice: req.body.invoice,
createshorturl: true,
//The postback URL is set at the CP teller global terminal settings page.
}),
url: `https://${domain}.cpteller.com/api/custapi.cfc?method=generate_lightbox_url`,
};
try {
const options = {
method: "POST",
headers: { "content-type": "application/x-www-form-urlencoded" },
//TODO: Move these to environment variables/database.
data: qs.stringify({
...shopCredentials,
//...req.body,
amount: Dinero({ amount: Math.round(req.body.amount * 100) }).toFormat("0.00"),
account: req.body.account,
invoice: req.body.invoice,
createshorturl: true
//The postback URL is set at the CP teller global terminal settings page.
}),
url: `https://${domain}.cpteller.com/api/custapi.cfc?method=generate_lightbox_url`
};
const response = await axios(options);
const response = await axios(options);
res.send(response.data);
} catch (error) {
console.log(error);
logger.log("intellipay-payment-url-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error),
});
res.json({error});
}
res.send(response.data);
} catch (error) {
console.log(error);
logger.log("intellipay-payment-url-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error)
});
res.json({ error });
}
};
exports.postback = async (req, res) => {
logger.log("intellipay-postback", "ERROR", req.user?.email, null, req.body);
const {body: values} = req;
logger.log("intellipay-postback", "ERROR", req.user?.email, null, req.body);
const { body: values } = req;
if (!values.invoice) {
res.sendStatus(200);
return;
}
// TODO query job by account name
const job = await gqlClient.request(queries.GET_JOB_BY_PK, {
id: values.invoice,
if (!values.invoice) {
res.sendStatus(200);
return;
}
// TODO query job by account name
const job = await gqlClient.request(queries.GET_JOB_BY_PK, {
id: values.invoice
});
// TODO add mutation to database
try {
const paymentResult = await gqlClient.request(queries.INSERT_NEW_PAYMENT, {
paymentInput: {
amount: values.total,
transactionid: `C00 ${values.authcode}`,
payer: "Customer",
type: values.cardtype,
jobid: values.invoice,
date: moment(Date.now())
}
});
// TODO add mutation to database
try {
const paymentResult = await gqlClient.request(queries.INSERT_NEW_PAYMENT, {
paymentInput: {
amount: values.total,
transactionid: `C00 ${values.authcode}`,
payer: "Customer",
type: values.cardtype,
jobid: values.invoice,
date: moment(Date.now()),
},
});
await gqlClient.request(queries.INSERT_PAYMENT_RESPONSE, {
paymentResponse: {
amount: values.total,
bodyshopid: job.jobs_by_pk.shopid,
paymentid: paymentResult.id,
jobid: values.invoice,
declinereason: "Approved",
ext_paymentid: values.paymentid,
successful: true,
response: values
}
});
await gqlClient.request(queries.INSERT_PAYMENT_RESPONSE, {
paymentResponse: {
amount: values.total,
bodyshopid: job.jobs_by_pk.shopid,
paymentid: paymentResult.id,
jobid: values.invoice,
declinereason: "Approved",
ext_paymentid: values.paymentid,
successful: true,
response: values,
},
});
res.send({message: "Postback Successful"});
} catch (error) {
logger.log("intellipay-postback-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error),
body: req.body,
});
res.status(400).json({succesful: false, error: error.message});
}
res.send({ message: "Postback Successful" });
} catch (error) {
logger.log("intellipay-postback-error", "ERROR", req.user?.email, null, {
error: JSON.stringify(error),
body: req.body
});
res.status(400).json({ succesful: false, error: error.message });
}
};

View File

@@ -4,47 +4,35 @@ const path = require("path");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.default = async (req, res) => {
const {
useremail,
bodyshopid,
operationName,
variables,
env,
const { useremail, bodyshopid, operationName, variables, env, time, dbevent, user } = req.body;
try {
await client.request(queries.INSERT_IOEVENT, {
event: {
operationname: operationName,
time,
dbevent,
user,
} = req.body;
try {
await client.request(queries.INSERT_IOEVENT, {
event: {
operationname: operationName,
time,
dbevent,
env,
variables,
bodyshopid,
useremail,
},
});
res.sendStatus(200);
} catch (error) {
logger.log("ioevent-error", "trace", user, null, {
operationname: operationName,
time,
dbevent,
env,
variables,
bodyshopid,
useremail,
});
res.sendStatus(200);
}
env,
variables,
bodyshopid,
useremail
}
});
res.sendStatus(200);
} catch (error) {
logger.log("ioevent-error", "trace", user, null, {
operationname: operationName,
time,
dbevent,
env,
variables,
bodyshopid,
useremail
});
res.sendStatus(200);
}
};

File diff suppressed because it is too large Load Diff

View File

@@ -6,106 +6,103 @@ const calculateStatusDuration = require("../utils/calculateStatusDuration");
const getLifecycleStatusColor = require("../utils/getLifecycleStatusColor");
const jobLifecycle = async (req, res) => {
// Grab the jobids and statuses from the request body
const {
jobids,
statuses
} = req.body;
// Grab the jobids and statuses from the request body
const { jobids, statuses } = req.body;
if (!jobids) {
return res.status(400).json({
error: "Missing jobids"
});
}
const jobIDs = _.isArray(jobids) ? jobids : [jobids];
const client = req.userGraphQLClient;
const resp = await client.request(queries.QUERY_TRANSITIONS_BY_JOBID, {jobids: jobIDs,});
const transitions = resp.transitions;
if (!transitions) {
return res.status(200).json({
jobIDs,
transitions: []
});
}
const transitionsByJobId = _.groupBy(resp.transitions, 'jobid');
const groupedTransitions = {};
const allDurations = [];
for (let jobId in transitionsByJobId) {
let lifecycle = transitionsByJobId[jobId].map(transition => {
transition.start_readable = transition.start ? moment(transition.start).fromNow() : 'N/A';
transition.end_readable = transition.end ? moment(transition.end).fromNow() : 'N/A';
if (transition.duration) {
transition.duration_seconds = Math.round(transition.duration / 1000);
transition.duration_minutes = Math.round(transition.duration_seconds / 60);
let duration = moment.duration(transition.duration);
transition.duration_readable = durationToHumanReadable(duration);
} else {
transition.duration_seconds = 0;
transition.duration_minutes = 0;
transition.duration_readable = 'N/A';
}
return transition;
});
const durations = calculateStatusDuration(lifecycle, statuses);
groupedTransitions[jobId] = {
lifecycle,
durations
};
if (durations?.summations) {
allDurations.push(durations.summations);
}
}
const finalSummations = [];
const flatGroupedAllDurations = _.groupBy(allDurations.flat(),'status');
const finalStatusCounts = Object.keys(flatGroupedAllDurations).reduce((acc, status) => {
acc[status] = flatGroupedAllDurations[status].length;
return acc;
}, {});
// Calculate total value of all statuses
const finalTotal = Object.values(flatGroupedAllDurations).reduce((total, statusArr) => {
return total + statusArr.reduce((acc, curr) => acc + curr.value, 0);
}, 0);
Object.keys(flatGroupedAllDurations).forEach(status => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = (value / finalTotal) * 100;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
roundedPercentage
});
if (!jobids) {
return res.status(400).json({
error: "Missing jobids"
});
}
const jobIDs = _.isArray(jobids) ? jobids : [jobids];
const client = req.userGraphQLClient;
const resp = await client.request(queries.QUERY_TRANSITIONS_BY_JOBID, { jobids: jobIDs });
const transitions = resp.transitions;
if (!transitions) {
return res.status(200).json({
jobIDs,
transition: groupedTransitions,
durations: {
jobs: jobIDs.length,
summations: finalSummations,
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal))
}
jobIDs,
transitions: []
});
}
}
module.exports = jobLifecycle;
const transitionsByJobId = _.groupBy(resp.transitions, "jobid");
const groupedTransitions = {};
const allDurations = [];
for (let jobId in transitionsByJobId) {
let lifecycle = transitionsByJobId[jobId].map((transition) => {
transition.start_readable = transition.start ? moment(transition.start).fromNow() : "N/A";
transition.end_readable = transition.end ? moment(transition.end).fromNow() : "N/A";
if (transition.duration) {
transition.duration_seconds = Math.round(transition.duration / 1000);
transition.duration_minutes = Math.round(transition.duration_seconds / 60);
let duration = moment.duration(transition.duration);
transition.duration_readable = durationToHumanReadable(duration);
} else {
transition.duration_seconds = 0;
transition.duration_minutes = 0;
transition.duration_readable = "N/A";
}
return transition;
});
const durations = calculateStatusDuration(lifecycle, statuses);
groupedTransitions[jobId] = {
lifecycle,
durations
};
if (durations?.summations) {
allDurations.push(durations.summations);
}
}
const finalSummations = [];
const flatGroupedAllDurations = _.groupBy(allDurations.flat(), "status");
const finalStatusCounts = Object.keys(flatGroupedAllDurations).reduce((acc, status) => {
acc[status] = flatGroupedAllDurations[status].length;
return acc;
}, {});
// Calculate total value of all statuses
const finalTotal = Object.values(flatGroupedAllDurations).reduce((total, statusArr) => {
return total + statusArr.reduce((acc, curr) => acc + curr.value, 0);
}, 0);
Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = (value / finalTotal) * 100;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
roundedPercentage
});
});
return res.status(200).json({
jobIDs,
transition: groupedTransitions,
durations: {
jobs: jobIDs.length,
summations: finalSummations,
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal))
}
});
};
module.exports = jobLifecycle;

View File

@@ -11,91 +11,68 @@ const path = require("path");
const client = require("../graphql-client/graphql-client").client;
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
async function StatusTransition(req, res) {
const {
id: jobid,
status: value,
shopid: bodyshopid,
} = req.body.event.data.new;
const { id: jobid, status: value, shopid: bodyshopid } = req.body.event.data.new;
// Create record OPEN on new item, enter state
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record, create a new record saying we came from previous status going to previous status
// (Timeline)
// Final status is exported, there is no end date as there is no further transition (has no end date)
try {
const {update_transitions} = await client.request(
queries.UPDATE_OLD_TRANSITION,
{
jobid: jobid,
existingTransition: {
end: new Date(),
next_value: value,
// Create record OPEN on new item, enter state
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record, create a new record saying we came from previous status going to previous status
// (Timeline)
// Final status is exported, there is no end date as there is no further transition (has no end date)
try {
const { update_transitions } = await client.request(queries.UPDATE_OLD_TRANSITION, {
jobid: jobid,
existingTransition: {
end: new Date(),
next_value: value
//duration
},
}
);
//duration
}
});
let duration =
update_transitions.affected_rows === 0
? 0
: new Date(update_transitions.returning[0].end) -
new Date(update_transitions.returning[0].start);
let duration =
update_transitions.affected_rows === 0
? 0
: new Date(update_transitions.returning[0].end) - new Date(update_transitions.returning[0].start);
const resp2 = await client.request(
queries.INSERT_NEW_TRANSITION(update_transitions.affected_rows > 0),
{
...(update_transitions.affected_rows > 0
? {
oldTransitionId:
update_transitions.affected_rows === 0
? null
: update_transitions.returning[0].id,
duration,
}
: {}),
newTransition: {
bodyshopid: bodyshopid,
jobid: jobid,
start:
update_transitions.affected_rows === 0
? new Date()
: update_transitions.returning[0].end,
prev_value:
update_transitions.affected_rows === 0
? null
: update_transitions.returning[0].value,
value: value,
type: "status",
},
}
);
const resp2 = await client.request(queries.INSERT_NEW_TRANSITION(update_transitions.affected_rows > 0), {
...(update_transitions.affected_rows > 0
? {
oldTransitionId: update_transitions.affected_rows === 0 ? null : update_transitions.returning[0].id,
duration
}
: {}),
newTransition: {
bodyshopid: bodyshopid,
jobid: jobid,
start: update_transitions.affected_rows === 0 ? new Date() : update_transitions.returning[0].end,
prev_value: update_transitions.affected_rows === 0 ? null : update_transitions.returning[0].value,
value: value,
type: "status"
}
});
logger.log("job-transition-update-result", "DEBUG", null, jobid, resp2);
logger.log("job-transition-update-result", "DEBUG", null, jobid, resp2);
//Check to see if there is an existing status transition record.
//Query using Job ID, start is not null, end is null.
//Check to see if there is an existing status transition record.
//Query using Job ID, start is not null, end is null.
//If there is no existing record, this is the start of the transition life cycle.
// Create the initial transition record.
//If there is no existing record, this is the start of the transition life cycle.
// Create the initial transition record.
//If there is a current status transition record, update it with the end date, duration, and next value.
//If there is a current status transition record, update it with the end date, duration, and next value.
res.sendStatus(200); //.json(ret);
} catch (error) {
logger.log("job-status-transition-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack,
});
res.sendStatus(200); //.json(ret);
} catch (error) {
logger.log("job-status-transition-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack
});
res.status(400).send(JSON.stringify(error));
}
res.status(400).send(JSON.stringify(error));
}
}
exports.statustransition = StatusTransition;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,16 +1,16 @@
const RenderInstanceManager = require('../utils/instanceMgr').default;
const RenderInstanceManager = require("../utils/instanceMgr").default;
exports.totals = RenderInstanceManager({
imex: require('./job-totals').default,
rome: require('./job-totals-USA').default,
promanager: require('./job-totals-USA').default,
imex: require("./job-totals").default,
rome: require("./job-totals-USA").default,
promanager: require("./job-totals-USA").default
});
exports.totalsSsu = RenderInstanceManager({
imex: require('./job-totals').totalsSsu,
rome: require('./job-totals-USA').totalsSsu,
promanager: require('./job-totals-USA').totalsSsu,
imex: require("./job-totals").totalsSsu,
rome: require("./job-totals-USA").totalsSsu,
promanager: require("./job-totals-USA").totalsSsu
});
exports.costing = require('./job-costing').JobCosting;
exports.costingmulti = require('./job-costing').JobCostingMulti;
exports.statustransition = require('./job-status-transition').statustransition;
exports.lifecycle = require('./job-lifecycle');
exports.costing = require("./job-costing").JobCosting;
exports.costingmulti = require("./job-costing").JobCostingMulti;
exports.statustransition = require("./job-status-transition").statustransition;
exports.lifecycle = require("./job-lifecycle");

View File

@@ -5,168 +5,155 @@ const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
var cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL);
exports.createSignedUploadURL = (req, res) => {
logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send(
cloudinary.utils.api_sign_request(
req.body,
process.env.CLOUDINARY_API_SECRET
)
);
logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET));
};
exports.downloadFiles = (req, res) => {
const {ids} = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
const { ids } = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
const url = cloudinary.utils.download_zip_url({
public_ids: ids,
flatten_folders: true,
});
res.send(url);
const url = cloudinary.utils.download_zip_url({
public_ids: ids,
flatten_folders: true
});
res.send(url);
};
exports.deleteFiles = async (req, res) => {
const {ids} = req.body;
const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
const { ids } = req.body;
const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
const returns = [];
if (types.image) {
//delete images
const returns = [];
if (types.image) {
//delete images
returns.push(
await cloudinary.api.delete_resources(
types.image.map((x) => x.key),
{resource_type: "image"}
)
);
}
if (types.video) {
//delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.video.map((x) => x.key),
{resource_type: "video"}
)
);
}
if (types.raw) {
//delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.raw.map((x) => `${x.key}.${x.extension}`),
{resource_type: "raw"}
)
);
}
returns.push(
await cloudinary.api.delete_resources(
types.image.map((x) => x.key),
{ resource_type: "image" }
)
);
}
if (types.video) {
//delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.video.map((x) => x.key),
{ resource_type: "video" }
)
);
}
if (types.raw) {
//delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.raw.map((x) => `${x.key}.${x.extension}`),
{ resource_type: "raw" }
)
);
}
// Delete it on apollo.
const successfulDeletes = [];
returns.forEach((resType) => {
Object.keys(resType.deleted).forEach((key) => {
if (
resType.deleted[key] === "deleted" ||
resType.deleted[key] === "not_found"
) {
successfulDeletes.push(key.replace(/\.[^/.]+$/, ""));
}
});
// Delete it on apollo.
const successfulDeletes = [];
returns.forEach((resType) => {
Object.keys(resType.deleted).forEach((key) => {
if (resType.deleted[key] === "deleted" || resType.deleted[key] === "not_found") {
successfulDeletes.push(key.replace(/\.[^/.]+$/, ""));
}
});
});
try {
const result = await client.request(queries.DELETE_MEDIA_DOCUMENTS, {
ids: ids.filter((i) => successfulDeletes.includes(i.key)).map((i) => i.id)
});
try {
const result = await client.request(queries.DELETE_MEDIA_DOCUMENTS, {
ids: ids
.filter((i) => successfulDeletes.includes(i.key))
.map((i) => i.id),
});
res.send({ returns, result });
} catch (error) {
logger.log("media-delete-error", "ERROR", req.user.email, null, [
{ ids, error: error.message || JSON.stringify(error) }
]);
res.send({returns, result});
} catch (error) {
logger.log("media-delete-error", "ERROR", req.user.email, null, [
{ids, error: error.message || JSON.stringify(error)},
]);
res.json({error});
}
res.json({ error });
}
};
exports.renameKeys = async (req, res) => {
const {documents, tojobid} = req.body;
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
const { documents, tojobid } = req.body;
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
const proms = [];
documents.forEach((d) => {
proms.push(
(async () => {
try {
const res = {
id: d.id,
...(await cloudinary.uploader.rename(d.from, d.to, {
resource_type: DetermineFileType(d.type),
})),
};
return res;
} catch (error) {
return {id: d.id, from: d.from, error: error};
}
})()
);
const proms = [];
documents.forEach((d) => {
proms.push(
(async () => {
try {
const res = {
id: d.id,
...(await cloudinary.uploader.rename(d.from, d.to, {
resource_type: DetermineFileType(d.type)
}))
};
return res;
} catch (error) {
return { id: d.id, from: d.from, error: error };
}
})()
);
});
let result;
result = await Promise.all(proms);
const errors = [];
result
.filter((d) => d.error)
.forEach((d) => {
errors.push(d);
});
let result;
let mutations = "";
result = await Promise.all(proms);
const errors = [];
result
.filter((d) => d.error)
.forEach((d) => {
errors.push(d);
});
result
.filter((d) => !d.error)
.forEach((d, idx) => {
//Create mutation text
let mutations = "";
result
.filter((d) => !d.error)
.forEach((d, idx) => {
//Create mutation text
mutations =
mutations +
`
mutations =
mutations +
`
update_doc${idx}:update_documents_by_pk(pk_columns: { id: "${d.id}" }, _set: {key: "${d.public_id}", jobid: "${tojobid}"}){
id
}
`;
});
});
if (mutations !== "") {
const mutationResult = await client.request(`mutation {
if (mutations !== "") {
const mutationResult = await client.request(`mutation {
${mutations}
}`);
res.json({errors, mutationResult});
} else {
res.json({errors: "No images were succesfully moved on remote server. "});
}
res.json({ errors, mutationResult });
} else {
res.json({ errors: "No images were succesfully moved on remote server. " });
}
};
//Also needs to be updated in upload utility and mobile app.
function DetermineFileType(filetype) {
if (!filetype) return "auto";
else if (filetype.startsWith("image")) return "image";
else if (filetype.startsWith("video")) return "video";
else if (filetype.startsWith("application/pdf")) return "image";
else if (filetype.startsWith("application")) return "raw";
if (!filetype) return "auto";
else if (filetype.startsWith("image")) return "image";
else if (filetype.startsWith("video")) return "video";
else if (filetype.startsWith("application/pdf")) return "image";
else if (filetype.startsWith("application")) return "raw";
return "auto";
return "auto";
}

View File

@@ -9,12 +9,12 @@ const path = require("path");
* @param next
*/
function eventAuthorizationMiddleware(req, res, next) {
if (req.headers["event-secret"] !== process.env.EVENT_SECRET) {
return res.status(401).send("Unauthorized");
}
if (req.headers["event-secret"] !== process.env.EVENT_SECRET) {
return res.status(401).send("Unauthorized");
}
req.isEventAuthorized = true;
next();
req.isEventAuthorized = true;
next();
}
module.exports = eventAuthorizationMiddleware;
module.exports = eventAuthorizationMiddleware;

View File

@@ -11,16 +11,16 @@ const adminEmail = require("../utils/adminEmail");
* @returns {*}
*/
const validateAdminMiddleware = (req, res, next) => {
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log("admin-validation-failed", "ERROR", req.user.email, null, {
request: req.body,
user: req.user,
});
return res.sendStatus(404);
}
if (!adminEmail.includes(req.user.email) && !req.user.ioadmin) {
logger.log("admin-validation-failed", "ERROR", req.user.email, null, {
request: req.body,
user: req.user
});
return res.sendStatus(404);
}
req.isAdmin = true;
next();
req.isAdmin = true;
next();
};
module.exports = validateAdminMiddleware;
module.exports = validateAdminMiddleware;

View File

@@ -12,58 +12,51 @@ const admin = require("firebase-admin");
* @returns {Promise<void>}
*/
const validateFirebaseIdTokenMiddleware = async (req, res, next) => {
if (
(
!req.headers.authorization ||
!req.headers.authorization.startsWith("Bearer ")) &&
!(req.cookies && req.cookies.__session
)
) {
console.error("Unauthorized attempt. No authorization provided.");
return res.status(403).send("Unauthorized");
}
if (
(!req.headers.authorization || !req.headers.authorization.startsWith("Bearer ")) &&
!(req.cookies && req.cookies.__session)
) {
console.error("Unauthorized attempt. No authorization provided.");
return res.status(403).send("Unauthorized");
}
let idToken;
let idToken;
if (
req.headers.authorization &&
req.headers.authorization.startsWith("Bearer ")
) {
// console.log('Found "Authorization" header');
// Read the ID Token from the Authorization header.
idToken = req.headers.authorization.split("Bearer ")[1];
} else if (req.cookies) {
//console.log('Found "__session" cookie');
// Read the ID Token from cookie.
idToken = req.cookies.__session;
} else {
// No cookie
console.error("Unauthorized attempt. No cookie provided.");
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "no-cookie",
});
if (req.headers.authorization && req.headers.authorization.startsWith("Bearer ")) {
// console.log('Found "Authorization" header');
// Read the ID Token from the Authorization header.
idToken = req.headers.authorization.split("Bearer ")[1];
} else if (req.cookies) {
//console.log('Found "__session" cookie');
// Read the ID Token from cookie.
idToken = req.cookies.__session;
} else {
// No cookie
console.error("Unauthorized attempt. No cookie provided.");
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "no-cookie"
});
return res.status(403).send("Unauthorized");
}
return res.status(403).send("Unauthorized");
}
try {
const decodedIdToken = await admin.auth().verifyIdToken(idToken);
//console.log("ID Token correctly decoded", decodedIdToken);
req.user = decodedIdToken;
next();
try {
const decodedIdToken = await admin.auth().verifyIdToken(idToken);
//console.log("ID Token correctly decoded", decodedIdToken);
req.user = decodedIdToken;
next();
} catch (error) {
logger.log("api-unauthorized-call", "WARN", null, null, {
path: req.path,
body: req.body,
} catch (error) {
logger.log("api-unauthorized-call", "WARN", null, null, {
path: req.path,
body: req.body,
type: "unauthroized",
...error
});
type: "unauthroized",
...error,
});
return res.status(401).send("Unauthorized");
}
return res.status(401).send("Unauthorized");
}
};
module.exports = validateFirebaseIdTokenMiddleware;
module.exports = validateFirebaseIdTokenMiddleware;

View File

@@ -1,4 +1,4 @@
const {GraphQLClient} = require("graphql-request");
const { GraphQLClient } = require("graphql-request");
/**
* Middleware to add a GraphQL Client to the request object
@@ -10,15 +10,15 @@ const {GraphQLClient} = require("graphql-request");
* @param next
*/
const withUserGraphQLClientMiddleware = (req, res, next) => {
const BearerToken = req.headers.authorization;
req.userGraphQLClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
req.BearerToken = BearerToken;
const BearerToken = req.headers.authorization;
req.userGraphQLClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken
}
});
req.BearerToken = BearerToken;
next();
next();
};
module.exports = withUserGraphQLClientMiddleware;
module.exports = withUserGraphQLClientMiddleware;

View File

@@ -2,142 +2,124 @@ const path = require("path");
const _ = require("lodash");
const xml2js = require("xml2js");
const queries = require("../graphql-client/queries");
const logger = require('../utils/logger');
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.mixdataUpload = async (req, res) => {
const {bodyshopid} = req.body;
const { bodyshopid } = req.body;
const client = req.userGraphQLClient;
const client = req.userGraphQLClient;
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
try {
for (const element of req.files) {
const b = Buffer.from(element.buffer);
try {
for (const element of req.files) {
const b = Buffer.from(element.buffer);
const inboundRequest = await xml2js.parseStringPromise(b.toString(), {
explicitArray: false
});
const inboundRequest = await xml2js.parseStringPromise(b.toString(), {
explicitArray: false,
});
logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest);
logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest);
const ScaleType = DetermineScaleType(inboundRequest);
const RoNumbersFromInboundRequest = GetListOfRos(inboundRequest, ScaleType);
const ScaleType = DetermineScaleType(inboundRequest);
const RoNumbersFromInboundRequest = GetListOfRos(
inboundRequest,
ScaleType
);
if (RoNumbersFromInboundRequest.length > 0) {
//Query the list of ROs based on the RO number.
const { jobs } = await client.request(queries.QUERY_JOB_ID_MIXDATA, {
roNumbers: RoNumbersFromInboundRequest
});
if (RoNumbersFromInboundRequest.length > 0) {
//Query the list of ROs based on the RO number.
const {jobs} = await client.request(queries.QUERY_JOB_ID_MIXDATA, {
roNumbers: RoNumbersFromInboundRequest,
});
//Create the hash for faster processing for inserts/updates.
const jobHash = {};
jobs.forEach((j) => {
jobHash[j.ro_number] = {
jobid: j.id,
mixdataid: j.mixdata.length > 0 ? j.mixdata[0].id : null,
};
});
const MixDataArray = GenerateMixDataArray(
inboundRequest,
ScaleType,
jobHash
);
const foundJobs = MixDataArray.filter((m) => m.jobid);
const MixDataQuery = `
//Create the hash for faster processing for inserts/updates.
const jobHash = {};
jobs.forEach((j) => {
jobHash[j.ro_number] = {
jobid: j.id,
mixdataid: j.mixdata.length > 0 ? j.mixdata[0].id : null
};
});
const MixDataArray = GenerateMixDataArray(inboundRequest, ScaleType, jobHash);
const foundJobs = MixDataArray.filter((m) => m.jobid);
const MixDataQuery = `
mutation UPSERT_MIXDATA{
${foundJobs
.map((md, idx) => GenerateGqlForMixData(md, idx))
.join(" ")}
${foundJobs.map((md, idx) => GenerateGqlForMixData(md, idx)).join(" ")}
}
`;
if (foundJobs.length > 1) {
const resp = await client.request(MixDataQuery);
}
//Process the list of ROs and return an object to generate the queries.
}
if (foundJobs.length > 1) {
const resp = await client.request(MixDataQuery);
}
res.sendStatus(200);
} catch (error) {
res.status(500).json(error);
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
error: error.message,
...error,
});
//Process the list of ROs and return an object to generate the queries.
}
}
res.sendStatus(200);
} catch (error) {
res.status(500).json(error);
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
error: error.message,
...error
});
}
};
function DetermineScaleType(inboundRequest) {
const ret = {type: "", verson: 0};
const ret = { type: "", verson: 0 };
//PPG Mix Data
if (inboundRequest.PPG && inboundRequest.PPG.Header.Protocol.Name === "PPG") {
return {
type: inboundRequest.PPG.Header.Protocol.Name,
company: "PPG",
version: inboundRequest.PPG.Header.Protocol.Version,
};
}
//PPG Mix Data
if (inboundRequest.PPG && inboundRequest.PPG.Header.Protocol.Name === "PPG") {
return {
type: inboundRequest.PPG.Header.Protocol.Name,
company: "PPG",
version: inboundRequest.PPG.Header.Protocol.Version
};
}
}
function GetListOfRos(inboundRequest, ScaleType) {
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map(
(r) => r.RONumber
);
}
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map((r) => r.RONumber);
}
}
function GenerateMixDataArray(inboundRequest, ScaleType, jobHash) {
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map(
(r) => {
return {
jobid: jobHash[r.RONumber]?.jobid,
id: jobHash[r.RONumber]?.mixdataid,
mixdata: r,
totalliquidcost: r.TotalLiquidCost,
totalsundrycost: r.TotalSundryCost,
company: ScaleType.company,
version: ScaleType.version,
};
}
);
}
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map((r) => {
return {
jobid: jobHash[r.RONumber]?.jobid,
id: jobHash[r.RONumber]?.mixdataid,
mixdata: r,
totalliquidcost: r.TotalLiquidCost,
totalsundrycost: r.TotalSundryCost,
company: ScaleType.company,
version: ScaleType.version
};
});
}
}
function GenerateGqlForMixData(mixdata, key) {
const {id, ...restMixData} = mixdata;
const { id, ...restMixData } = mixdata;
if (id) {
//Update.
return `
update${key}: update_mixdata_by_pk(pk_columns:{id: "${id}"}, _set: ${JSON.stringify(
restMixData
).replace(/"(\w+)"\s*:/g, "$1:")}){
if (id) {
//Update.
return `
update${key}: update_mixdata_by_pk(pk_columns:{id: "${id}"}, _set: ${JSON.stringify(restMixData).replace(
/"(\w+)"\s*:/g,
"$1:"
)}){
id
}
`;
} else {
//Insert
return `
insert${key}: insert_mixdata_one(object: ${JSON.stringify(
restMixData
).replace(/"(\w+)"\s*:/g, "$1:")}){
} else {
//Insert
return `
insert${key}: insert_mixdata_one(object: ${JSON.stringify(restMixData).replace(/"(\w+)"\s*:/g, "$1:")}){
id
}
`;
}
}
}

View File

@@ -1,84 +1,72 @@
require("dotenv").config({
path: require("path").resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: require("path").resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
//const client = require("../graphql-client/graphql-client").client;
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
const client = require("../graphql-client/graphql-client").client;
const {pick, isNil} = require("lodash");
const {getClient} = require('../../libs/awsUtils');
const { pick, isNil } = require("lodash");
const { getClient } = require("../../libs/awsUtils");
async function OpenSearchUpdateHandler(req, res) {
try {
try {
const osClient = await getClient();
const osClient = await getClient();
if (req.body.event.op === "DELETE") {
let response;
response = await osClient.delete({
id: req.body.event.data.old.id,
index: req.body.table.name
});
res.status(200).json(response.body);
} else {
let document;
if (req.body.event.op === "DELETE") {
let response;
response = await osClient.delete({
id: req.body.event.data.old.id,
index: req.body.table.name,
});
res.status(200).json(response.body);
} else {
let document;
switch (req.body.table.name) {
case "jobs":
document = pick(req.body.event.data.new, [
"id",
"bodyshopid",
"clm_no",
"clm_total",
"comment",
"ins_co_nm",
"owner_owing",
"ownr_co_nm",
"ownr_fn",
"ownr_ln",
"ownr_ph1",
"ownr_ph2",
"plate_no",
"ro_number",
"status",
"v_model_yr",
"v_make_desc",
"v_model_desc",
"v_vin",
]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "vehicles":
document = pick(req.body.event.data.new, [
"id",
"v_model_yr",
"v_model_desc",
"v_make_desc",
"v_color",
"v_vin",
"plate_no",
]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "owners":
document = pick(req.body.event.data.new, [
"id",
"ownr_fn",
"ownr_ln",
"ownr_co_nm",
"ownr_ph1",
"ownr_ph2",
]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "bills":
const bill = await client.request(
`query ADMIN_GET_BILL_BY_ID($billId: uuid!) {
switch (req.body.table.name) {
case "jobs":
document = pick(req.body.event.data.new, [
"id",
"bodyshopid",
"clm_no",
"clm_total",
"comment",
"ins_co_nm",
"owner_owing",
"ownr_co_nm",
"ownr_fn",
"ownr_ln",
"ownr_ph1",
"ownr_ph2",
"plate_no",
"ro_number",
"status",
"v_model_yr",
"v_make_desc",
"v_model_desc",
"v_vin"
]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "vehicles":
document = pick(req.body.event.data.new, [
"id",
"v_model_yr",
"v_model_desc",
"v_make_desc",
"v_color",
"v_vin",
"plate_no"
]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "owners":
document = pick(req.body.event.data.new, ["id", "ownr_fn", "ownr_ln", "ownr_co_nm", "ownr_ph1", "ownr_ph2"]);
document.bodyshopid = req.body.event.data.new.shopid;
break;
case "bills":
const bill = await client.request(
`query ADMIN_GET_BILL_BY_ID($billId: uuid!) {
bills_by_pk(id: $billId) {
id
job {
@@ -93,26 +81,26 @@ async function OpenSearchUpdateHandler(req, res) {
}
}
`,
{billId: req.body.event.data.new.id}
);
document = {
...pick(req.body.event.data.new, [
"id",
"date",
"exported",
"exported_at",
"invoice_number",
"is_credit_memo",
"total",
]),
...bill.bills_by_pk,
bodyshopid: bill.bills_by_pk.job.shopid,
};
break;
case "payments":
//Query to get the job and RO number
const payment = await client.request(
`query ADMIN_GET_PAYMENT_BY_ID($paymentId: uuid!) {
{ billId: req.body.event.data.new.id }
);
document = {
...pick(req.body.event.data.new, [
"id",
"date",
"exported",
"exported_at",
"invoice_number",
"is_credit_memo",
"total"
]),
...bill.bills_by_pk,
bodyshopid: bill.bills_by_pk.job.shopid
};
break;
case "payments":
//Query to get the job and RO number
const payment = await client.request(
`query ADMIN_GET_PAYMENT_BY_ID($paymentId: uuid!) {
payments_by_pk(id: $paymentId) {
id
job {
@@ -133,150 +121,146 @@ async function OpenSearchUpdateHandler(req, res) {
}
}
`,
{paymentId: req.body.event.data.new.id}
);
document = {
...pick(req.body.event.data.new, [
"id",
"amount",
"created_at",
"date",
"exportedat",
"memo",
"payer",
"paymentnum",
"transactionid",
"type",
]),
...payment.payments_by_pk,
bodyshopid: payment.payments_by_pk.job.shopid,
};
break;
}
const payload = {
id: req.body.event.data.new.id,
index: req.body.table.name,
body: document,
};
{ paymentId: req.body.event.data.new.id }
);
document = {
...pick(req.body.event.data.new, [
"id",
"amount",
"created_at",
"date",
"exportedat",
"memo",
"payer",
"paymentnum",
"transactionid",
"type"
]),
...payment.payments_by_pk,
bodyshopid: payment.payments_by_pk.job.shopid
};
break;
}
const payload = {
id: req.body.event.data.new.id,
index: req.body.table.name,
body: document
};
const response = await osClient.index(payload);
console.log(response.body);
res.status(200).json(response.body);
}
} catch (error) {
res.status(400).json(JSON.stringify(error));
const response = await osClient.index(payload);
console.log(response.body);
res.status(200).json(response.body);
}
} catch (error) {
res.status(400).json(JSON.stringify(error));
}
}
async function OpenSearchSearchHandler(req, res) {
try {
const {search, bodyshopid, index} = req.body;
try {
const { search, bodyshopid, index } = req.body;
if (!req.user) {
res.sendStatus(401);
return;
}
logger.log("os-search", "DEBUG", req.user.email, null, {
search,
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const assocs = await client
.setHeaders({Authorization: BearerToken})
.request(queries.ACTIVE_SHOP_BY_USER, {
user: req.user.email,
});
if (assocs.length === 0) {
res.sendStatus(401);
}
const osClient = await getClient();
const bodyShopIdMatchOverride = isNil(process.env.BODY_SHOP_ID_MATCH_OVERRIDE) ? assocs.associations[0].shopid : process.env.BODY_SHOP_ID_MATCH_OVERRIDE
const {body} = await osClient.search({
...(index ? {index} : {}),
body: {
size: 100,
query: {
bool: {
must: [
{
match: {
bodyshopid: bodyShopIdMatchOverride,
},
},
{
bool: {
should: [
{
multi_match: {
query: search,
type: "cross_fields",
fields: ["*ownr_fn", "*ownr_ln"],
},
},
{
multi_match: {
query: search,
type: "most_fields",
fields: [
"*v_model_yr",
"*v_make_desc^2",
"*v_model_desc^3",
],
},
},
{
query_string: {
query: `*${search}*`,
// Weighted Fields
fields: [
"*ro_number^20",
"*clm_no^14",
"*v_vin^12",
"*plate_no^12",
"*ownr_ln^10",
"transactionid^10",
"paymentnum^10",
"invoice_number^10",
"*ownr_fn^8",
"*ownr_co_nm^8",
"*ownr_ph1^8",
"*ownr_ph2^8",
"*",
],
},
},
],
minimum_should_match: 1,
},
},
],
},
},
sort: [
{
_score: {
order: "desc",
},
},
],
},
});
res.json(body);
} catch (error) {
console.log(error);
logger.log("os-search-error", "ERROR", req.user.email, null, {
error: JSON.stringify(error),
});
res.status(400).json(error);
if (!req.user) {
res.sendStatus(401);
return;
}
logger.log("os-search", "DEBUG", req.user.email, null, {
search
});
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const assocs = await client.setHeaders({ Authorization: BearerToken }).request(queries.ACTIVE_SHOP_BY_USER, {
user: req.user.email
});
if (assocs.length === 0) {
res.sendStatus(401);
}
const osClient = await getClient();
const bodyShopIdMatchOverride = isNil(process.env.BODY_SHOP_ID_MATCH_OVERRIDE)
? assocs.associations[0].shopid
: process.env.BODY_SHOP_ID_MATCH_OVERRIDE;
const { body } = await osClient.search({
...(index ? { index } : {}),
body: {
size: 100,
query: {
bool: {
must: [
{
match: {
bodyshopid: bodyShopIdMatchOverride
}
},
{
bool: {
should: [
{
multi_match: {
query: search,
type: "cross_fields",
fields: ["*ownr_fn", "*ownr_ln"]
}
},
{
multi_match: {
query: search,
type: "most_fields",
fields: ["*v_model_yr", "*v_make_desc^2", "*v_model_desc^3"]
}
},
{
query_string: {
query: `*${search}*`,
// Weighted Fields
fields: [
"*ro_number^20",
"*clm_no^14",
"*v_vin^12",
"*plate_no^12",
"*ownr_ln^10",
"transactionid^10",
"paymentnum^10",
"invoice_number^10",
"*ownr_fn^8",
"*ownr_co_nm^8",
"*ownr_ph1^8",
"*ownr_ph2^8",
"*"
]
}
}
],
minimum_should_match: 1
}
}
]
}
},
sort: [
{
_score: {
order: "desc"
}
}
]
}
});
res.json(body);
} catch (error) {
console.log(error);
logger.log("os-search-error", "ERROR", req.user.email, null, {
error: JSON.stringify(error)
});
res.status(400).json(error);
}
}
exports.handler = OpenSearchUpdateHandler;

View File

@@ -1,56 +1,50 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const logger = require('../utils/logger');
const {job} = require("../scheduling/scheduling-job");
const logger = require("../utils/logger");
const { job } = require("../scheduling/scheduling-job");
const _ = require("lodash");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
exports.partsScan = async function (req, res) {
const {jobid} = req.body;
const { jobid } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("job-parts-scan", "DEBUG", req.user?.email, jobid, null);
logger.log("job-parts-scan", "DEBUG", req.user?.email, jobid, null);
try {
//Query all jobline data using the user's authorization.
const data = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_PARTS_SCAN, {
id: jobid,
});
try {
//Query all jobline data using the user's authorization.
const data = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PARTS_SCAN, {
id: jobid
});
//Create RegExps once for better performance.
const IdsToMarkCritical = [];
const RegExpressions = data.jobs_by_pk.bodyshop.md_parts_scan.map(
(r) => new RegExp(r.expression, r.flags)
);
//Create RegExps once for better performance.
const IdsToMarkCritical = [];
const RegExpressions = data.jobs_by_pk.bodyshop.md_parts_scan.map((r) => new RegExp(r.expression, r.flags));
//Check each line against each regex rule.
data.jobs_by_pk.joblines.forEach((jobline) => {
RegExpressions.forEach((rExp) => {
if (jobline.line_desc.match(rExp)) {
IdsToMarkCritical.push(jobline);
}
});
});
//Check each line against each regex rule.
data.jobs_by_pk.joblines.forEach((jobline) => {
RegExpressions.forEach((rExp) => {
if (jobline.line_desc.match(rExp)) {
IdsToMarkCritical.push(jobline);
}
});
});
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.UPDATE_PARTS_CRITICAL, {
IdsToMarkCritical: _.uniqBy(IdsToMarkCritical, "id").map((i) => i.id),
jobid: jobid,
});
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.UPDATE_PARTS_CRITICAL, {
IdsToMarkCritical: _.uniqBy(IdsToMarkCritical, "id").map((i) => i.id),
jobid: jobid
});
res.status(200).json(result);
} catch (error) {
logger.log("job-parts-scan-error", "ERROR", req.user.email, jobid, {
jobid,
error,
});
res.status(400).json(JSON.stringify(error));
}
res.status(200).json(result);
} catch (error) {
logger.log("job-parts-scan-error", "ERROR", req.user.email, jobid, {
jobid,
error
});
res.status(400).json(JSON.stringify(error));
}
};

View File

@@ -2,126 +2,106 @@ const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const {
CalculateExpectedHoursForJob,
CalculateTicketsHoursForJob,
} = require("./pay-all");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
exports.calculatelabor = async function (req, res) {
const {jobid, calculateOnly} = req.body;
logger.log("job-payroll-calculate-labor", "DEBUG", req.user.email, jobid, null);
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-calculate-labor", "DEBUG", req.user.email, jobid, null);
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
const {jobs_by_pk: job} = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_JOB_PAYROLL_DATA, {
id: jobid,
});
try {
const { jobs_by_pk: job } = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOB_PAYROLL_DATA, {
id: jobid
});
//iterate over each ticket, building a hash of team -> employee to calculate total assigned hours.
const {employeeHash, assignmentHash} = CalculateExpectedHoursForJob(job);
const ticketHash = CalculateTicketsHoursForJob(job);
//iterate over each ticket, building a hash of team -> employee to calculate total assigned hours.
const { employeeHash, assignmentHash } = CalculateExpectedHoursForJob(job);
const ticketHash = CalculateTicketsHoursForJob(job);
const totals = [];
const totals = [];
//Iteratively go through all 4 levels of the object and create an array that can be presented.
// use the employee hash as the golden record (i.e. what they should have), and add what they've claimed.
//While going through, delete items from ticket hash.
//Anything left in ticket hash is an extra entered item.
//Iteratively go through all 4 levels of the object and create an array that can be presented.
// use the employee hash as the golden record (i.e. what they should have), and add what they've claimed.
//While going through, delete items from ticket hash.
//Anything left in ticket hash is an extra entered item.
Object.keys(employeeHash).forEach((employeeIdKey) => {
//At the employee level.
Object.keys(employeeHash[employeeIdKey]).forEach((laborTypeKey) => {
//At the labor level
Object.keys(employeeHash[employeeIdKey][laborTypeKey]).forEach(
(rateKey) => {
//At the rate level.
const expectedHours =
employeeHash[employeeIdKey][laborTypeKey][rateKey];
//Will the following line fail? Probably if it doesn't exist.
const claimedHours = get(
ticketHash,
`${employeeIdKey}.${laborTypeKey}.${rateKey}`
);
if (claimedHours) {
delete ticketHash[employeeIdKey][laborTypeKey][rateKey];
}
Object.keys(employeeHash).forEach((employeeIdKey) => {
//At the employee level.
Object.keys(employeeHash[employeeIdKey]).forEach((laborTypeKey) => {
//At the labor level
Object.keys(employeeHash[employeeIdKey][laborTypeKey]).forEach((rateKey) => {
//At the rate level.
const expectedHours = employeeHash[employeeIdKey][laborTypeKey][rateKey];
//Will the following line fail? Probably if it doesn't exist.
const claimedHours = get(ticketHash, `${employeeIdKey}.${laborTypeKey}.${rateKey}`);
if (claimedHours) {
delete ticketHash[employeeIdKey][laborTypeKey][rateKey];
}
totals.push({
employeeid: employeeIdKey,
rate: rateKey,
mod_lbr_ty: laborTypeKey,
expectedHours,
claimedHours: claimedHours || 0,
});
}
);
});
totals.push({
employeeid: employeeIdKey,
rate: rateKey,
mod_lbr_ty: laborTypeKey,
expectedHours,
claimedHours: claimedHours || 0
});
});
});
});
Object.keys(ticketHash).forEach((employeeIdKey) => {
//At the employee level.
Object.keys(ticketHash[employeeIdKey]).forEach((laborTypeKey) => {
//At the labor level
Object.keys(ticketHash[employeeIdKey][laborTypeKey]).forEach(
(rateKey) => {
//At the rate level.
const expectedHours = 0;
//Will the following line fail? Probably if it doesn't exist.
const claimedHours = get(
ticketHash,
`${employeeIdKey}.${laborTypeKey}.${rateKey}`
);
if (claimedHours) {
delete ticketHash[employeeIdKey][laborTypeKey][rateKey];
}
Object.keys(ticketHash).forEach((employeeIdKey) => {
//At the employee level.
Object.keys(ticketHash[employeeIdKey]).forEach((laborTypeKey) => {
//At the labor level
Object.keys(ticketHash[employeeIdKey][laborTypeKey]).forEach((rateKey) => {
//At the rate level.
const expectedHours = 0;
//Will the following line fail? Probably if it doesn't exist.
const claimedHours = get(ticketHash, `${employeeIdKey}.${laborTypeKey}.${rateKey}`);
if (claimedHours) {
delete ticketHash[employeeIdKey][laborTypeKey][rateKey];
}
totals.push({
employeeid: employeeIdKey,
rate: rateKey,
mod_lbr_ty: laborTypeKey,
expectedHours,
claimedHours: claimedHours || 0,
});
}
);
});
totals.push({
employeeid: employeeIdKey,
rate: rateKey,
mod_lbr_ty: laborTypeKey,
expectedHours,
claimedHours: claimedHours || 0
});
});
if (assignmentHash.unassigned > 0) {
totals.push({
employeeid: undefined,
//rate: rateKey,
//mod_lbr_ty: laborTypeKey,
expectedHours: assignmentHash.unassigned,
claimedHours: 0,
});
}
res.json(totals);
//res.json(assignmentHash);
} catch (error) {
logger.log(
"job-payroll-calculate-labor-error",
"ERROR",
req.user.email,
jobid,
{
jobid: jobid,
error,
}
);
res.status(503).send();
});
});
if (assignmentHash.unassigned > 0) {
totals.push({
employeeid: undefined,
//rate: rateKey,
//mod_lbr_ty: laborTypeKey,
expectedHours: assignmentHash.unassigned,
claimedHours: 0
});
}
res.json(totals);
//res.json(assignmentHash);
} catch (error) {
logger.log("job-payroll-calculate-labor-error", "ERROR", req.user.email, jobid, {
jobid: jobid,
error
});
res.status(503).send();
}
};
get = function (obj, key) {
return key.split(".").reduce(function (o, x) {
return typeof o == "undefined" || o === null ? o : o[x];
}, obj);
return key.split(".").reduce(function (o, x) {
return typeof o == "undefined" || o === null ? o : o[x];
}, obj);
};

View File

@@ -2,106 +2,87 @@ const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const {
CalculateExpectedHoursForJob,
CalculateTicketsHoursForJob,
} = require("./pay-all");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
const moment = require("moment");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
exports.claimtask = async function (req, res) {
const {jobid, task, calculateOnly, employee} = req.body;
logger.log("job-payroll-pay-all", "DEBUG", req.user.email, jobid, null);
const { jobid, task, calculateOnly, employee } = req.body;
logger.log("job-payroll-pay-all", "DEBUG", req.user.email, jobid, null);
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
const {jobs_by_pk: job} = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_JOB_PAYROLL_DATA, {
id: jobid,
});
try {
const { jobs_by_pk: job } = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOB_PAYROLL_DATA, {
id: jobid
});
const theTaskPreset = job.bodyshop.md_tasks_presets.presets.find(
(tp) => tp.name === task
);
if (!theTaskPreset) {
res
.status(400)
.json({success: false, error: "Provided task preset not found."});
return;
}
//Get all of the assignments that are filtered.
const {assignmentHash, employeeHash} = CalculateExpectedHoursForJob(
job,
theTaskPreset.hourstype
);
const ticketsToInsert = [];
//Then add them in based on a percentage to each employee.
Object.keys(employeeHash).forEach((employeeIdKey) => {
//At the employee level.
Object.keys(employeeHash[employeeIdKey]).forEach((laborTypeKey) => {
//At the labor level
Object.keys(employeeHash[employeeIdKey][laborTypeKey]).forEach(
(rateKey) => {
//At the rate level.
const expectedHours =
employeeHash[employeeIdKey][laborTypeKey][rateKey] *
(theTaskPreset.percent / 100);
ticketsToInsert.push({
task_name: task,
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: employeeIdKey,
productivehrs: expectedHours,
rate: rateKey,
ciecacode: laborTypeKey,
flat_rate: true,
cost_center:
job.bodyshop.md_responsibility_centers.defaults.costs[
laborTypeKey
],
memo: `*Flagged Task* ${theTaskPreset.memo}`,
});
}
);
});
});
if (!calculateOnly) {
//Insert the time ticekts if we're not just calculating them.
const insertResult = await client.request(queries.INSERT_TIME_TICKETS, {
timetickets: ticketsToInsert.filter(
(ticket) => ticket.productivehrs !== 0
),
});
const updateResult = await client.request(queries.UPDATE_JOB, {
jobId: job.id,
job: {
status: theTaskPreset.nextstatus,
completed_tasks: [
...job.completed_tasks,
{
name: task,
completedat: moment(),
completed_by: employee,
useremail: req.user.email,
},
],
},
});
}
res.json({unassignedHours: assignmentHash.unassigned, ticketsToInsert});
} catch (error) {
logger.log("job-payroll-claim-task-error", "ERROR", req.user.email, jobid, {
jobid: jobid,
error,
});
res.status(503).send();
const theTaskPreset = job.bodyshop.md_tasks_presets.presets.find((tp) => tp.name === task);
if (!theTaskPreset) {
res.status(400).json({ success: false, error: "Provided task preset not found." });
return;
}
//Get all of the assignments that are filtered.
const { assignmentHash, employeeHash } = CalculateExpectedHoursForJob(job, theTaskPreset.hourstype);
const ticketsToInsert = [];
//Then add them in based on a percentage to each employee.
Object.keys(employeeHash).forEach((employeeIdKey) => {
//At the employee level.
Object.keys(employeeHash[employeeIdKey]).forEach((laborTypeKey) => {
//At the labor level
Object.keys(employeeHash[employeeIdKey][laborTypeKey]).forEach((rateKey) => {
//At the rate level.
const expectedHours = employeeHash[employeeIdKey][laborTypeKey][rateKey] * (theTaskPreset.percent / 100);
ticketsToInsert.push({
task_name: task,
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: employeeIdKey,
productivehrs: expectedHours,
rate: rateKey,
ciecacode: laborTypeKey,
flat_rate: true,
cost_center: job.bodyshop.md_responsibility_centers.defaults.costs[laborTypeKey],
memo: `*Flagged Task* ${theTaskPreset.memo}`
});
});
});
});
if (!calculateOnly) {
//Insert the time ticekts if we're not just calculating them.
const insertResult = await client.request(queries.INSERT_TIME_TICKETS, {
timetickets: ticketsToInsert.filter((ticket) => ticket.productivehrs !== 0)
});
const updateResult = await client.request(queries.UPDATE_JOB, {
jobId: job.id,
job: {
status: theTaskPreset.nextstatus,
completed_tasks: [
...job.completed_tasks,
{
name: task,
completedat: moment(),
completed_by: employee,
useremail: req.user.email
}
]
}
});
}
res.json({ unassignedHours: assignmentHash.unassigned, ticketsToInsert });
} catch (error) {
logger.log("job-payroll-claim-task-error", "ERROR", req.user.email, jobid, {
jobid: jobid,
error
});
res.status(503).send();
}
};

View File

@@ -5,321 +5,271 @@ const _ = require("lodash");
const rdiff = require("recursive-diff");
const logger = require("../utils/logger");
const {json} = require("body-parser");
const { json } = require("body-parser");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
exports.payall = async function (req, res) {
const {jobid, calculateOnly} = req.body;
logger.log("job-payroll-pay-all", "DEBUG", req.user.email, jobid, null);
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-pay-all", "DEBUG", req.user.email, jobid, null);
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
const {jobs_by_pk: job} = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_JOB_PAYROLL_DATA, {
id: jobid,
});
try {
const { jobs_by_pk: job } = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOB_PAYROLL_DATA, {
id: jobid
});
//iterate over each ticket, building a hash of team -> employee to calculate total assigned hours.
//iterate over each ticket, building a hash of team -> employee to calculate total assigned hours.
const {employeeHash, assignmentHash} = CalculateExpectedHoursForJob(job);
const ticketHash = CalculateTicketsHoursForJob(job);
if (assignmentHash.unassigned > 0) {
res.json({success: false, error: "Not all hours have been assigned."});
return;
}
//Calculate how much time each tech should have by labor type.
//Doing this order creates a diff of changes on the ticket hash to make it the same as the employee hash.
const recursiveDiff = rdiff.getDiff(ticketHash, employeeHash, true);
const ticketsToInsert = [];
recursiveDiff.forEach((diff) => {
//Every iteration is what we would need to insert into the time ticket hash
//so that it would match the employee hash exactly.
const path = diffParser(diff);
if (diff.op === "add") {
console.log(Object.keys(diff.val));
if (typeof diff.val === "object" && Object.keys(diff.val).length > 1) {
//Multiple values to add.
Object.keys(diff.val).forEach((key) => {
console.log("Hours", diff.val[key][Object.keys(diff.val[key])[0]]);
console.log("Rate", Object.keys(diff.val[key])[0]);
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: diff.val[key][Object.keys(diff.val[key])[0]],
rate: Object.keys(diff.val[key])[0],
ciecacode: key,
cost_center:
job.bodyshop.md_responsibility_centers.defaults.costs[key],
flat_rate: true,
memo: `Add unflagged hours. (${req.user.email})`,
});
});
} else {
//Only the 1 value to add.
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: path.hours,
rate: path.rate,
ciecacode: path.mod_lbr_ty,
flat_rate: true,
cost_center:
job.bodyshop.md_responsibility_centers.defaults.costs[
path.mod_lbr_ty
],
memo: `Add unflagged hours. (${req.user.email})`,
});
}
} else if (diff.op === "update") {
//An old ticket amount isn't sufficient
//We can't modify the existing ticket, it might already be committed. So let's add a new one instead.
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: diff.val - diff.oldVal,
rate: path.rate,
ciecacode: path.mod_lbr_ty,
flat_rate: true,
cost_center:
job.bodyshop.md_responsibility_centers.defaults.costs[
path.mod_lbr_ty
],
memo: `Adjust flagged hours per assignment. (${req.user.email})`,
});
} else {
//Has to be a delete
if (
typeof diff.oldVal === "object" &&
Object.keys(diff.oldVal).length > 1
) {
//Multiple oldValues to add.
Object.keys(diff.oldVal).forEach((key) => {
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs:
diff.oldVal[key][Object.keys(diff.oldVal[key])[0]] * -1,
rate: Object.keys(diff.oldVal[key])[0],
ciecacode: key,
cost_center:
job.bodyshop.md_responsibility_centers.defaults.costs[key],
flat_rate: true,
memo: `Remove flagged hours per assignment. (${req.user.email})`,
});
});
} else {
//Only the 1 value to add.
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: path.hours * -1,
rate: path.rate,
ciecacode: path.mod_lbr_ty,
cost_center:
job.bodyshop.md_responsibility_centers.defaults.costs[
path.mod_lbr_ty
],
flat_rate: true,
memo: `Remove flagged hours per assignment. (${req.user.email})`,
});
}
}
});
const insertResult = await client.request(queries.INSERT_TIME_TICKETS, {
timetickets: ticketsToInsert.filter(
(ticket) => ticket.productivehrs !== 0
),
});
res.json(ticketsToInsert.filter((ticket) => ticket.productivehrs !== 0));
} catch (error) {
logger.log(
"job-payroll-labor-totals-error",
"ERROR",
req.user.email,
jobid,
{
jobid: jobid,
error: JSON.stringify(error),
}
);
res.status(400).json({error: error.message});
const { employeeHash, assignmentHash } = CalculateExpectedHoursForJob(job);
const ticketHash = CalculateTicketsHoursForJob(job);
if (assignmentHash.unassigned > 0) {
res.json({ success: false, error: "Not all hours have been assigned." });
return;
}
//Calculate how much time each tech should have by labor type.
//Doing this order creates a diff of changes on the ticket hash to make it the same as the employee hash.
const recursiveDiff = rdiff.getDiff(ticketHash, employeeHash, true);
const ticketsToInsert = [];
recursiveDiff.forEach((diff) => {
//Every iteration is what we would need to insert into the time ticket hash
//so that it would match the employee hash exactly.
const path = diffParser(diff);
if (diff.op === "add") {
console.log(Object.keys(diff.val));
if (typeof diff.val === "object" && Object.keys(diff.val).length > 1) {
//Multiple values to add.
Object.keys(diff.val).forEach((key) => {
console.log("Hours", diff.val[key][Object.keys(diff.val[key])[0]]);
console.log("Rate", Object.keys(diff.val[key])[0]);
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: diff.val[key][Object.keys(diff.val[key])[0]],
rate: Object.keys(diff.val[key])[0],
ciecacode: key,
cost_center: job.bodyshop.md_responsibility_centers.defaults.costs[key],
flat_rate: true,
memo: `Add unflagged hours. (${req.user.email})`
});
});
} else {
//Only the 1 value to add.
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: path.hours,
rate: path.rate,
ciecacode: path.mod_lbr_ty,
flat_rate: true,
cost_center: job.bodyshop.md_responsibility_centers.defaults.costs[path.mod_lbr_ty],
memo: `Add unflagged hours. (${req.user.email})`
});
}
} else if (diff.op === "update") {
//An old ticket amount isn't sufficient
//We can't modify the existing ticket, it might already be committed. So let's add a new one instead.
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: diff.val - diff.oldVal,
rate: path.rate,
ciecacode: path.mod_lbr_ty,
flat_rate: true,
cost_center: job.bodyshop.md_responsibility_centers.defaults.costs[path.mod_lbr_ty],
memo: `Adjust flagged hours per assignment. (${req.user.email})`
});
} else {
//Has to be a delete
if (typeof diff.oldVal === "object" && Object.keys(diff.oldVal).length > 1) {
//Multiple oldValues to add.
Object.keys(diff.oldVal).forEach((key) => {
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: diff.oldVal[key][Object.keys(diff.oldVal[key])[0]] * -1,
rate: Object.keys(diff.oldVal[key])[0],
ciecacode: key,
cost_center: job.bodyshop.md_responsibility_centers.defaults.costs[key],
flat_rate: true,
memo: `Remove flagged hours per assignment. (${req.user.email})`
});
});
} else {
//Only the 1 value to add.
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,
bodyshopid: job.bodyshop.id,
employeeid: path.employeeid,
productivehrs: path.hours * -1,
rate: path.rate,
ciecacode: path.mod_lbr_ty,
cost_center: job.bodyshop.md_responsibility_centers.defaults.costs[path.mod_lbr_ty],
flat_rate: true,
memo: `Remove flagged hours per assignment. (${req.user.email})`
});
}
}
});
const insertResult = await client.request(queries.INSERT_TIME_TICKETS, {
timetickets: ticketsToInsert.filter((ticket) => ticket.productivehrs !== 0)
});
res.json(ticketsToInsert.filter((ticket) => ticket.productivehrs !== 0));
} catch (error) {
logger.log("job-payroll-labor-totals-error", "ERROR", req.user.email, jobid, {
jobid: jobid,
error: JSON.stringify(error)
});
res.status(400).json({ error: error.message });
}
};
function diffParser(diff) {
const type = typeof diff.oldVal;
let mod_lbr_ty, rate, hours;
const type = typeof diff.oldVal;
let mod_lbr_ty, rate, hours;
if (diff.path.length === 1) {
if (diff.op === "add") {
mod_lbr_ty = Object.keys(diff.val)[0];
rate = Object.keys(diff.val[mod_lbr_ty])[0];
// hours = diff.oldVal[mod_lbr_ty][rate];
} else {
mod_lbr_ty = Object.keys(diff.oldVal)[0];
rate = Object.keys(diff.oldVal[mod_lbr_ty])[0];
// hours = diff.oldVal[mod_lbr_ty][rate];
}
} else if (diff.path.length === 2) {
mod_lbr_ty = diff.path[1];
if (diff.op === "add") {
rate = Object.keys(diff.val)[0];
} else {
rate = Object.keys(diff.oldVal)[0];
}
} else if (diff.path.length === 3) {
mod_lbr_ty = diff.path[1];
rate = diff.path[2];
//hours = 0;
}
//Set the hours
if (
typeof diff.val === "number" &&
diff.val !== null &&
diff.val !== undefined
) {
hours = diff.val;
} else if (diff.val !== null && diff.val !== undefined) {
if (diff.path.length === 1) {
hours =
diff.val[Object.keys(diff.val)[0]][
Object.keys(diff.val[Object.keys(diff.val)[0]])
];
} else {
hours = diff.val[Object.keys(diff.val)[0]];
}
} else if (
typeof diff.oldVal === "number" &&
diff.oldVal !== null &&
diff.oldVal !== undefined
) {
hours = diff.oldVal;
if (diff.path.length === 1) {
if (diff.op === "add") {
mod_lbr_ty = Object.keys(diff.val)[0];
rate = Object.keys(diff.val[mod_lbr_ty])[0];
// hours = diff.oldVal[mod_lbr_ty][rate];
} else {
hours = diff.oldVal[Object.keys(diff.oldVal)[0]];
mod_lbr_ty = Object.keys(diff.oldVal)[0];
rate = Object.keys(diff.oldVal[mod_lbr_ty])[0];
// hours = diff.oldVal[mod_lbr_ty][rate];
}
} else if (diff.path.length === 2) {
mod_lbr_ty = diff.path[1];
if (diff.op === "add") {
rate = Object.keys(diff.val)[0];
} else {
rate = Object.keys(diff.oldVal)[0];
}
} else if (diff.path.length === 3) {
mod_lbr_ty = diff.path[1];
rate = diff.path[2];
//hours = 0;
}
const ret = {
multiVal: false,
employeeid: diff.path[0], // Always True
mod_lbr_ty,
rate,
hours,
};
return ret;
//Set the hours
if (typeof diff.val === "number" && diff.val !== null && diff.val !== undefined) {
hours = diff.val;
} else if (diff.val !== null && diff.val !== undefined) {
if (diff.path.length === 1) {
hours = diff.val[Object.keys(diff.val)[0]][Object.keys(diff.val[Object.keys(diff.val)[0]])];
} else {
hours = diff.val[Object.keys(diff.val)[0]];
}
} else if (typeof diff.oldVal === "number" && diff.oldVal !== null && diff.oldVal !== undefined) {
hours = diff.oldVal;
} else {
hours = diff.oldVal[Object.keys(diff.oldVal)[0]];
}
const ret = {
multiVal: false,
employeeid: diff.path[0], // Always True
mod_lbr_ty,
rate,
hours
};
return ret;
}
function CalculateExpectedHoursForJob(job, filterToLbrTypes) {
const assignmentHash = {unassigned: 0};
const employeeHash = {}; // employeeid => Cieca labor type => rate => hours. Contains how many hours each person should be paid.
job.joblines
.filter((jobline) => {
if (!filterToLbrTypes) return true;
else {
return (
filterToLbrTypes.includes(jobline.mod_lbr_ty) ||
(jobline.convertedtolbr &&
filterToLbrTypes.includes(jobline.convertedtolbr_data.mod_lbr_ty))
);
const assignmentHash = { unassigned: 0 };
const employeeHash = {}; // employeeid => Cieca labor type => rate => hours. Contains how many hours each person should be paid.
job.joblines
.filter((jobline) => {
if (!filterToLbrTypes) return true;
else {
return (
filterToLbrTypes.includes(jobline.mod_lbr_ty) ||
(jobline.convertedtolbr && filterToLbrTypes.includes(jobline.convertedtolbr_data.mod_lbr_ty))
);
}
})
.forEach((jobline) => {
if (jobline.convertedtolbr) {
        // Line has been converted to labor. Temporarily re-assign the hours.
jobline.mod_lbr_ty = jobline.convertedtolbr_data.mod_lbr_ty;
jobline.mod_lb_hrs += jobline.convertedtolbr_data.mod_lb_hrs;
}
if (jobline.mod_lb_hrs != 0) {
//Check if the line is assigned. If not, keep track of it as an unassigned line by type.
if (jobline.assigned_team === null) {
assignmentHash.unassigned = assignmentHash.unassigned + jobline.mod_lb_hrs;
} else {
//Line is assigned.
if (!assignmentHash[jobline.assigned_team]) {
assignmentHash[jobline.assigned_team] = 0;
}
assignmentHash[jobline.assigned_team] = assignmentHash[jobline.assigned_team] + jobline.mod_lb_hrs;
//Create the assignment breakdown.
const theTeam = job.bodyshop.employee_teams.find((team) => team.id === jobline.assigned_team);
theTeam.employee_team_members.forEach((tm) => {
//Figure out how many hours they are owed at this line, and at what rate.
if (!employeeHash[tm.employee.id]) {
employeeHash[tm.employee.id] = {};
}
})
.forEach((jobline) => {
if (jobline.convertedtolbr) {
          // Line has been converted to labor. Temporarily re-assign the hours.
jobline.mod_lbr_ty = jobline.convertedtolbr_data.mod_lbr_ty;
jobline.mod_lb_hrs += jobline.convertedtolbr_data.mod_lb_hrs;
if (!employeeHash[tm.employee.id][jobline.mod_lbr_ty]) {
employeeHash[tm.employee.id][jobline.mod_lbr_ty] = {};
}
if (jobline.mod_lb_hrs != 0) {
//Check if the line is assigned. If not, keep track of it as an unassigned line by type.
if (jobline.assigned_team === null) {
assignmentHash.unassigned =
assignmentHash.unassigned + jobline.mod_lb_hrs;
} else {
//Line is assigned.
if (!assignmentHash[jobline.assigned_team]) {
assignmentHash[jobline.assigned_team] = 0;
}
assignmentHash[jobline.assigned_team] =
assignmentHash[jobline.assigned_team] + jobline.mod_lb_hrs;
//Create the assignment breakdown.
const theTeam = job.bodyshop.employee_teams.find(
(team) => team.id === jobline.assigned_team
);
theTeam.employee_team_members.forEach((tm) => {
//Figure out how many hours they are owed at this line, and at what rate.
if (!employeeHash[tm.employee.id]) {
employeeHash[tm.employee.id] = {};
}
if (!employeeHash[tm.employee.id][jobline.mod_lbr_ty]) {
employeeHash[tm.employee.id][jobline.mod_lbr_ty] = {};
}
if (
!employeeHash[tm.employee.id][jobline.mod_lbr_ty][
tm.labor_rates[jobline.mod_lbr_ty]
]
) {
employeeHash[tm.employee.id][jobline.mod_lbr_ty][
tm.labor_rates[jobline.mod_lbr_ty]
] = 0;
}
const hoursOwed = (tm.percentage * jobline.mod_lb_hrs) / 100;
employeeHash[tm.employee.id][jobline.mod_lbr_ty][
tm.labor_rates[jobline.mod_lbr_ty]
] =
employeeHash[tm.employee.id][jobline.mod_lbr_ty][
tm.labor_rates[jobline.mod_lbr_ty]
] + hoursOwed;
});
}
if (!employeeHash[tm.employee.id][jobline.mod_lbr_ty][tm.labor_rates[jobline.mod_lbr_ty]]) {
employeeHash[tm.employee.id][jobline.mod_lbr_ty][tm.labor_rates[jobline.mod_lbr_ty]] = 0;
}
});
return {assignmentHash, employeeHash};
const hoursOwed = (tm.percentage * jobline.mod_lb_hrs) / 100;
employeeHash[tm.employee.id][jobline.mod_lbr_ty][tm.labor_rates[jobline.mod_lbr_ty]] =
employeeHash[tm.employee.id][jobline.mod_lbr_ty][tm.labor_rates[jobline.mod_lbr_ty]] + hoursOwed;
});
}
}
});
return { assignmentHash, employeeHash };
}
function CalculateTicketsHoursForJob(job) {
const ticketHash = {}; // employeeid => Cieca labor type => rate => hours.
//Calculate how much each employee has been paid so far.
job.timetickets.forEach((ticket) => {
if (!ticketHash[ticket.employeeid]) {
ticketHash[ticket.employeeid] = {};
}
if (!ticketHash[ticket.employeeid][ticket.ciecacode]) {
ticketHash[ticket.employeeid][ticket.ciecacode] = {};
}
if (!ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate]) {
ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate] = 0;
}
ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate] =
ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate] +
ticket.productivehrs;
});
return ticketHash;
const ticketHash = {}; // employeeid => Cieca labor type => rate => hours.
//Calculate how much each employee has been paid so far.
job.timetickets.forEach((ticket) => {
if (!ticketHash[ticket.employeeid]) {
ticketHash[ticket.employeeid] = {};
}
if (!ticketHash[ticket.employeeid][ticket.ciecacode]) {
ticketHash[ticket.employeeid][ticket.ciecacode] = {};
}
if (!ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate]) {
ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate] = 0;
}
ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate] =
ticketHash[ticket.employeeid][ticket.ciecacode][ticket.rate] + ticket.productivehrs;
});
return ticketHash;
}
exports.CalculateExpectedHoursForJob = CalculateExpectedHoursForJob;

View File

@@ -1,29 +1,26 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../utils/logger");
const inlineCssTool = require("inline-css");
exports.inlinecss = (req, res) => {
//Perform request validation
//Perform request validation
logger.log("email-inline-css", "DEBUG", req.user.email, null, null);
logger.log("email-inline-css", "DEBUG", req.user.email, null, null);
const {html, url} = req.body;
const { html, url } = req.body;
inlineCssTool(html, {url: url})
.then((inlinedHtml) => {
res.send(inlinedHtml);
})
.catch((error) => {
logger.log("email-inline-css-error", "ERROR", req.user.email, null, {
error,
});
inlineCssTool(html, { url: url })
.then((inlinedHtml) => {
res.send(inlinedHtml);
})
.catch((error) => {
logger.log("email-inline-css-error", "ERROR", req.user.email, null, {
error
});
res.send(error);
});
res.send(error);
});
};

View File

@@ -1,13 +1,13 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {payments, payables, receivables} = require("../accounting/qbxml/qbxml");
const { payments, payables, receivables } = require("../accounting/qbxml/qbxml");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.use(validateFirebaseIdTokenMiddleware);
router.post('/qbxml/receivables', withUserGraphQLClientMiddleware, receivables);
router.post('/qbxml/payables', withUserGraphQLClientMiddleware, payables);
router.post('/qbxml/payments', withUserGraphQLClientMiddleware, payments);
router.post("/qbxml/receivables", withUserGraphQLClientMiddleware, receivables);
router.post("/qbxml/payables", withUserGraphQLClientMiddleware, payables);
router.post("/qbxml/payments", withUserGraphQLClientMiddleware, payments);
module.exports = router;

View File

@@ -1,18 +1,18 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const fb = require('../firebase/firebase-handler');
const fb = require("../firebase/firebase-handler");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {createAssociation, createShop, updateShop, updateCounter} = require("../admin/adminops");
const { createAssociation, createShop, updateShop, updateCounter } = require("../admin/adminops");
const validateAdminMiddleware = require("../middleware/validateAdminMiddleware");
router.use(validateFirebaseIdTokenMiddleware);
router.post('/createassociation', validateAdminMiddleware, createAssociation);
router.post('/createshop', validateAdminMiddleware, createShop);
router.post('/updateshop', validateAdminMiddleware, updateShop);
router.post('/updatecounter', validateAdminMiddleware, updateCounter);
router.post('/updateuser', fb.updateUser);
router.post('/getuser', fb.getUser);
router.post('/createuser', fb.createUser);
router.post("/createassociation", validateAdminMiddleware, createAssociation);
router.post("/createshop", validateAdminMiddleware, createShop);
router.post("/updateshop", validateAdminMiddleware, updateShop);
router.post("/updatecounter", validateAdminMiddleware, updateCounter);
router.post("/updateuser", fb.updateUser);
router.post("/getuser", fb.getUser);
router.post("/createuser", fb.createUser);
module.exports = router;

View File

@@ -1,11 +1,11 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const cdkGetMake = require('../cdk/cdk-get-makes');
const cdkGetMake = require("../cdk/cdk-get-makes");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.use(validateFirebaseIdTokenMiddleware);
router.post('/getvehicles', withUserGraphQLClientMiddleware, cdkGetMake.default);
router.post("/getvehicles", withUserGraphQLClientMiddleware, cdkGetMake.default);
module.exports = router;

View File

@@ -1,6 +1,6 @@
const express = require("express");
const router = express.Router();
const {lookup, submit} = require("../csi/csi");
const { lookup, submit } = require("../csi/csi");
router.post("/lookup", lookup);
router.post("/submit", submit);

View File

@@ -1,9 +1,9 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {autohouse, claimscorp, kaizen} = require('../data/data');
const { autohouse, claimscorp, kaizen } = require("../data/data");
router.post('/ah', autohouse);
router.post('/cc', claimscorp);
router.post('/kaizen', kaizen);
router.post("/ah", autohouse);
router.post("/cc", claimscorp);
router.post("/kaizen", kaizen);
module.exports = router;

View File

@@ -1,11 +1,11 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {lightbox_credentials, payment_refund, generate_payment_url, postback} = require("../intellipay/intellipay");
const { lightbox_credentials, payment_refund, generate_payment_url, postback } = require("../intellipay/intellipay");
router.post('/lightbox_credentials', validateFirebaseIdTokenMiddleware, lightbox_credentials);
router.post('/payment_refund', validateFirebaseIdTokenMiddleware, payment_refund);
router.post('/generate_payment_url', validateFirebaseIdTokenMiddleware, generate_payment_url);
router.post('/postback', postback);
router.post("/lightbox_credentials", validateFirebaseIdTokenMiddleware, lightbox_credentials);
router.post("/payment_refund", validateFirebaseIdTokenMiddleware, payment_refund);
router.post("/generate_payment_url", validateFirebaseIdTokenMiddleware, generate_payment_url);
router.post("/postback", postback);
module.exports = router;

View File

@@ -1,20 +1,20 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const job = require('../job/job');
const ppc = require('../ccc/partspricechange')
const {partsScan} = require('../parts-scan/parts-scan');
const eventAuthorizationMiddleware = require('../middleware/eventAuthorizationMIddleware');
const job = require("../job/job");
const ppc = require("../ccc/partspricechange");
const { partsScan } = require("../parts-scan/parts-scan");
const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMIddleware");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {totals, statustransition, totalsSsu, costing, lifecycle, costingmulti} = require("../job/job");
const { totals, statustransition, totalsSsu, costing, lifecycle, costingmulti } = require("../job/job");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.post('/totals', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, totals);
router.post('/statustransition', eventAuthorizationMiddleware, statustransition);
router.post('/totalsssu', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, totalsSsu);
router.post('/costing', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, costing);
router.post('/lifecycle', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, lifecycle);
router.post('/costingmulti', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, costingmulti);
router.post('/partsscan', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, partsScan);
router.post('/ppc', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, ppc.generatePpc);
router.post("/totals", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, totals);
router.post("/statustransition", eventAuthorizationMiddleware, statustransition);
router.post("/totalsssu", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, totalsSsu);
router.post("/costing", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, costing);
router.post("/lifecycle", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, lifecycle);
router.post("/costingmulti", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, costingmulti);
router.post("/partsscan", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, partsScan);
router.post("/ppc", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, ppc.generatePpc);
module.exports = router;

View File

@@ -1,13 +1,13 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {createSignedUploadURL, downloadFiles, renameKeys, deleteFiles} = require('../media/media');
const { createSignedUploadURL, downloadFiles, renameKeys, deleteFiles } = require("../media/media");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
router.use(validateFirebaseIdTokenMiddleware);
router.post('/sign', createSignedUploadURL);
router.post('/download', downloadFiles);
router.post('/rename', renameKeys);
router.post('/delete', deleteFiles);
router.post("/sign", createSignedUploadURL);
router.post("/download", downloadFiles);
router.post("/rename", renameKeys);
router.post("/delete", deleteFiles);
module.exports = router;

View File

@@ -1,4 +1,4 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const logger = require("../../server/utils/logger");
const sendEmail = require("../email/sendemail");
@@ -13,39 +13,35 @@ const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLCl
//Test route to ensure Express is responding.
router.get("/test", async function (req, res) {
const commit = require("child_process").execSync(
"git rev-parse --short HEAD"
);
// console.log(app.get('trust proxy'));
// console.log("remoteAddress", req.socket.remoteAddress);
// console.log("X-Forwarded-For", req.header('x-forwarded-for'));
logger.log("test-api-status", "DEBUG", "api", {commit});
// sendEmail.sendServerEmail({
// subject: `API Check - ${process.env.NODE_ENV}`,
// text: `Server API check has come in. Remote IP: ${req.socket.remoteAddress}, X-Forwarded-For: ${req.header('x-forwarded-for')}`,
// });
sendEmail.sendServerEmail({
subject: `API Check - ${process.env.NODE_ENV}`,
text: `Server API check has come in.`,
});
res.status(200).send(`OK - ${commit}`);
const commit = require("child_process").execSync("git rev-parse --short HEAD");
// console.log(app.get('trust proxy'));
// console.log("remoteAddress", req.socket.remoteAddress);
// console.log("X-Forwarded-For", req.header('x-forwarded-for'));
logger.log("test-api-status", "DEBUG", "api", { commit });
// sendEmail.sendServerEmail({
// subject: `API Check - ${process.env.NODE_ENV}`,
// text: `Server API check has come in. Remote IP: ${req.socket.remoteAddress}, X-Forwarded-For: ${req.header('x-forwarded-for')}`,
// });
sendEmail.sendServerEmail({
subject: `API Check - ${process.env.NODE_ENV}`,
text: `Server API check has come in.`
});
res.status(200).send(`OK - ${commit}`);
});
// Search
router.post("/search", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, os.search);
router.post("/opensearch", eventAuthorizationMiddleware, os.handler);
// IO Events
router.post('/ioevent', ioevent.default);
router.post("/ioevent", ioevent.default);
// Email
router.post('/sendemail', validateFirebaseIdTokenMiddleware, sendEmail.sendEmail);
router.post('/emailbounce', bodyParser.text(), sendEmail.emailBounce);
router.post("/sendemail", validateFirebaseIdTokenMiddleware, sendEmail.sendEmail);
router.post("/emailbounce", bodyParser.text(), sendEmail.emailBounce);
// Handlers
router.post('/record-handler/arms', data.arms);
router.post("/record-handler/arms", data.arms);
router.post("/taskHandler", validateFirebaseIdTokenMiddleware, taskHandler.taskHandler);
module.exports = router;

View File

@@ -1,11 +1,11 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const multer = require('multer');
const multer = require("multer");
const upload = multer();
const {mixdataUpload} = require('../mixdata/mixdata');
const { mixdataUpload } = require("../mixdata/mixdata");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.post('/upload', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, upload.any(), mixdataUpload);
router.post("/upload", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, upload.any(), mixdataUpload);
module.exports = router;

View File

@@ -1,11 +1,11 @@
const express = require('express');
const express = require("express");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const {subscribe, unsubscribe} = require("../firebase/firebase-handler");
const { subscribe, unsubscribe } = require("../firebase/firebase-handler");
const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);
router.post('/subscribe', subscribe);
router.post('/unsubscribe', unsubscribe);
router.post("/subscribe", subscribe);
router.post("/unsubscribe", unsubscribe);
module.exports = router;

View File

@@ -1,6 +1,6 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const payroll = require('../payroll/payroll');
const payroll = require("../payroll/payroll");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
@@ -12,4 +12,3 @@ router.post("/payall", payroll.payall);
router.post("/claimtask", payroll.claimtask);
module.exports = router;

View File

@@ -1,14 +1,14 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {authorize, callback, receivables, payables, payments} = require('../accounting/qbo/qbo');
const { authorize, callback, receivables, payables, payments } = require("../accounting/qbo/qbo");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware"); // Assuming you have a qbo module for handling QuickBooks Online related functionalities
// Define the routes for QuickBooks Online
router.post('/authorize', validateFirebaseIdTokenMiddleware, authorize);
router.get('/callback', callback);
router.post('/receivables', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, receivables);
router.post('/payables', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, payables);
router.post('/payments', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, payments);
router.post("/authorize", validateFirebaseIdTokenMiddleware, authorize);
router.get("/callback", callback);
router.post("/receivables", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, receivables);
router.post("/payables", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, payables);
router.post("/payments", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, payments);
module.exports = router;

View File

@@ -1,9 +1,9 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {inlinecss} = require('../render/inlinecss');
const { inlinecss } = require("../render/inlinecss");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
// Define the route for inline CSS rendering
router.post('/inlinecss', validateFirebaseIdTokenMiddleware, inlinecss);
router.post("/inlinecss", validateFirebaseIdTokenMiddleware, inlinecss);
module.exports = router;

View File

@@ -1,9 +1,9 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {job} = require('../scheduling/scheduling-job');
const { job } = require("../scheduling/scheduling-job");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
router.post('/job', validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, job);
router.post("/job", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, job);
module.exports = router;

View File

@@ -1,17 +1,17 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const twilio = require('twilio');
const {receive} = require('../sms/receive');
const {send} = require('../sms/send');
const {status, markConversationRead} = require('../sms/status');
const twilio = require("twilio");
const { receive } = require("../sms/receive");
const { send } = require("../sms/send");
const { status, markConversationRead } = require("../sms/status");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
// Twilio Webhook Middleware for production
const twilioWebhookMiddleware = twilio.webhook({validate: process.env.NODE_ENV === "PRODUCTION"});
const twilioWebhookMiddleware = twilio.webhook({ validate: process.env.NODE_ENV === "PRODUCTION" });
router.post('/receive', twilioWebhookMiddleware, receive);
router.post('/send', validateFirebaseIdTokenMiddleware, send);
router.post('/status', twilioWebhookMiddleware, status);
router.post('/markConversationRead', validateFirebaseIdTokenMiddleware, markConversationRead);
router.post("/receive", twilioWebhookMiddleware, receive);
router.post("/send", validateFirebaseIdTokenMiddleware, send);
router.post("/status", twilioWebhookMiddleware, status);
router.post("/markConversationRead", validateFirebaseIdTokenMiddleware, markConversationRead);
module.exports = router;

View File

@@ -1,8 +1,8 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {techLogin} = require('../tech/tech');
const { techLogin } = require("../tech/tech");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
router.post('/login', validateFirebaseIdTokenMiddleware, techLogin);
router.post("/login", validateFirebaseIdTokenMiddleware, techLogin);
module.exports = router;

View File

@@ -1,9 +1,9 @@
const express = require('express');
const express = require("express");
const router = express.Router();
const {servertime, jsrAuth} = require('../utils/utils');
const { servertime, jsrAuth } = require("../utils/utils");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
router.post('/time', servertime);
router.post('/jsr', validateFirebaseIdTokenMiddleware, jsrAuth);
router.post("/time", servertime);
router.post("/jsr", validateFirebaseIdTokenMiddleware, jsrAuth);
module.exports = router;

View File

@@ -4,311 +4,271 @@ const Dinero = require("dinero.js");
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const _ = require("lodash");
const {filter} = require("lodash");
const { filter } = require("lodash");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.job = async (req, res) => {
const {jobId} = req.body;
const { jobId } = req.body;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
try {
logger.log("smart-scheduling-start", "DEBUG", req.user.email, jobId, null);
try {
logger.log("smart-scheduling-start", "DEBUG", req.user.email, jobId, null);
const result = await client
.setHeaders({Authorization: BearerToken})
.request(queries.QUERY_UPCOMING_APPOINTMENTS, {
now: moment().startOf("day"),
jobId: jobId,
});
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_UPCOMING_APPOINTMENTS, {
now: moment().startOf("day"),
jobId: jobId
});
const {jobs_by_pk, blockedDays, prodJobs, arrJobs, compJobs} = result;
const {ssbuckets, workingdays, timezone, ss_configuration} =
result.jobs_by_pk.bodyshop;
const jobHrs = result.jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const { jobs_by_pk, blockedDays, prodJobs, arrJobs, compJobs } = result;
const { ssbuckets, workingdays, timezone, ss_configuration } = result.jobs_by_pk.bodyshop;
const jobHrs = result.jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const JobBucket = ssbuckets.filter(
(bucket) =>
bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
)[0];
const load = {
productionTotal: {},
productionHours: 0,
};
//Set the current load.
ssbuckets.forEach((bucket) => {
load.productionTotal[bucket.id] = {count: 0, label: bucket.label};
});
const JobBucket = ssbuckets.filter(
(bucket) => bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
)[0];
const load = {
productionTotal: {},
productionHours: 0
};
//Set the current load.
ssbuckets.forEach((bucket) => {
load.productionTotal[bucket.id] = { count: 0, label: bucket.label };
});
const filteredProdJobsList = prodJobs.filter(
(j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
);
const filteredProdJobsList = prodJobs.filter((j) => JobBucket.id === CheckJobBucket(ssbuckets, j));
filteredProdJobsList.forEach((item) => {
//Add all of the jobs currently in production to the buckets so that we have a starting point.
const bucketId = CheckJobBucket(ssbuckets, item);
if (bucketId) {
load.productionTotal[bucketId].count =
load.productionTotal[bucketId].count + 1;
} else {
console.log("Uh oh, this job doesn't fit in a bucket!", item);
}
});
filteredProdJobsList.forEach((item) => {
//Add all of the jobs currently in production to the buckets so that we have a starting point.
const bucketId = CheckJobBucket(ssbuckets, item);
if (bucketId) {
load.productionTotal[bucketId].count = load.productionTotal[bucketId].count + 1;
} else {
console.log("Uh oh, this job doesn't fit in a bucket!", item);
}
});
// const filteredArrJobs = arrJobs.filter(
// (j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
// );
const filteredArrJobs = [];
// const filteredArrJobs = arrJobs.filter(
// (j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
// );
const filteredArrJobs = [];
arrJobs.forEach((item) => {
let isSameBucket = false;
if (JobBucket.id === CheckJobBucket(ssbuckets, item)) {
filteredArrJobs.push(item);
isSameBucket = true;
}
arrJobs.forEach((item) => {
let isSameBucket = false;
if (JobBucket.id === CheckJobBucket(ssbuckets, item)) {
filteredArrJobs.push(item);
isSameBucket = true;
}
let jobHours =
item.labhrs.aggregate.sum.mod_lb_hrs +
item.larhrs.aggregate.sum.mod_lb_hrs;
let jobHours = item.labhrs.aggregate.sum.mod_lb_hrs + item.larhrs.aggregate.sum.mod_lb_hrs;
const AddJobForSchedulingCalc = !item.inproduction;
const AddJobForSchedulingCalc = !item.inproduction;
const itemDate = moment(item.actual_in || item.scheduled_in)
.tz(timezone)
.format("yyyy-MM-DD");
if (isSameBucket) {
if (!!load[itemDate]) {
load[itemDate].hoursIn =
(load[itemDate].hoursIn || 0) + AddJobForSchedulingCalc
? jobHours
: 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsIn.push(item);
} else {
load[itemDate] = {
jobsIn: AddJobForSchedulingCalc ? [item] : [],
jobsOut: [],
hoursIn: AddJobForSchedulingCalc ? jobHours : 0,
};
}
}
if (!load[itemDate]) {
load[itemDate] = {
jobsIn: [],
jobsOut: [],
hoursIn: 0,
hoursInTotal: 0,
};
}
load[itemDate].hoursInTotal =
(load[itemDate].hoursInTotal || 0) + jobHours;
});
//Get the completing jobs.
let problemJobs = [];
const filteredCompJobs = compJobs.filter(
(j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
);
filteredCompJobs.forEach((item) => {
const inProdJobs = filteredProdJobsList.find((p) => p.id === item.id);
const inArrJobs = filteredArrJobs.find((p) => p.id === item.id);
const AddJobForSchedulingCalc = inProdJobs || inArrJobs;
const itemDate = moment(
item.actual_completion || item.scheduled_completion
)
.tz(timezone)
.format("yyyy-MM-DD");
if (!!load[itemDate]) {
load[itemDate].hoursOut =
(load[itemDate].hoursOut || 0) + AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs +
item.larhrs.aggregate.sum.mod_lb_hrs
: 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsOut.push(item);
} else {
load[itemDate] = {
jobsOut: AddJobForSchedulingCalc ? [item] : [],
hoursOut: AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs +
item.larhrs.aggregate.sum.mod_lb_hrs
: 0,
};
}
});
//Propagate the expected load to each day.
const yesterday = moment().tz(timezone).subtract(1, "day");
const today = moment().tz(timezone);
const end = moment.max([
...filteredArrJobs.map((a) => moment(a.scheduled_in).tz(timezone)),
...filteredCompJobs
.map((p) =>
moment(p.actual_completion || p.scheduled_completion).tz(timezone)
)
.filter((p) => p.isValid() && p.isAfter(yesterday)),
moment().tz(timezone).add(15, "days"),
]);
const range = Math.round(
moment.duration(end.add(20, "days").diff(today)).asDays()
);
for (var day = 0; day < range; day++) {
const current = moment(today)
.tz(timezone)
.add(day, "days")
.format("yyyy-MM-DD");
const prev = moment(today)
.tz(timezone)
.add(day - 1, "days")
.format("yyyy-MM-DD");
if (!!!load[current]) {
load[current] = {};
}
if (day === 0) {
//Starting on day 1. The load is current.
load[current].expectedLoad = CalculateLoad(
load.productionTotal,
ssbuckets,
load[current].jobsIn || [],
load[current].jobsOut || []
);
} else {
load[current].expectedLoad = CalculateLoad(
load[prev].expectedLoad,
ssbuckets,
load[current].jobsIn || [],
load[current].jobsOut || []
);
}
}
//Add in all of the blocked days.
blockedDays.forEach((b) => {
//Find it in the load, set it as blocked.
const startIsoFormat = moment(b.start).tz(timezone).format("YYYY-MM-DD");
if (load[startIsoFormat]) load[startIsoFormat].blocked = true;
else {
load[startIsoFormat] = {blocked: true};
}
});
// //Propose the first 10 dates where we are below target.
const possibleDates = [];
delete load.productionTotal;
const loadKeys = Object.keys(load).sort((a, b) =>
moment(a).isAfter(moment(b)) ? 1 : -1
);
loadKeys.forEach((loadKey) => {
const isShopOpen =
(workingdays[dayOfWeekMapper(moment(loadKey).day())] || false) &&
!load[loadKey].blocked;
let isUnderDailyTotalLimit = true;
if (
ss_configuration &&
ss_configuration.dailyhrslimit &&
ss_configuration.dailyhrslimit > 0 &&
load[loadKey] &&
load[loadKey].hoursInTotal &&
load[loadKey].hoursInTotal > ss_configuration.dailyhrslimit
) {
isUnderDailyTotalLimit = false;
}
if (
load[loadKey].expectedLoad &&
load[loadKey].expectedLoad[JobBucket.id] &&
JobBucket.target > load[loadKey].expectedLoad[JobBucket.id].count &&
isShopOpen &&
isUnderDailyTotalLimit
)
possibleDates.push(new Date(loadKey).toISOString().substr(0, 10));
});
if (possibleDates.length < 11) {
res.json(possibleDates);
const itemDate = moment(item.actual_in || item.scheduled_in)
.tz(timezone)
.format("yyyy-MM-DD");
if (isSameBucket) {
if (!!load[itemDate]) {
load[itemDate].hoursIn = (load[itemDate].hoursIn || 0) + AddJobForSchedulingCalc ? jobHours : 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsIn.push(item);
} else {
res.json(possibleDates.slice(0, 10));
load[itemDate] = {
jobsIn: AddJobForSchedulingCalc ? [item] : [],
jobsOut: [],
hoursIn: AddJobForSchedulingCalc ? jobHours : 0
};
}
} catch (error) {
logger.log("smart-scheduling-error", "ERROR", req.user.email, jobId, {
error,
});
res.status(400).send(error);
}
if (!load[itemDate]) {
load[itemDate] = {
jobsIn: [],
jobsOut: [],
hoursIn: 0,
hoursInTotal: 0
};
}
load[itemDate].hoursInTotal = (load[itemDate].hoursInTotal || 0) + jobHours;
});
//Get the completing jobs.
let problemJobs = [];
const filteredCompJobs = compJobs.filter((j) => JobBucket.id === CheckJobBucket(ssbuckets, j));
filteredCompJobs.forEach((item) => {
const inProdJobs = filteredProdJobsList.find((p) => p.id === item.id);
const inArrJobs = filteredArrJobs.find((p) => p.id === item.id);
const AddJobForSchedulingCalc = inProdJobs || inArrJobs;
const itemDate = moment(item.actual_completion || item.scheduled_completion)
.tz(timezone)
.format("yyyy-MM-DD");
if (!!load[itemDate]) {
load[itemDate].hoursOut =
(load[itemDate].hoursOut || 0) + AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs + item.larhrs.aggregate.sum.mod_lb_hrs
: 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsOut.push(item);
} else {
load[itemDate] = {
jobsOut: AddJobForSchedulingCalc ? [item] : [],
hoursOut: AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs + item.larhrs.aggregate.sum.mod_lb_hrs
: 0
};
}
});
//Propagate the expected load to each day.
const yesterday = moment().tz(timezone).subtract(1, "day");
const today = moment().tz(timezone);
const end = moment.max([
...filteredArrJobs.map((a) => moment(a.scheduled_in).tz(timezone)),
...filteredCompJobs
.map((p) => moment(p.actual_completion || p.scheduled_completion).tz(timezone))
.filter((p) => p.isValid() && p.isAfter(yesterday)),
moment().tz(timezone).add(15, "days")
]);
const range = Math.round(moment.duration(end.add(20, "days").diff(today)).asDays());
for (var day = 0; day < range; day++) {
const current = moment(today).tz(timezone).add(day, "days").format("yyyy-MM-DD");
const prev = moment(today)
.tz(timezone)
.add(day - 1, "days")
.format("yyyy-MM-DD");
if (!!!load[current]) {
load[current] = {};
}
if (day === 0) {
//Starting on day 1. The load is current.
load[current].expectedLoad = CalculateLoad(
load.productionTotal,
ssbuckets,
load[current].jobsIn || [],
load[current].jobsOut || []
);
} else {
load[current].expectedLoad = CalculateLoad(
load[prev].expectedLoad,
ssbuckets,
load[current].jobsIn || [],
load[current].jobsOut || []
);
}
}
//Add in all of the blocked days.
blockedDays.forEach((b) => {
//Find it in the load, set it as blocked.
const startIsoFormat = moment(b.start).tz(timezone).format("YYYY-MM-DD");
if (load[startIsoFormat]) load[startIsoFormat].blocked = true;
else {
load[startIsoFormat] = { blocked: true };
}
});
// //Propose the first 10 dates where we are below target.
const possibleDates = [];
delete load.productionTotal;
const loadKeys = Object.keys(load).sort((a, b) => (moment(a).isAfter(moment(b)) ? 1 : -1));
loadKeys.forEach((loadKey) => {
const isShopOpen = (workingdays[dayOfWeekMapper(moment(loadKey).day())] || false) && !load[loadKey].blocked;
let isUnderDailyTotalLimit = true;
if (
ss_configuration &&
ss_configuration.dailyhrslimit &&
ss_configuration.dailyhrslimit > 0 &&
load[loadKey] &&
load[loadKey].hoursInTotal &&
load[loadKey].hoursInTotal > ss_configuration.dailyhrslimit
) {
isUnderDailyTotalLimit = false;
}
if (
load[loadKey].expectedLoad &&
load[loadKey].expectedLoad[JobBucket.id] &&
JobBucket.target > load[loadKey].expectedLoad[JobBucket.id].count &&
isShopOpen &&
isUnderDailyTotalLimit
)
possibleDates.push(new Date(loadKey).toISOString().substr(0, 10));
});
if (possibleDates.length < 11) {
res.json(possibleDates);
} else {
res.json(possibleDates.slice(0, 10));
}
} catch (error) {
logger.log("smart-scheduling-error", "ERROR", req.user.email, jobId, {
error
});
res.status(400).send(error);
}
};
const dayOfWeekMapper = (numberOfDay) => {
switch (numberOfDay) {
case 0:
return "sunday";
case 1:
return "monday";
case 2:
return "tuesday";
case 3:
return "wednesday";
case 4:
return "thursday";
case 5:
return "friday";
case 6:
return "saturday";
}
switch (numberOfDay) {
case 0:
return "sunday";
case 1:
return "monday";
case 2:
return "tuesday";
case 3:
return "wednesday";
case 4:
return "thursday";
case 5:
return "friday";
case 6:
return "saturday";
}
};
const CheckJobBucket = (buckets, job) => {
const jobHours =
job.labhrs.aggregate.sum.mod_lb_hrs + job.larhrs.aggregate.sum.mod_lb_hrs;
const jobHours = job.labhrs.aggregate.sum.mod_lb_hrs + job.larhrs.aggregate.sum.mod_lb_hrs;
const matchingBucket = buckets.filter((b) =>
b.gte <= jobHours && b.lt ? b.lt > jobHours : true
);
const matchingBucket = buckets.filter((b) => (b.gte <= jobHours && b.lt ? b.lt > jobHours : true));
return matchingBucket[0] && matchingBucket[0].id;
return matchingBucket[0] && matchingBucket[0].id;
};
const CalculateLoad = (currentLoad, buckets, jobsIn, jobsOut) => {
//Add the jobs coming
const newLoad = _.cloneDeep(currentLoad);
jobsIn.forEach((job) => {
const bucketId = CheckJobBucket(buckets, job);
if (bucketId) {
newLoad[bucketId].count = newLoad[bucketId].count + 1;
} else {
console.log(
"[Util Arr Job]Uh oh, this job doesn't fit in a bucket!",
job
);
}
});
//Add the jobs coming
const newLoad = _.cloneDeep(currentLoad);
jobsIn.forEach((job) => {
const bucketId = CheckJobBucket(buckets, job);
if (bucketId) {
newLoad[bucketId].count = newLoad[bucketId].count + 1;
} else {
console.log("[Util Arr Job]Uh oh, this job doesn't fit in a bucket!", job);
}
});
jobsOut.forEach((job) => {
const bucketId = CheckJobBucket(buckets, job);
if (bucketId) {
newLoad[bucketId].count = newLoad[bucketId].count - 1;
if (newLoad[bucketId].count < 0) {
console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job);
}
} else {
console.log(
"[Util Out Job]Uh oh, this job doesn't fit in a bucket!",
job
);
}
});
jobsOut.forEach((job) => {
const bucketId = CheckJobBucket(buckets, job);
if (bucketId) {
newLoad[bucketId].count = newLoad[bucketId].count - 1;
if (newLoad[bucketId].count < 0) {
console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job);
}
} else {
console.log("[Util Out Job]Uh oh, this job doesn't fit in a bucket!", job);
}
});
return newLoad;
return newLoad;
};

View File

@@ -1,146 +1,132 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const {phone} = require("phone");
const {admin} = require("../firebase/firebase-handler");
const { phone } = require("phone");
const { admin } = require("../firebase/firebase-handler");
const logger = require("../utils/logger");
const InstanceManager = require("../utils/instanceMgr").default;
exports.receive = async (req, res) => {
//Perform request validation
//Perform request validation
logger.log("sms-inbound", "DEBUG", "api", null, {
logger.log("sms-inbound", "DEBUG", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body)
});
if (!!!req.body || !!!req.body.MessagingServiceSid || !!!req.body.SmsMessageSid) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
type: "malformed-request"
});
res.status(400);
res.json({ success: false, error: "Malformed Request" });
} else {
try {
const response = await client.request(queries.FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
mssid: req.body.MessagingServiceSid,
phone: phone(req.body.From).phoneNumber
});
let newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
});
if (
!!!req.body ||
!!!req.body.MessagingServiceSid ||
!!!req.body.SmsMessageSid
) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
image_path: generateMediaArray(req.body)
};
if (response.bodyshops[0]) {
//Found a bodyshop - should always happen.
if (response.bodyshops[0].conversations.length === 0) {
//No conversation Found, create one.
console.log("[SMS Receive] No conversation found. Creating one.");
newMessage.conversation = {
data: {
bodyshopid: response.bodyshops[0].id,
phone_num: phone(req.body.From).phoneNumber
}
};
} else if (response.bodyshops[0].conversations.length === 1) {
//Just add it to the conversation
console.log("[SMS Receive] Conversation found. Added ID.");
newMessage.conversationid = response.bodyshops[0].conversations[0].id;
} else {
//We should never get here.
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
type: "malformed-request",
});
res.status(400);
res.json({success: false, error: "Malformed Request"});
} else {
try {
const response = await client.request(
queries.FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID,
{
mssid: req.body.MessagingServiceSid,
phone: phone(req.body.From).phoneNumber,
}
);
let newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
};
if (response.bodyshops[0]) {
//Found a bodyshop - should always happen.
if (response.bodyshops[0].conversations.length === 0) {
//No conversation Found, create one.
console.log("[SMS Receive] No conversation found. Creating one.");
newMessage.conversation = {
data: {
bodyshopid: response.bodyshops[0].id,
phone_num: phone(req.body.From).phoneNumber,
},
};
} else if (response.bodyshops[0].conversations.length === 1) {
//Just add it to the conversation
console.log("[SMS Receive] Conversation found. Added ID.");
newMessage.conversationid = response.bodyshops[0].conversations[0].id;
} else {
//We should never get here.
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
type: "duplicate-phone",
});
}
try {
let insertresp;
if (response.bodyshops[0].conversations[0]) {
insertresp = await client.request(queries.INSERT_MESSAGE, {
msg: newMessage,
conversationid:
response.bodyshops[0].conversations[0] &&
response.bodyshops[0].conversations[0].id,
});
} else {
insertresp = await client.request(queries.RECEIVE_MESSAGE, {
msg: newMessage,
});
}
const message = insertresp.insert_messages.returning[0];
const data = {
type: "messaging-inbound",
conversationid: message.conversationid || "",
text: message.text || "",
messageid: message.id || "",
phone_num: message.conversation.phone_num || "",
};
const fcmresp = await admin.messaging().send({
topic: `${message.conversation.bodyshop.imexshopid}-messaging`,
notification: {
title:
InstanceManager({
imex:`ImEX Online Message - ${data.phone_num}` ,
rome: `Rome Online Message - ${data.phone_num}`,
promanager: `ProManager Message - ${data.phone_num}`
})
,
body: message.image_path ? `Image ${message.text}` : message.text,
//imageUrl: "https://thinkimex.com/img/io-fcm.png", //TODO:AIO Resolve addresses for other instances
},
data,
});
logger.log("sms-inbound-success", "DEBUG", "api", null, {
newMessage,
fcmresp,
});
res.status(200).send("");
} catch (e2) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
error: e2,
});
res.sendStatus(500).json(e2);
}
}
} catch (e1) {
console.log("e1", e1);
res.sendStatus(500).json(e1);
messagingServiceSid: req.body.MessagingServiceSid,
type: "duplicate-phone"
});
}
try {
let insertresp;
if (response.bodyshops[0].conversations[0]) {
insertresp = await client.request(queries.INSERT_MESSAGE, {
msg: newMessage,
conversationid: response.bodyshops[0].conversations[0] && response.bodyshops[0].conversations[0].id
});
} else {
insertresp = await client.request(queries.RECEIVE_MESSAGE, {
msg: newMessage
});
}
const message = insertresp.insert_messages.returning[0];
const data = {
type: "messaging-inbound",
conversationid: message.conversationid || "",
text: message.text || "",
messageid: message.id || "",
phone_num: message.conversation.phone_num || ""
};
const fcmresp = await admin.messaging().send({
topic: `${message.conversation.bodyshop.imexshopid}-messaging`,
notification: {
title: InstanceManager({
imex: `ImEX Online Message - ${data.phone_num}`,
rome: `Rome Online Message - ${data.phone_num}`,
promanager: `ProManager Message - ${data.phone_num}`
}),
body: message.image_path ? `Image ${message.text}` : message.text
//imageUrl: "https://thinkimex.com/img/io-fcm.png", //TODO:AIO Resolve addresses for other instances
},
data
});
logger.log("sms-inbound-success", "DEBUG", "api", null, {
newMessage,
fcmresp
});
res.status(200).send("");
} catch (e2) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
error: e2
});
res.sendStatus(500).json(e2);
}
}
} catch (e1) {
console.log("e1", e1);
res.sendStatus(500).json(e1);
}
}
};
// const sampleMessage: {
@@ -194,15 +180,15 @@ exports.receive = async (req, res) => {
// MediaContentType0: 'video/3gpp',
const generateMediaArray = (body) => {
const {NumMedia} = body;
if (parseInt(NumMedia) > 0) {
//stuff
const ret = [];
for (var i = 0; i < parseInt(NumMedia); i++) {
ret.push(body[`MediaUrl${i}`]);
}
return ret;
} else {
return null;
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
//stuff
const ret = [];
for (var i = 0; i < parseInt(NumMedia); i++) {
ret.push(body[`MediaUrl${i}`]);
}
return ret;
} else {
return null;
}
};

View File

@@ -1,127 +1,100 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const twilio = require("twilio");
const {phone} = require("phone");
const { phone } = require("phone");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const client = twilio(
process.env.TWILIO_AUTH_TOKEN,
process.env.TWILIO_AUTH_KEY
);
const {admin} = require("../firebase/firebase-handler");
const client = twilio(process.env.TWILIO_AUTH_TOKEN, process.env.TWILIO_AUTH_KEY);
const { admin } = require("../firebase/firebase-handler");
const gqlClient = require("../graphql-client/graphql-client").client;
exports.send = (req, res) => {
const {
to,
messagingServiceSid,
body,
conversationid,
selectedMedia,
imexshopid,
} = req.body;
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
logger.log("sms-outbound", "DEBUG", req.user.email, null, {
logger.log("sms-outbound", "DEBUG", req.user.email, null, {
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src),
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
if (!!to && !!messagingServiceSid && (!!body || !!selectedMedia.length > 0) && !!conversationid) {
client.messages
.create({
body: body,
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src),
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : [],
});
if (
!!to &&
!!messagingServiceSid &&
(!!body || !!selectedMedia.length > 0) &&
!!conversationid
) {
client.messages
.create({
body: body,
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src),
})
.then((message) => {
let newMessage = {
msid: message.sid,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0
? selectedMedia.map((i) => i.src)
: [],
};
gqlClient
.request(queries.INSERT_MESSAGE, {msg: newMessage, conversationid})
.then((r2) => {
//console.log("Responding GQL Message ID", JSON.stringify(r2));
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
conversationid,
});
const data = {
type: "messaging-outbound",
conversationid: newMessage.conversationid || "",
};
admin.messaging().send({
topic: `${imexshopid}-messaging`,
data,
});
res.sendStatus(200);
})
.catch((e2) => {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
msid: message.sid,
conversationid,
error: e2,
});
//res.json({ success: false, message: e2 });
});
})
.catch((e1) => {
//res.json({ success: false, message: error });
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
conversationid,
error: e1,
});
mediaUrl: selectedMedia.map((i) => i.src)
})
.then((message) => {
let newMessage = {
msid: message.sid,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
};
gqlClient
.request(queries.INSERT_MESSAGE, { msg: newMessage, conversationid })
.then((r2) => {
//console.log("Responding GQL Message ID", JSON.stringify(r2));
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
conversationid
});
} else {
const data = {
type: "messaging-outbound",
conversationid: newMessage.conversationid || ""
};
admin.messaging().send({
topic: `${imexshopid}-messaging`,
data
});
res.sendStatus(200);
})
.catch((e2) => {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
msid: message.sid,
conversationid,
error: e2
});
//res.json({ success: false, message: e2 });
});
})
.catch((e1) => {
//res.json({ success: false, message: error });
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
type: "missing-parameters",
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0
? selectedMedia.map((i) => i.src)
: [],
conversationid,
error: e1
});
res
.status(400)
.json({success: false, message: "Missing required parameter(s)."});
}
});
} else {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
type: "missing-parameters",
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
res.status(400).json({ success: false, message: "Missing required parameter(s)." });
}
};

View File

@@ -1,55 +1,52 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const {phone} = require("phone");
const { phone } = require("phone");
const logger = require("../utils/logger");
const {admin} = require("../firebase/firebase-handler");
const { admin } = require("../firebase/firebase-handler");
exports.status = (req, res) => {
const {SmsSid, SmsStatus} = req.body;
client
.request(queries.UPDATE_MESSAGE_STATUS, {
msid: SmsSid,
fields: {status: SmsStatus},
})
.then((response) => {
logger.log("sms-status-update", "DEBUG", "api", null, {
msid: SmsSid,
fields: {status: SmsStatus},
});
})
.catch((error) => {
logger.log("sms-status-update-error", "ERROR", "api", null, {
msid: SmsSid,
fields: {status: SmsStatus},
error,
});
});
res.sendStatus(200);
const { SmsSid, SmsStatus } = req.body;
client
.request(queries.UPDATE_MESSAGE_STATUS, {
msid: SmsSid,
fields: { status: SmsStatus }
})
.then((response) => {
logger.log("sms-status-update", "DEBUG", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus }
});
})
.catch((error) => {
logger.log("sms-status-update-error", "ERROR", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
error
});
});
res.sendStatus(200);
};
exports.markConversationRead = async (req, res) => {
const {conversationid, imexshopid} = req.body;
admin.messaging().send({
topic: `${imexshopid}-messaging`,
// notification: {
// title: `ImEX Online Message - ${data.phone_num}`,
// body: message.image_path ? `Image ${message.text}` : message.text,
// imageUrl: "https://thinkimex.com/img/logo512.png",
// },
data: {
type: "messaging-mark-conversation-read",
conversationid: conversationid || "",
},
});
res.send(200);
const { conversationid, imexshopid } = req.body;
admin.messaging().send({
topic: `${imexshopid}-messaging`,
// notification: {
// title: `ImEX Online Message - ${data.phone_num}`,
// body: message.image_path ? `Image ${message.text}` : message.text,
// imageUrl: "https://thinkimex.com/img/logo512.png",
// },
data: {
type: "messaging-mark-conversation-read",
conversationid: conversationid || ""
}
});
res.send(200);
};
// Inbound Sample

View File

@@ -1,45 +1,42 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const stripe = require('stripe')(process.env.STRIPE_SECRET_KEY);
const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY);
const processor = async (req, res) => {
const {amount, stripe_acct_id} = req.body;
const { amount, stripe_acct_id } = req.body;
try {
await stripe.paymentIntents
.create(
{
payment_method_types: ["card"],
amount: amount,
currency: "cad",
application_fee_amount: 50,
},
{
stripeAccount: stripe_acct_id,
}
)
.then(function (paymentIntent) {
try {
return res.send({
clientSecret: paymentIntent.client_secret,
});
} catch (err) {
return res.status(500).send({
error: err.message,
});
}
});
} catch (error) {
console.log("error", error);
res.status(400).send(error);
}
try {
await stripe.paymentIntents
.create(
{
payment_method_types: ["card"],
amount: amount,
currency: "cad",
application_fee_amount: 50
},
{
stripeAccount: stripe_acct_id
}
)
.then(function (paymentIntent) {
try {
return res.send({
clientSecret: paymentIntent.client_secret
});
} catch (err) {
return res.status(500).send({
error: err.message
});
}
});
} catch (error) {
console.log("error", error);
res.status(400).send(error);
}
};
exports.payment = processor;

View File

@@ -1,9 +1,6 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const emailer = require("../email/sendemail");
@@ -11,74 +8,76 @@ const moment = require("moment-timezone");
const converter = require("json-2-csv");
exports.taskHandler = async (req, res) => {
try {
const {bodyshopid, query, variables, text, to, subject, timezone} = req.body;
try {
const { bodyshopid, query, variables, text, to, subject, timezone } = req.body;
//Check the variables to see if they are an object.
Object.keys(variables).forEach((key) => {
if (typeof variables[key] === "object") {
if (variables[key].function) {
variables[key] = functionMapper(variables[key].function, timezone);
}
}
});
//Check the variables to see if they are an object.
Object.keys(variables).forEach((key) => {
if (typeof variables[key] === "object") {
if (variables[key].function) {
variables[key] = functionMapper(variables[key].function, timezone);
}
}
});
const response = await client.request(query, variables);
const rootElement = response[Object.keys(response)[0]]; //This element should always be an array.
const response = await client.request(query, variables);
const rootElement = response[Object.keys(response)[0]]; //This element should always be an array.
const csv = converter.json2csv(rootElement, {emptyFieldValue: ""});
const csv = converter.json2csv(rootElement, { emptyFieldValue: "" });
emailer.sendTaskEmail({
to,
subject,
text,
attachments: [{filename: "query.csv", content: csv}],
}).catch(err => {
console.error('Errors sending CSV Email.')
});
emailer
.sendTaskEmail({
to,
subject,
text,
attachments: [{ filename: "query.csv", content: csv }]
})
.catch((err) => {
console.error("Errors sending CSV Email.");
});
return res.status(200).send(csv);
} catch (error) {
res.status(500).json({error: error.message, stack: error.stackTrace});
}
return res.status(200).send(csv);
} catch (error) {
res.status(500).json({ error: error.message, stack: error.stackTrace });
}
};
const isoFormat = "YYYY-MM-DD";
function functionMapper(f, timezone) {
switch (f) {
case "date.today":
return moment().tz(timezone).format(isoFormat);
case "date.now":
return moment().tz(timezone);
case "date.yesterday":
return moment().tz(timezone).subtract(1, "day").format(isoFormat);
case "date.3daysago":
return moment().tz(timezone).subtract(3, "day").format(isoFormat);
case "date.7daysago":
return moment().tz(timezone).subtract(7, "day").format(isoFormat);
case "date.tomorrow":
return moment().tz(timezone).add(1, "day").format(isoFormat);
case "date.3daysfromnow":
return moment().tz(timezone).add(3, "day").format(isoFormat);
case "date.7daysfromnow":
return moment().tz(timezone).add(7, "day").format(isoFormat);
case "date.yesterdaytz":
return moment().tz(timezone).subtract(1, "day");
case "date.3daysagotz":
return moment().tz(timezone).subtract(3, "day");
case "date.7daysagotz":
return moment().tz(timezone).subtract(7, "day");
case "date.tomorrowtz":
return moment().tz(timezone).add(1, "day");
case "date.3daysfromnowtz":
return moment().tz(timezone).add(3, "day");
case "date.7daysfromnowtz":
return moment().tz(timezone).add(7, "day");
switch (f) {
case "date.today":
return moment().tz(timezone).format(isoFormat);
case "date.now":
return moment().tz(timezone);
case "date.yesterday":
return moment().tz(timezone).subtract(1, "day").format(isoFormat);
case "date.3daysago":
return moment().tz(timezone).subtract(3, "day").format(isoFormat);
case "date.7daysago":
return moment().tz(timezone).subtract(7, "day").format(isoFormat);
case "date.tomorrow":
return moment().tz(timezone).add(1, "day").format(isoFormat);
case "date.3daysfromnow":
return moment().tz(timezone).add(3, "day").format(isoFormat);
case "date.7daysfromnow":
return moment().tz(timezone).add(7, "day").format(isoFormat);
case "date.yesterdaytz":
return moment().tz(timezone).subtract(1, "day");
case "date.3daysagotz":
return moment().tz(timezone).subtract(3, "day");
case "date.7daysagotz":
return moment().tz(timezone).subtract(7, "day");
case "date.tomorrowtz":
return moment().tz(timezone).add(1, "day");
case "date.3daysfromnowtz":
return moment().tz(timezone).add(3, "day");
case "date.7daysfromnowtz":
return moment().tz(timezone).add(7, "day");
case "date.now":
return moment().tz(timezone);
default:
return f;
}
case "date.now":
return moment().tz(timezone);
default:
return f;
}
}

View File

@@ -3,48 +3,45 @@ const queries = require("../graphql-client/queries");
const path = require("path");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.techLogin = async (req, res) => {
const {shopid, employeeid, pin} = req.body;
logger.log("tech-console-login", "DEBUG", req.user.email, null, null);
try {
const result = await client.request(queries.QUERY_EMPLOYEE_PIN, {
shopId: shopid,
employeeId: employeeid,
});
const { shopid, employeeid, pin } = req.body;
logger.log("tech-console-login", "DEBUG", req.user.email, null, null);
try {
const result = await client.request(queries.QUERY_EMPLOYEE_PIN, {
shopId: shopid,
employeeId: employeeid
});
let valid = false;
let error;
let technician;
if (result.employees && result.employees[0]) {
const dbRecord = result.employees[0];
if (dbRecord.pin === pin && dbRecord.active === true) {
valid = true;
delete dbRecord.pin;
technician = dbRecord;
} else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "wrong-pin",
});
error = "The employee ID and PIN combination are not correct.";
}
} else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "invalid-employee",
});
error = "The employee ID does not exist.";
}
res.json({valid, technician, error});
} catch (error) {
let valid = false;
let error;
let technician;
if (result.employees && result.employees[0]) {
const dbRecord = result.employees[0];
if (dbRecord.pin === pin && dbRecord.active === true) {
valid = true;
delete dbRecord.pin;
technician = dbRecord;
} else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
error,
type: "wrong-pin"
});
res.status(400).send(error);
error = "The employee ID and PIN combination are not correct.";
}
} else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "invalid-employee"
});
error = "The employee ID does not exist.";
}
res.json({ valid, technician, error });
} catch (error) {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
error
});
res.status(400).send(error);
}
};

View File

@@ -3,11 +3,11 @@
* @type {string[]}
*/
const adminEmail = [
"patrick@imex.dev",
//"patrick@imex.test",
"patrick@imex.prod",
"patrick@imexsystems.ca",
"patrick@thinkimex.com",
"patrick@imex.dev",
//"patrick@imex.test",
"patrick@imex.prod",
"patrick@imexsystems.ca",
"patrick@thinkimex.com"
];
module.exports = adminEmail;
module.exports = adminEmail;

View File

@@ -4,79 +4,84 @@ const getLifecycleStatusColor = require("./getLifecycleStatusColor");
const _ = require("lodash");
const calculateStatusDuration = (transitions, statuses) => {
let statusDuration = {};
let totalDuration = 0;
let totalCurrentStatusDuration = null;
let summations = [];
let statusDuration = {};
let totalDuration = 0;
let totalCurrentStatusDuration = null;
let summations = [];
transitions.forEach((transition, index) => {
let duration = transition.duration;
totalDuration += duration;
if (transition.start && !transition.end) {
const startMoment = moment(transition.start);
const nowMoment = moment();
const duration = moment.duration(nowMoment.diff(startMoment));
totalCurrentStatusDuration = {
value: duration.asMilliseconds(),
humanReadable: durationToHumanReadable(duration)
};
}
if (!transition.prev_value) {
statusDuration[transition.value] = {
value: duration,
humanReadable: durationToHumanReadable(moment.duration(duration))
};
} else {
if (statusDuration[transition.value]) {
statusDuration[transition.value].value += duration;
statusDuration[transition.value].humanReadable = durationToHumanReadable(moment.duration(statusDuration[transition.value].value));
} else {
statusDuration[transition.value] = {
value: duration,
humanReadable: durationToHumanReadable(moment.duration(duration))
};
}
}
});
// Calculate the percentage for each status
// Calculate the percentage for each status
let totalPercentage = 0;
const statusKeys = Object.keys(statusDuration);
statusKeys.forEach((status, index) => {
if (index !== statusKeys.length - 1) {
const percentage = (statusDuration[status].value / totalDuration) * 100;
totalPercentage += percentage;
statusDuration[status].percentage = percentage;
} else {
statusDuration[status].percentage = 100 - totalPercentage;
}
});
for (let [status, {value, humanReadable}] of Object.entries(statusDuration)) {
if (status !== 'total') {
summations.push({
status,
value,
humanReadable,
percentage: statusDuration[status].percentage,
color: getLifecycleStatusColor(status),
roundedPercentage: `${Math.round(statusDuration[status].percentage)}%`
});
}
transitions.forEach((transition, index) => {
let duration = transition.duration;
totalDuration += duration;
if (transition.start && !transition.end) {
const startMoment = moment(transition.start);
const nowMoment = moment();
const duration = moment.duration(nowMoment.diff(startMoment));
totalCurrentStatusDuration = {
value: duration.asMilliseconds(),
humanReadable: durationToHumanReadable(duration)
};
}
const humanReadableTotal = durationToHumanReadable(moment.duration(totalDuration));
if (!transition.prev_value) {
statusDuration[transition.value] = {
value: duration,
humanReadable: durationToHumanReadable(moment.duration(duration))
};
} else {
if (statusDuration[transition.value]) {
statusDuration[transition.value].value += duration;
statusDuration[transition.value].humanReadable = durationToHumanReadable(
moment.duration(statusDuration[transition.value].value)
);
} else {
statusDuration[transition.value] = {
value: duration,
humanReadable: durationToHumanReadable(moment.duration(duration))
};
}
}
});
return {
summations: _.isArray(statuses) && !_.isEmpty(statuses) ? summations.sort((a, b) => {
// Calculate the percentage for each status
// Calculate the percentage for each status
let totalPercentage = 0;
const statusKeys = Object.keys(statusDuration);
statusKeys.forEach((status, index) => {
if (index !== statusKeys.length - 1) {
const percentage = (statusDuration[status].value / totalDuration) * 100;
totalPercentage += percentage;
statusDuration[status].percentage = percentage;
} else {
statusDuration[status].percentage = 100 - totalPercentage;
}
});
for (let [status, { value, humanReadable }] of Object.entries(statusDuration)) {
if (status !== "total") {
summations.push({
status,
value,
humanReadable,
percentage: statusDuration[status].percentage,
color: getLifecycleStatusColor(status),
roundedPercentage: `${Math.round(statusDuration[status].percentage)}%`
});
}
}
const humanReadableTotal = durationToHumanReadable(moment.duration(totalDuration));
return {
summations:
_.isArray(statuses) && !_.isEmpty(statuses)
? summations.sort((a, b) => {
return statuses.indexOf(a.status) - statuses.indexOf(b.status);
}) : summations,
totalStatuses: summations.length,
total: totalDuration,
totalCurrentStatusDuration,
humanReadableTotal
};
}
module.exports = calculateStatusDuration;
})
: summations,
totalStatuses: summations.length,
total: totalDuration,
totalCurrentStatusDuration,
humanReadableTotal
};
};
module.exports = calculateStatusDuration;

View File

@@ -1,22 +1,22 @@
const durationToHumanReadable = (duration) => {
if (!duration) return 'N/A';
if (!duration) return "N/A";
let parts = [];
let parts = [];
let years = duration.years();
let months = duration.months();
let days = duration.days();
let hours = duration.hours();
let minutes = duration.minutes();
let seconds = duration.seconds();
let years = duration.years();
let months = duration.months();
let days = duration.days();
let hours = duration.hours();
let minutes = duration.minutes();
let seconds = duration.seconds();
if (years) parts.push(years + ' year' + (years > 1 ? 's' : ''));
if (months) parts.push(months + ' month' + (months > 1 ? 's' : ''));
if (days) parts.push(days + ' day' + (days > 1 ? 's' : ''));
if (hours) parts.push(hours + ' hour' + (hours > 1 ? 's' : ''));
if (minutes) parts.push(minutes + ' minute' + (minutes > 1 ? 's' : ''));
if (seconds) parts.push(seconds + ' second' + (seconds > 1 ? 's' : ''));
if (years) parts.push(years + " year" + (years > 1 ? "s" : ""));
if (months) parts.push(months + " month" + (months > 1 ? "s" : ""));
if (days) parts.push(days + " day" + (days > 1 ? "s" : ""));
if (hours) parts.push(hours + " hour" + (hours > 1 ? "s" : ""));
if (minutes) parts.push(minutes + " minute" + (minutes > 1 ? "s" : ""));
if (seconds) parts.push(seconds + " second" + (seconds > 1 ? "s" : ""));
return parts.join(', ');
}
module.exports = durationToHumanReadable;
return parts.join(", ");
};
module.exports = durationToHumanReadable;

View File

@@ -1,11 +1,11 @@
const crypto = require('crypto');
const crypto = require("crypto");
const getLifecycleStatusColor = (key) => {
const hash = crypto.createHash('sha256');
hash.update(key);
const hashedKey = hash.digest('hex');
const num = parseInt(hashedKey, 16);
return '#' + (num % 16777215).toString(16).padStart(6, '0');
const hash = crypto.createHash("sha256");
hash.update(key);
const hashedKey = hash.digest("hex");
const num = parseInt(hashedKey, 16);
return "#" + (num % 16777215).toString(16).padStart(6, "0");
};
module.exports = getLifecycleStatusColor;
module.exports = getLifecycleStatusColor;

View File

@@ -12,18 +12,17 @@ function InstanceManager({ args, instance, debug, executeFunction, rome, promana
let propToReturn = null;
switch (instance || process.env.INSTANCE) {
case 'IMEX':
case "IMEX":
propToReturn = imex;
break;
case 'ROME':
case "ROME":
propToReturn = rome; //TODO:AIO Implement USE_IMEX
break;
case 'PROMANAGER':
case "PROMANAGER":
//Return the rome prop if USE_ROME.
//If not USE_ROME, we want to default back to the rome prop if it's undefined.
//If null, we might want to show nothing, so make sure we return null.
propToReturn =
promanager === 'USE_ROME' ? rome : promanager !== undefined ? promanager : rome;
propToReturn = promanager === "USE_ROME" ? rome : promanager !== undefined ? promanager : rome;
break;
default:
propToReturn = imex;
@@ -31,17 +30,17 @@ function InstanceManager({ args, instance, debug, executeFunction, rome, promana
}
if (debug) {
console.log('InstanceRenderManager Debugger');
console.log('=========================');
console.log("InstanceRenderManager Debugger");
console.log("=========================");
console.log({ executeFunction, rome, promanager, imex, debug, propToReturn });
console.log('=========================');
console.log("=========================");
}
//Checking to see if we need to default to another one.
if (propToReturn === 'imex') {
if (propToReturn === "imex") {
propToReturn = imex;
}
if (executeFunction && typeof propToReturn === 'function') return propToReturn(...args);
if (executeFunction && typeof propToReturn === "function") return propToReturn(...args);
return propToReturn === undefined ? null : propToReturn;
}

View File

@@ -1,25 +1,25 @@
const graylog2 = require("graylog2");
const logger = new graylog2.graylog({
servers: [{host: "logs.bodyshop.app", port: 12201}],
servers: [{ host: "logs.bodyshop.app", port: 12201 }]
});
function log(message, type, user, record, object) {
if (type !== "ioevent")
console.log(message, {
type,
env: process.env.NODE_ENV || "development",
user,
record,
...object,
});
logger.log(message, message, {
type,
env: process.env.NODE_ENV || "development",
user,
record,
...object,
if (type !== "ioevent")
console.log(message, {
type,
env: process.env.NODE_ENV || "development",
user,
record,
...object
});
logger.log(message, message, {
type,
env: process.env.NODE_ENV || "development",
user,
record,
...object
});
}
module.exports = {log};
module.exports = { log };

View File

@@ -1,12 +1,7 @@
exports.servertime = (req, res) => {
res.status(200).send(new Date());
res.status(200).send(new Date());
};
exports.jsrAuth = async (req, res) => {
res.send(
"Basic " +
Buffer.from(
`${process.env.JSR_USER}:${process.env.JSR_PASSWORD}`
).toString("base64")
);
res.send("Basic " + Buffer.from(`${process.env.JSR_USER}:${process.env.JSR_PASSWORD}`).toString("base64"));
};

View File

@@ -1,269 +1,224 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const {io} = require("../../server");
const {admin} = require("../firebase/firebase-handler");
const {
default: CdkJobExport,
CdkSelectedCustomer,
} = require("../cdk/cdk-job-export");
const { io } = require("../../server");
const { admin } = require("../firebase/firebase-handler");
const { default: CdkJobExport, CdkSelectedCustomer } = require("../cdk/cdk-job-export");
const CdkGetMakes = require("../cdk/cdk-get-makes").default;
const CdkCalculateAllocations =
require("../cdk/cdk-calculate-allocations").default;
const {isArray} = require("lodash");
const CdkCalculateAllocations = require("../cdk/cdk-calculate-allocations").default;
const { isArray } = require("lodash");
const logger = require("../utils/logger");
const {
default: PbsExportJob,
PbsSelectedCustomer,
} = require("../accounting/pbs/pbs-job-export");
const { default: PbsExportJob, PbsSelectedCustomer } = require("../accounting/pbs/pbs-job-export");
const {
PbsCalculateAllocationsAp,
PbsExportAp,
} = require("../accounting/pbs/pbs-ap-allocations");
const { PbsCalculateAllocationsAp, PbsExportAp } = require("../accounting/pbs/pbs-ap-allocations");
io.use(function (socket, next) {
try {
if (socket.handshake.auth.token) {
admin
.auth()
.verifyIdToken(socket.handshake.auth.token)
.then((user) => {
socket.user = user;
next();
})
.catch((error) => {
next(new Error("Authentication error", JSON.stringify(error)));
});
} else {
next(new Error("Authentication error - no authorization token."));
}
} catch (error) {
console.log("Uncaught connection error:::", error);
logger.log("websocket-connection-error", "error", null, null, {
token: socket.handshake.auth.token,
...error,
try {
if (socket.handshake.auth.token) {
admin
.auth()
.verifyIdToken(socket.handshake.auth.token)
.then((user) => {
socket.user = user;
next();
})
.catch((error) => {
next(new Error("Authentication error", JSON.stringify(error)));
});
next(new Error(`Authentication error ${error}`));
} else {
next(new Error("Authentication error - no authorization token."));
}
} catch (error) {
console.log("Uncaught connection error:::", error);
logger.log("websocket-connection-error", "error", null, null, {
token: socket.handshake.auth.token,
...error
});
next(new Error(`Authentication error ${error}`));
}
});
io.on("connection", (socket) => {
socket.log_level = "TRACE";
createLogEvent(socket, "DEBUG", `Connected and Authenticated.`);
socket.log_level = "TRACE";
createLogEvent(socket, "DEBUG", `Connected and Authenticated.`);
socket.on("set-log-level", (level) => {
socket.log_level = level;
socket.emit("log-event", {
timestamp: new Date(),
level: "INFO",
message: `Updated log level to ${level}`,
});
socket.on("set-log-level", (level) => {
socket.log_level = level;
socket.emit("log-event", {
timestamp: new Date(),
level: "INFO",
message: `Updated log level to ${level}`
});
});
///CDK
socket.on("cdk-export-job", (jobid) => {
CdkJobExport(socket, jobid);
});
socket.on("cdk-selected-customer", (selectedCustomerId) => {
createLogEvent(
socket,
"DEBUG",
`User selected customer ID ${selectedCustomerId}`
);
socket.selectedCustomerId = selectedCustomerId;
CdkSelectedCustomer(socket, selectedCustomerId);
});
///CDK
socket.on("cdk-export-job", (jobid) => {
CdkJobExport(socket, jobid);
});
socket.on("cdk-selected-customer", (selectedCustomerId) => {
createLogEvent(socket, "DEBUG", `User selected customer ID ${selectedCustomerId}`);
socket.selectedCustomerId = selectedCustomerId;
CdkSelectedCustomer(socket, selectedCustomerId);
});
socket.on("cdk-get-makes", async (cdk_dealerid, callback) => {
try {
const makes = await CdkGetMakes(socket, cdk_dealerid);
callback(makes);
} catch (error) {
createLogEvent(
socket,
"ERROR",
`Error in cdk-get-makes WS call. ${JSON.stringify(error, null, 2)}`
);
}
});
socket.on("cdk-get-makes", async (cdk_dealerid, callback) => {
try {
const makes = await CdkGetMakes(socket, cdk_dealerid);
callback(makes);
} catch (error) {
createLogEvent(socket, "ERROR", `Error in cdk-get-makes WS call. ${JSON.stringify(error, null, 2)}`);
}
});
socket.on("cdk-calculate-allocations", async (jobid, callback) => {
const allocations = await CdkCalculateAllocations(socket, jobid);
createLogEvent(socket, "DEBUG", `Allocations calculated.`);
createLogEvent(
socket,
"TRACE",
`Allocations calculated. ${JSON.stringify(allocations, null, 2)}`
);
socket.on("cdk-calculate-allocations", async (jobid, callback) => {
const allocations = await CdkCalculateAllocations(socket, jobid);
createLogEvent(socket, "DEBUG", `Allocations calculated.`);
createLogEvent(socket, "TRACE", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`);
callback(allocations);
});
//END CDK
callback(allocations);
});
//END CDK
//PBS AR
socket.on("pbs-calculate-allocations", async (jobid, callback) => {
const allocations = await CdkCalculateAllocations(socket, jobid);
createLogEvent(socket, "DEBUG", `Allocations calculated.`);
createLogEvent(
socket,
"TRACE",
`Allocations calculated. ${JSON.stringify(allocations, null, 2)}`
);
//PBS AR
socket.on("pbs-calculate-allocations", async (jobid, callback) => {
const allocations = await CdkCalculateAllocations(socket, jobid);
createLogEvent(socket, "DEBUG", `Allocations calculated.`);
createLogEvent(socket, "TRACE", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`);
callback(allocations);
});
socket.on("pbs-export-job", (jobid) => {
PbsExportJob(socket, jobid);
});
socket.on("pbs-selected-customer", (selectedCustomerId) => {
createLogEvent(
socket,
"DEBUG",
`User selected customer ID ${selectedCustomerId}`
);
socket.selectedCustomerId = selectedCustomerId;
PbsSelectedCustomer(socket, selectedCustomerId);
});
//End PBS AR
callback(allocations);
});
socket.on("pbs-export-job", (jobid) => {
PbsExportJob(socket, jobid);
});
socket.on("pbs-selected-customer", (selectedCustomerId) => {
createLogEvent(socket, "DEBUG", `User selected customer ID ${selectedCustomerId}`);
socket.selectedCustomerId = selectedCustomerId;
PbsSelectedCustomer(socket, selectedCustomerId);
});
//End PBS AR
//PBS AP
socket.on("pbs-calculate-allocations-ap", async (billids, callback) => {
const allocations = await PbsCalculateAllocationsAp(socket, billids);
createLogEvent(socket, "DEBUG", `AP Allocations calculated.`);
createLogEvent(
socket,
"TRACE",
`Allocations calculated. ${JSON.stringify(allocations, null, 2)}`
);
socket.apAllocations = allocations;
callback(allocations);
});
//PBS AP
socket.on("pbs-calculate-allocations-ap", async (billids, callback) => {
const allocations = await PbsCalculateAllocationsAp(socket, billids);
createLogEvent(socket, "DEBUG", `AP Allocations calculated.`);
createLogEvent(socket, "TRACE", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`);
socket.apAllocations = allocations;
callback(allocations);
});
socket.on("pbs-export-ap", ({billids, txEnvelope}) => {
socket.txEnvelope = txEnvelope;
PbsExportAp(socket, {billids, txEnvelope});
});
socket.on("pbs-export-ap", ({ billids, txEnvelope }) => {
socket.txEnvelope = txEnvelope;
PbsExportAp(socket, { billids, txEnvelope });
});
//END PBS AP
//END PBS AP
socket.on("disconnect", () => {
createLogEvent(socket, "DEBUG", `User disconnected.`);
});
socket.on("disconnect", () => {
createLogEvent(socket, "DEBUG", `User disconnected.`);
});
});
function createLogEvent(socket, level, message) {
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
console.log(
`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${
socket.id
} - ${message}`
);
socket.emit("log-event", {
timestamp: new Date(),
level,
message,
});
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`);
socket.emit("log-event", {
timestamp: new Date(),
level,
message
});
logger.log("ws-log-event", level, socket.user.email, socket.recordid, {
wsmessage: message,
});
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
timestamp: new Date(),
level,
message,
});
}
// if (level === "ERROR") {
// throw new Error(message);
// }
}
}
function createJsonEvent(socket, level, message, json) {
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
console.log(
`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${
socket.id
} - ${message}`
);
socket.emit("log-event", {
timestamp: new Date(),
level,
message,
});
}
logger.log("ws-log-event-json", level, socket.user.email, socket.recordid, {
wsmessage: message,
json,
logger.log("ws-log-event", level, socket.user.email, socket.recordid, {
wsmessage: message
});
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
timestamp: new Date(),
level,
message,
});
socket.logEvents.push({
timestamp: new Date(),
level,
message
});
}
// if (level === "ERROR") {
// throw new Error(message);
// }
}
}
function createJsonEvent(socket, level, message, json) {
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) {
console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`);
socket.emit("log-event", {
timestamp: new Date(),
level,
message
});
}
logger.log("ws-log-event-json", level, socket.user.email, socket.recordid, {
wsmessage: message,
json
});
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
timestamp: new Date(),
level,
message
});
}
// if (level === "ERROR") {
// throw new Error(message);
// }
}
function createXmlEvent(socket, xml, message, isError = false) {
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy("TRACE")) {
socket.emit("log-event", {
timestamp: new Date(),
level: isError ? "ERROR" : "TRACE",
message: `${message}: ${xml}`,
});
}
if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy("TRACE")) {
socket.emit("log-event", {
timestamp: new Date(),
level: isError ? "ERROR" : "TRACE",
message: `${message}: ${xml}`
});
}
logger.log(
isError ? "ws-log-event-xml-error" : "ws-log-event-xml",
isError ? "ERROR" : "TRACE",
socket.user.email,
socket.recordid,
{
wsmessage: message,
xml,
}
);
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
timestamp: new Date(),
level: isError ? "ERROR" : "TRACE",
message,
xml,
});
logger.log(
isError ? "ws-log-event-xml-error" : "ws-log-event-xml",
isError ? "ERROR" : "TRACE",
socket.user.email,
socket.recordid,
{
wsmessage: message,
xml
}
);
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
timestamp: new Date(),
level: isError ? "ERROR" : "TRACE",
message,
xml
});
}
}
function LogLevelHierarchy(level) {
switch (level) {
case "XML":
return 5;
case "TRACE":
return 5;
case "DEBUG":
return 4;
case "INFO":
return 3;
case "WARNING":
return 2;
case "ERROR":
return 1;
default:
return 3;
}
switch (level) {
case "XML":
return 5;
case "TRACE":
return 5;
case "DEBUG":
return 4;
case "INFO":
return 3;
case "WARNING":
return 2;
case "ERROR":
return 1;
default:
return 3;
}
}
exports.createLogEvent = createLogEvent;