Lint all the things

This commit is contained in:
Dave
2025-08-19 16:23:29 -04:00
parent f6d6b548be
commit 33fb60ca1a
640 changed files with 2129 additions and 3927 deletions

View File

@@ -112,6 +112,7 @@ exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte
await InsertFailedExportLog(socket, error);
}
};
// Was Successful
async function CheckForErrors(socket, response) {
if (response.WasSuccessful === undefined || response.WasSuccessful === true) {
@@ -138,7 +139,7 @@ async function QueryVehicleFromDms(socket) {
try {
if (!socket.JobData.v_vin) return null;
const { data: VehicleGetResponse, request } = await axios.post(
const { data: VehicleGetResponse } = await axios.post(
PBS_ENDPOINTS.VehicleGet,
{
SerialNumber: socket.JobData.bodyshop.pbs_serialnumber,

View File

@@ -19,7 +19,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
@@ -65,36 +65,34 @@ exports.default = async (req, res) => {
vendorRecord = await InsertVendorRecord(oauthClient, qbo_realmId, req, bill);
}
const insertResults = await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
// //No error. Mark the job exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
await client.setHeaders({ Authorization: BearerToken }).request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
}
ret.push({ billid: bill.id, success: true });
} catch (error) {
logger.log("qbo-paybles-create-error", "ERROR", req.user.email, null, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error?.authResponse && error.authResponse.body) ||
error.response?.data?.Fault?.Error.map((e) => e.Detail).join(", ") ||
(error && error.message)
error?.message
});
ret.push({
billid: bill.id,
@@ -107,7 +105,7 @@ exports.default = async (req, res) => {
//Add the export log error.
if (elgen) {
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
@@ -141,7 +139,9 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(bill.vendor.name)}'`
`select *
From vendor
where DisplayName = '${StandardizeName(bill.vendor.name)}'`
),
method: "POST",
headers: {
@@ -156,7 +156,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -194,7 +194,7 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Vendor;
} catch (error) {
@@ -263,11 +263,11 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DocNumber: bill.invoice_number,
//...(bill.job.class ? { ClassRef: { Id: classes[bill.job.class] } } : {}),
...(!(
@@ -280,8 +280,8 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
: {}),
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
PrivateNote: `RO ${bill.job.ro_number || ""}`,
Line: lines
};
@@ -305,7 +305,7 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Bill;
} catch (error) {
@@ -353,8 +353,8 @@ const generateBillLine = (
accountingconfig.qbo && accountingconfig.qbo_usa && region_config.includes("CA_")
? {}
: {
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
AccountRef: {
value: accounts[account.accountname]
}
@@ -373,7 +373,9 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Account where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
`select *
From Account
where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
),
method: "POST",
headers: {
@@ -387,10 +389,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: accounts.response?.status,
bodyshopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, accounts);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -403,9 +410,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: taxCodes.status,
bodyshopid,
email: req.user.email
})
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Class`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -418,7 +430,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: classes.status,
bodyshopid,
email: req.user.email
})
});
const taxCodeMapping = {};
taxCodes.json &&

View File

@@ -12,7 +12,6 @@ const { refresh: refreshOauthToken, setNewRefreshToken } = require("./qbo-callba
const OAuthClient = require("intuit-oauth");
const CreateInvoiceLines = require("../qb-receivables-lines").default;
const moment = require("moment-timezone");
const GraphQLClient = require("graphql-request").GraphQLClient;
const { generateOwnerTier } = require("../qbxml/qbxml-utils");
const { createMultiQbPayerLines } = require("../qb-receivables-lines");
@@ -21,7 +20,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
@@ -226,7 +225,10 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}' and Active = true`
`select *
From Customer
where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}'
and Active = true`
),
method: "POST",
headers: {
@@ -241,7 +243,7 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -296,7 +298,7 @@ async function InsertInsuranceCo(oauthClient, qbo_realmId, req, job, bodyshop) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -316,7 +318,10 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(ownerName)}' and Active = true`
`select *
From Customer
where DisplayName = '${StandardizeName(ownerName)}'
and Active = true`
),
method: "POST",
headers: {
@@ -331,7 +336,7 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -358,11 +363,11 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
...(job.ownr_ea ? { PrimaryEmailAddr: { Address: job.ownr_ea.trim() } } : {}),
...(isThreeTier
? {
Job: true,
ParentRef: {
value: parentTierRef.Id
Job: true,
ParentRef: {
value: parentTierRef.Id
}
}
}
: {})
};
try {
@@ -382,7 +387,7 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -401,7 +406,10 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${job.ro_number}' and Active = true`
`select *
From Customer
where DisplayName = '${job.ro_number}'
and Active = true`
),
method: "POST",
headers: {
@@ -416,7 +424,7 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -464,7 +472,7 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -480,7 +488,13 @@ exports.InsertJob = InsertJob;
async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item where active=true maxresults 1000`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Item
where active = true maxresults 1000`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -494,10 +508,16 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, items);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode where active=true`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode
where active = true`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -511,9 +531,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Class`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -527,7 +552,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
const taxCodeMapping = {};
taxCodes.json &&
@@ -578,55 +603,57 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
DocNumber: job.ro_number,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}
}
}),
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -660,7 +687,7 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {
@@ -702,56 +729,58 @@ async function InsertInvoiceMultiPayerInvoice(
DocNumber: job.ro_number + suffix,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_") && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}
}
}),
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -785,7 +814,7 @@ async function InsertInvoiceMultiPayerInvoice(
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {

View File

@@ -1,4 +1,3 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");

View File

@@ -1,10 +1,12 @@
exports.addQbxmlHeader = addQbxmlHeader = (xml) => {
/**
 * Wraps a qbXML request body with the standard processing instructions
 * (XML declaration plus `<?qbxml version="13.0"?>`) required by QuickBooks.
 * @param {string} xml - The qbXML payload to wrap.
 * @returns {string} The payload preceded by both headers, ending in a newline.
 */
function addQbxmlHeader(xml) {
  return `<?xml version="1.0" encoding="utf-8"?>
<?qbxml version="13.0"?>
${xml}
`;
}
exports.addQbxmlHeader = addQbxmlHeader;
exports.generateSourceTier = (jobs_by_pk) => {
return jobs_by_pk.ins_co_nm && jobs_by_pk.ins_co_nm.trim().replace(":", " ");
};

View File

@@ -1,6 +1,5 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)

View File

@@ -1,5 +1,4 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;

View File

@@ -6,7 +6,7 @@
const decodeComment = (comment) => {
try {
return comment ? JSON.parse(Buffer.from(comment, "base64").toString()) : null;
} catch (error) {
} catch {
return null; // Handle malformed base64 string gracefully
}
};

View File

@@ -1,5 +1,3 @@
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const path = require("path");
const logger = require("../utils/logger");
@@ -11,30 +9,10 @@ require("dotenv").config({
exports.default = async (req, res) => {
const { useremail, bodyshopid, operationName, variables, env, time, dbevent, user } = req.body;
const {
ioRedis,
ioHelpers: { getBodyshopRoom }
} = req;
try {
// await client.request(queries.INSERT_IOEVENT, {
// event: {
// operationname: operationName,
// time,
// dbevent,
// env,
// variables,
// bodyshopid,
// useremail
// }
// });
// ioRedis.to(getBodyshopRoom(bodyshopid)).emit("bodyshop-message", {
// operationName,
// useremail
// });
res.sendStatus(200);
} catch (error) {
} catch {
logger.log("ioevent-error", "silly", user, null, {
operationname: operationName,
time,

View File

@@ -1,8 +1,6 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
//const client = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
@@ -18,7 +16,8 @@ async function StatusTransition(req, res) {
const { id: jobid, status: value, shopid: bodyshopid } = req.body.event.data.new;
// Create record OPEN on new item, enter state
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record, create a new record saying we came from previous status going to previous status
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record,
// create a new record saying we came from previous status going to previous status
// (Timeline)
// Final status is exported, there is no end date as there is no further transition (has no end date)
try {

View File

@@ -1,10 +1,7 @@
const path = require("path");
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
@@ -91,11 +88,11 @@ const getThumbnailUrls = async (req, res) => {
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//If there's no jobid and no billid, we're in temporary documents.
const data = await (
billid ? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid }) :
jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const data = await (billid
? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid })
: jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
const s3client = new S3Client({ region: InstanceRegion() });
@@ -106,7 +103,7 @@ const getThumbnailUrls = async (req, res) => {
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension.
let key = keyStandardize(document)
let key = keyStandardize(document);
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
@@ -193,7 +190,10 @@ const downloadFiles = async (req, res) => {
// Handle zipfile stream errors
zipfile.outputStream.on("error", (err) => {
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, {
message: err.message,
stack: err.stack
});
// Cannot send another response here, just destroy the connection
res.destroy(err);
});
@@ -202,7 +202,7 @@ const downloadFiles = async (req, res) => {
try {
for (const doc of data.documents) {
let key = keyStandardize(doc)
let key = keyStandardize(doc);
let response;
try {
response = await s3client.send(
@@ -212,13 +212,21 @@ const downloadFiles = async (req, res) => {
})
);
} catch (err) {
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
// Optionally, skip this file or add a placeholder file in the zip
continue;
}
// Attach error handler to S3 stream
response.Body.on("error", (err) => {
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
res.destroy(err);
});
zipfile.addReadStream(response.Body, path.basename(key));
@@ -393,7 +401,6 @@ const keyStandardize = (doc) => {
}
};
module.exports = {
generateSignedUploadUrls,
getThumbnailUrls,

View File

@@ -1,5 +1,3 @@
const path = require("path");
/**
* Checks if the event secret is correct
* It adds the following properties to the request object:

View File

@@ -1,5 +1,4 @@
const path = require("path");
const _ = require("lodash");
const xml2js = require("xml2js");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");

View File

@@ -280,7 +280,7 @@ const getQueue = () => {
/**
* Dispatches notifications to the `addQueue` for processing.
*/
const dispatchAppsToQueue = async ({ appsToDispatch, logger }) => {
const dispatchAppsToQueue = async ({ appsToDispatch }) => {
const appQueue = getQueue();
for (const app of appsToDispatch) {

View File

@@ -1,6 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
@@ -8,6 +7,12 @@ const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require(".
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
/**
 * Null-safe dotted-path lookup: `get(obj, "a.b.c")` walks `obj.a.b.c`,
 * short-circuiting and returning the first `undefined`/`null` encountered
 * instead of throwing on a missing intermediate property.
 * @param {object} obj - Root object to walk.
 * @param {string} key - Dot-separated property path.
 * @returns {*} The value at the path, or `undefined`/`null` if the walk hit one.
 */
const get = (obj, key) => {
  let current = obj;
  for (const segment of key.split(".")) {
    if (current === undefined || current === null) {
      return current;
    }
    current = current[segment];
  }
  return current;
};
exports.calculatelabor = async function (req, res) {
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-calculate-labor", "DEBUG", req.user.email, jobid, null);
@@ -99,9 +104,3 @@ exports.calculatelabor = async function (req, res) {
res.status(503).send();
}
};
get = function (obj, key) {
return key.split(".").reduce(function (o, x) {
return typeof o == "undefined" || o === null ? o : o[x];
}, obj);
};

View File

@@ -1,8 +1,7 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
const { CalculateExpectedHoursForJob } = require("./pay-all");
const moment = require("moment");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";

View File

@@ -1,6 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const _ = require("lodash");
const rdiff = require("recursive-diff");
const logger = require("../utils/logger");

View File

@@ -1,10 +1,9 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const _ = require("lodash");
const { filter } = require("lodash");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
@@ -26,12 +25,10 @@ exports.job = async (req, res) => {
});
const { jobs_by_pk, blockedDays, prodJobs, arrJobs, compJobs } = result;
const { ssbuckets, workingdays, timezone, ss_configuration } = result.jobs_by_pk.bodyshop;
const jobHrs = result.jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const { ssbuckets, workingdays, timezone, ss_configuration } = jobs_by_pk.bodyshop;
const jobHrs = jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const JobBucket = ssbuckets.filter(
(bucket) => bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
)[0];
const JobBucket = ssbuckets.filter((bucket) => bucket.gte <= jobHrs && (bucket.lt ? bucket.lt > jobHrs : true))[0];
const load = {
productionTotal: {},
productionHours: 0
@@ -73,7 +70,7 @@ exports.job = async (req, res) => {
.tz(timezone)
.format("yyyy-MM-DD");
if (isSameBucket) {
if (!!load[itemDate]) {
if (load[itemDate]) {
load[itemDate].hoursIn = (load[itemDate].hoursIn || 0) + AddJobForSchedulingCalc ? jobHours : 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsIn.push(item);
} else {
@@ -97,7 +94,6 @@ exports.job = async (req, res) => {
});
//Get the completing jobs.
let problemJobs = [];
const filteredCompJobs = compJobs.filter((j) => JobBucket.id === CheckJobBucket(ssbuckets, j));
filteredCompJobs.forEach((item) => {
@@ -109,7 +105,7 @@ exports.job = async (req, res) => {
const itemDate = moment(item.actual_completion || item.scheduled_completion)
.tz(timezone)
.format("yyyy-MM-DD");
if (!!load[itemDate]) {
if (load[itemDate]) {
load[itemDate].hoursOut =
(load[itemDate].hoursOut || 0) + AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs + item.larhrs.aggregate.sum.mod_lb_hrs
@@ -143,7 +139,7 @@ exports.job = async (req, res) => {
.tz(timezone)
.add(day - 1, "days")
.format("yyyy-MM-DD");
if (!!!load[current]) {
if (!load[current]) {
load[current] = {};
}
if (day === 0) {

View File

@@ -291,7 +291,13 @@ const receive = async (req, res) => {
res.status(200).send("");
} catch (e) {
handleError(req, e, res, "RECEIVE_MESSAGE", logger);
handleError({
req,
res,
logger,
error: e,
context: "RECEIVE_MESSAGE"
});
}
};
@@ -301,11 +307,11 @@ const receive = async (req, res) => {
* @param logger
* @returns {null|*[]}
*/
const generateMediaArray = (body, logger) => {
const generateMediaArray = (body) => {
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
if (parseInt(NumMedia, 10) > 0) {
const ret = [];
for (let i = 0; i < parseInt(NumMedia); i++) {
for (let i = 0; i < parseInt(NumMedia, 10); i++) {
ret.push(body[`MediaUrl${i}`]);
}
return ret;
@@ -322,7 +328,7 @@ const generateMediaArray = (body, logger) => {
* @param context
* @param logger
*/
const handleError = (req, error, res, context, logger) => {
const handleError = ({ req, error, res, context, logger }) => {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,

View File

@@ -11,7 +11,7 @@ const gqlClient = require("../graphql-client/graphql-client").client;
* @returns {Promise<void>}
*/
const send = async (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
const { to, messagingServiceSid, body, conversationid, selectedMedia } = req.body;
const {
ioRedis,
logger,

View File

@@ -139,7 +139,7 @@ const status = async (req, res) => {
const markConversationRead = async (req, res) => {
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
ioHelpers: { getBodyshopRoom }
} = req;
const { conversation, imexshopid, bodyshopid } = req.body;

View File

@@ -77,9 +77,6 @@ function functionMapper(f, timezone) {
return moment().tz(timezone).add(3, "day");
case "date.7daysfromnowtz":
return moment().tz(timezone).add(7, "day");
case "date.now":
return moment().tz(timezone);
default:
return f;
}

View File

@@ -1,4 +1,4 @@
import { describe, it, expect } from "vitest";
import { describe, expect, it } from "vitest";
function add(a, b) {
return a + b;

View File

@@ -9,7 +9,7 @@ const calculateStatusDuration = (transitions, statuses) => {
let totalCurrentStatusDuration = null;
let summations = [];
transitions.forEach((transition, index) => {
transitions.forEach((transition) => {
let duration = transition.duration;
totalDuration += duration;
if (transition.start && !transition.end) {

View File

@@ -6,7 +6,7 @@
* @param logger
* @returns {{getBodyshopRoom: (function(*): string), getBodyshopConversationRoom: (function({bodyshopId: *, conversationId: *}): string)}}
*/
const applyIOHelpers = ({ app, api, io, logger }) => {
const applyIOHelpers = ({ app }) => {
// Global Bodyshop Room
const getBodyshopRoom = (bodyshopId) => `bodyshop-broadcast-room:${bodyshopId}`;

View File

@@ -15,7 +15,6 @@ const getHostNameOrIP = require("./getHostNameOrIP");
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
warn: { level: 1, name: "warn" },
@@ -38,7 +37,7 @@ const region = InstanceRegion();
const estimateLogSize = (logEntry) => {
let estimatedSize = 0;
for (const key in logEntry) {
if (logEntry.hasOwnProperty(key)) {
if (Object.prototype.hasOwnProperty.call(logEntry, key)) {
const value = logEntry[key];
if (value === undefined || value === null) {
estimatedSize += key.length; // Only count the key length if value is undefined or null
@@ -102,11 +101,13 @@ const createLogger = () => {
const labelColor = "\x1b[33m"; // Yellow
const separatorColor = "\x1b[35m|\x1b[0m"; // Magenta for separators
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${meta
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${
user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${
meta
? `\n${separatorColor} ${labelColor}meta:\x1b[0m ${JSON.stringify(meta, null, 2)} ${separatorColor}`
: ""
}`;
}`;
})
)
})
@@ -195,9 +196,19 @@ const createLogger = () => {
winstonLogger.log(logEntry);
};
const LogIntegrationCall = async ({ platform, method, name, jobid, paymentid, billid, status, bodyshopid, email }) => {
const LogIntegrationCall = async ({
platform,
method,
name,
jobid,
paymentid,
billid,
status,
bodyshopid,
email
}) => {
try {
//Insert the record.
//Insert the record.
await client.request(queries.INSERT_INTEGRATION_LOG, {
log: {
platform,
@@ -211,7 +222,6 @@ const createLogger = () => {
email
}
});
} catch (error) {
console.trace("Stack", error?.stack);
log("integration-log-error", "ERROR", email, null, {

View File

@@ -182,7 +182,6 @@ const redisSocketEvents = ({
const registerSyncEvents = (socket) => {
socket.on("sync-notification-read", async ({ email, bodyshopId, notificationId }) => {
try {
const userEmail = socket.user.email;
const socketMapping = await getUserSocketMappingByBodyshop(email, bodyshopId);
const timestamp = new Date().toISOString();