Merge branch 'master-AIO' into feature/IO-3020-IO-3036-imex-lite-rome-lite

This commit is contained in:
Patrick Fic
2024-12-04 12:58:23 -08:00
81 changed files with 4206 additions and 3939 deletions

View File

@@ -547,6 +547,61 @@ exports.default = function ({ bodyshop, jobs_by_pk, qbo = false, items, taxCodes
}
}
if (jobs_by_pk.job_totals.totals?.ttl_adjustment) {
// Do not need to check for ImEX or Rome because ImEX uses a different totals calculation that will never set this field.
if (qbo) {
const taxAccountCode = findTaxCode(
{
local: false,
federal: InstanceManager({ imex: true, rome: false }),
state: jobs_by_pk.tax_lbr_rt === 0 ? false : true
},
bodyshop.md_responsibility_centers.sales_tax_codes
);
const QboTaxId = InstanceManager({
imex: taxCodes[taxAccountCode],
rome: CheckQBOUSATaxID({
// jobline: jobline,
type: "adjustment",
job: jobs_by_pk
})
});
InvoiceLineAdd.push({
DetailType: "SalesItemLineDetail",
Amount: Dinero(jobs_by_pk.job_totals.totals?.ttl_adjustment).toFormat(DineroQbFormat),
SalesItemLineDetail: {
...(jobs_by_pk.class ? { ClassRef: { value: classes[jobs_by_pk.class] } } : {}),
ItemRef: {
value: items[responsibilityCenters.ttl_adjustment?.accountitem]
},
TaxCodeRef: {
value: QboTaxId
},
Qty: 1
}
});
} else {
InvoiceLineAdd.push({
ItemRef: {
FullName: responsibilityCenters.ttl_adjustment?.accountitem
},
Desc: "Adjustment",
Quantity: 1,
Amount: Dinero(jobs_by_pk.job_totals.totals?.ttl_adjustment).toFormat(DineroQbFormat),
SalesTaxCodeRef: InstanceManager({
imex: {
FullName: "E"
},
rome: {
FullName: bodyshop.md_responsibility_centers.taxes.itemexemptcode || "NON"
}
})
});
}
}
//Add tax lines
const job_totals = jobs_by_pk.job_totals;
const federal_tax = Dinero(job_totals.totals.federal_tax);
@@ -823,7 +878,60 @@ exports.default = function ({ bodyshop, jobs_by_pk, qbo = false, items, taxCodes
}
}
}
if (jobs_by_pk.job_totals.totals.ttl_tax_adjustment) {
// Do not need to check for ImEX or Rome because ImEX uses a different totals calculation that will never set this field.
if (qbo) {
const taxAccountCode = findTaxCode(
{
local: false,
federal: InstanceManager({ imex: true, rome: false }),
state: jobs_by_pk.tax_lbr_rt === 0 ? false : true
},
bodyshop.md_responsibility_centers.sales_tax_codes
);
const QboTaxId = InstanceManager({
imex: taxCodes[taxAccountCode],
rome: CheckQBOUSATaxID({
// jobline: jobline,
type: "adjustment",
job: jobs_by_pk
})
});
InvoiceLineAdd.push({
DetailType: "SalesItemLineDetail",
Amount: Dinero(jobs_by_pk.job_totals.totals?.ttl_tax_adjustment).toFormat(DineroQbFormat),
SalesItemLineDetail: {
...(jobs_by_pk.class ? { ClassRef: { value: classes[jobs_by_pk.class] } } : {}),
ItemRef: {
value: items[responsibilityCenters.ttl_tax_adjustment?.accountitem]
},
TaxCodeRef: {
value: QboTaxId
},
Qty: 1
}
});
} else {
InvoiceLineAdd.push({
ItemRef: {
FullName: responsibilityCenters.ttl_tax_adjustment?.accountitem
},
Desc: "Tax Adjustment",
Quantity: 1,
Amount: Dinero(jobs_by_pk.job_totals.totals?.ttl_tax_adjustment).toFormat(DineroQbFormat),
SalesTaxCodeRef: InstanceManager({
imex: {
FullName: "E"
},
rome: {
FullName: bodyshop.md_responsibility_centers.taxes.itemexemptcode || "NON"
}
})
});
}
}
if (!qbo && InvoiceLineAdd.length === 0) {
//Handle the scenario where there is a $0 sale invoice.
InvoiceLineAdd.push({

View File

@@ -351,6 +351,7 @@ function calculateAllocations(connectionData, job) {
// console.log("NO MASH ACCOUNT FOUND!!");
}
}
if (InstanceManager({ rome: true })) {
//profile level adjustments for parts
Object.keys(job.job_totals.parts.adjustments).forEach((key) => {
@@ -426,6 +427,41 @@ function calculateAllocations(connectionData, job) {
} else {
return { ...taxAllocations[key], tax: key };
}
})
}),
...(job.job_totals.totals.ttl_adjustment
? [
{
center: "SUB ADJ",
sale: Dinero(job.job_totals.totals.ttl_adjustment),
cost: Dinero(),
profitCenter: {
name: "SUB ADJ",
accountdesc: "SUB ADJ",
accountitem: "SUB ADJ",
accountname: "SUB ADJ",
dms_acctnumber: bodyshop.md_responsibility_centers.ttl_adjustment.dms_acctnumber
},
costCenter: {}
}
]
: []),
...(job.job_totals.totals.ttl_tax_adjustment
? [
{
center: "TAX ADJ",
sale: Dinero(job.job_totals.totals.ttl_tax_adjustment),
cost: Dinero(),
profitCenter: {
name: "TAX ADJ",
accountdesc: "TAX ADJ",
accountitem: "TAX ADJ",
accountname: "TAX ADJ",
dms_acctnumber: bodyshop.md_responsibility_centers.ttl_tax_adjustment.dms_acctnumber
},
costCenter: {}
}
]
: [])
];
}

View File

@@ -25,15 +25,15 @@ const ftpSetup = {
port: process.env.AUTOHOUSE_PORT,
username: process.env.AUTOHOUSE_USER,
password: process.env.AUTOHOUSE_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
debug:
process.env.NODE_ENV !== "production"
? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
: () => {},
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
@@ -55,12 +55,13 @@ exports.default = async (req, res) => {
try {
logger.log("autohouse-start", "DEBUG", "api", null, null);
const allXMLResults = [];
const allErrors = [];
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("autohouse-shopsToProcess-generated", "DEBUG", "api", null, null);
@@ -69,27 +70,18 @@ exports.default = async (req, res) => {
logger.log("autohouse-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await processShopData(shopsToProcess, start, end, skipUpload, allXMLResults, allErrors);
await sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
allXMLResults.map((x) => ({
imexshopid: x.imexshopid,
filename: x.filename,
count: x.count,
result: x.result
})),
null,
2
)}`
@@ -101,8 +93,8 @@ exports.default = async (req, res) => {
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
async function processShopData(shopsToProcess, start, end, skipUpload, allXMLResults, allErrors) {
for (const bodyshop of shopsToProcess) {
const erroredJobs = [];
try {
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
@@ -132,12 +124,27 @@ async function processBatch(batch, start, end) {
});
}
const ret = builder.create({}, autoHouseObject).end({ allowEmptyTags: true });
const xmlObj = {
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
xml: builder.create({}, autoHouseObject).end({ allowEmptyTags: true }),
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`,
count: autoHouseObject.AutoHouseExport.RepairOrder.length
};
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
if (skipUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
} else {
await uploadViaSFTP(xmlObj);
}
allXMLResults.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
count: xmlObj.count,
filename: xmlObj.filename,
result: xmlObj.result
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
@@ -169,33 +176,35 @@ async function processBatch(batch, start, end) {
}
}
async function uploadViaSFTP(allxmlsToUpload) {
async function uploadViaSFTP(xmlObj) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("autohouse-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
logger.log("autohouse-sftp-connection-error", "ERROR", "api", xmlObj.bodyshopid, {
error: errors.message,
stack: errors.stack
})
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("autohouse-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("autohouse-sftp-upload", "DEBUG", "api", xmlObj.bodyshopid, {
imexshopid: xmlObj.imexshopid,
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("autohouse-sftp-upload-error", "ERROR", "api", xmlObj.bodyshopid, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
logger.log("autohouse-sftp-error", "ERROR", "api", xmlObj.bodyshopid, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();

View File

@@ -17,15 +17,15 @@ const ftpSetup = {
port: process.env.CHATTER_PORT,
username: process.env.CHATTER_USER,
privateKey: null,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
debug:
process.env.NODE_ENV !== "production"
? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
: () => {},
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allcsvsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
@@ -47,12 +47,13 @@ exports.default = async (req, res) => {
try {
logger.log("chatter-start", "DEBUG", "api", null, null);
const allChatterObjects = [];
const allErrors = [];
const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("chatter-shopsToProcess-generated", "DEBUG", "api", null, null);
@@ -62,29 +63,24 @@ exports.default = async (req, res) => {
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const csvObj of allcsvsToUpload) {
await fs.promises.writeFile(`./logs/${csvObj.filename}`, csvObj.csv);
}
} else {
await uploadViaSFTP(allcsvsToUpload);
}
})();
batchPromises.push(batchPromise);
await processBatch(shopsToProcess, start, end, allChatterObjects, allErrors);
const csvToUpload = {
count: allChatterObjects.length,
csv: converter.json2csv(allChatterObjects, { emptyFieldValue: "" }),
filename: `IMEX_ONLINE_solicitation_${moment().format("YYYYMMDD")}.csv`
};
if (skipUpload) {
await fs.promises.writeFile(`./logs/${csvToUpload.filename}`, csvToUpload.csv);
} else {
await uploadViaSFTP(csvToUpload);
}
await Promise.all(batchPromises);
await sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\n
Uploaded:\n${JSON.stringify({ filename: csvToUpload.filename, count: csvToUpload.count, result: csvToUpload.result }, null, 2)}`
});
logger.log("chatter-end", "DEBUG", "api", null, null);
@@ -93,8 +89,8 @@ exports.default = async (req, res) => {
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
async function processBatch(shopsToProcess, start, end, allChatterObjects, allErrors) {
for (const bodyshop of shopsToProcess) {
try {
logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
@@ -113,18 +109,11 @@ async function processBatch(batch, start, end) {
lastname: j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
transaction_id: j.ro_number,
email: j.ownr_ea,
phone_number: j.ownr_ph1
phone_number: j.ownr_ph1,
transaction_time: (j.actual_delivery && moment(j.actual_delivery).tz(bodyshop.timezone).format("YYYYMMDD-HHmm")) || ""
};
});
const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" });
allcsvsToUpload.push({
count: chatterObject.length,
csv: ret,
filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv`
});
allChatterObjects.push(...chatterObject);
logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
@@ -166,7 +155,7 @@ async function getPrivateKey() {
}
}
async function uploadViaSFTP(allcsvsToUpload) {
async function uploadViaSFTP(csvToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("chatter-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
@@ -178,21 +167,19 @@ async function uploadViaSFTP(allcsvsToUpload) {
//Connect to the FTP and upload all.
await sftp.connect({ ...ftpSetup, privateKey });
for (const csvObj of allcsvsToUpload) {
try {
csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
logger.log("chatter-sftp-upload", "DEBUG", "api", null, {
filename: csvObj.filename,
result: csvObj.result
});
} catch (error) {
logger.log("chatter-sftp-upload-error", "ERROR", "api", null, {
filename: csvObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
try {
csvToUpload.result = await sftp.put(Buffer.from(csvToUpload.csv), `${csvToUpload.filename}`);
logger.log("chatter-sftp-upload", "DEBUG", "api", null, {
filename: csvToUpload.filename,
result: csvToUpload.result
});
} catch (error) {
logger.log("chatter-sftp-upload-error", "ERROR", "api", null, {
filename: csvToUpload.filename,
error: csvToUpload.message,
stack: csvToUpload.stack
});
throw error;
}
} catch (error) {
logger.log("chatter-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });

View File

@@ -24,15 +24,15 @@ const ftpSetup = {
port: process.env.CLAIMSCORP_PORT,
username: process.env.CLAIMSCORP_USER,
password: process.env.CLAIMSCORP_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
debug:
process.env.NODE_ENV !== "production"
? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
: () => {},
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
@@ -54,12 +54,13 @@ exports.default = async (req, res) => {
try {
logger.log("claimscorp-start", "DEBUG", "api", null, null);
const allXMLResults = [];
const allErrors = [];
const { bodyshops } = await client.request(queries.GET_CLAIMSCORP_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("claimscorp-shopsToProcess-generated", "DEBUG", "api", null, null);
@@ -68,27 +69,18 @@ exports.default = async (req, res) => {
logger.log("claimscorp-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await processShopData(shopsToProcess, start, end, skipUpload, allXMLResults, allErrors);
await sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
allXMLResults.map((x) => ({
imexshopid: x.imexshopid,
filename: x.filename,
count: x.count,
result: x.result
})),
null,
2
)}`
@@ -100,8 +92,8 @@ exports.default = async (req, res) => {
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
async function processShopData(shopsToProcess, start, end, skipUpload, allXMLResults, allErrors) {
for (const bodyshop of shopsToProcess) {
const erroredJobs = [];
try {
logger.log("claimscorp-start-shop-extract", "DEBUG", "api", bodyshop.id, {
@@ -135,12 +127,27 @@ async function processBatch(batch, start, end) {
});
}
const ret = builder.create({}, claimsCorpObject).end({ allowEmptyTags: true });
const xmlObj = {
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
xml: builder.create({}, claimsCorpObject).end({ allowEmptyTags: true }),
filename: `${bodyshop.claimscorpid}-${moment().format("YYYYMMDDTHHMMss")}.xml`,
count: claimsCorpObject.DataFeed.ShopInfo.RO.length
};
allxmlsToUpload.push({
count: claimsCorpObject.DataFeed.ShopInfo.RO.length,
xml: ret,
filename: `${bodyshop.claimscorpid}-${moment().format("YYYYMMDDTHHMMss")}.xml`
if (skipUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
} else {
await uploadViaSFTP(xmlObj);
}
allXMLResults.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
count: xmlObj.count,
filename: xmlObj.filename,
result: xmlObj.result
});
logger.log("claimscorp-end-shop-extract", "DEBUG", "api", bodyshop.id, {
@@ -172,33 +179,38 @@ async function processBatch(batch, start, end) {
}
}
async function uploadViaSFTP(allxmlsToUpload) {
async function uploadViaSFTP(xmlObj) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("claimscorp-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
logger.log("claimscorp-sftp-connection-error", "ERROR", "api", xmlObj.bodyshopid, {
error: errors.message,
stack: errors.stack
})
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("claimscorp-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("claimscorp-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("claimscorp-sftp-upload", "DEBUG", "api", xmlObj.bodyshopid, {
imexshopid: xmlObj.imexshopid,
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("claimscorp-sftp-upload-error", "ERROR", "api", xmlObj.bodyshopid, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
} catch (error) {
logger.log("claimscorp-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
logger.log("claimscorp-sftp-error", "ERROR", "api", xmlObj.bodyshopid, {
error: error.message,
stack: error.stack
});
throw error;
} finally {
sftp.end();

View File

@@ -23,15 +23,15 @@ const ftpSetup = {
port: process.env.KAIZEN_PORT,
username: process.env.KAIZEN_USER,
password: process.env.KAIZEN_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
debug:
process.env.NODE_ENV !== "production"
? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
: () => {},
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
@@ -53,12 +53,13 @@ exports.default = async (req, res) => {
try {
logger.log("kaizen-start", "DEBUG", "api", null, null);
const allXMLResults = [];
const allErrors = [];
const { bodyshops } = await client.request(queries.GET_KAIZEN_SHOPS, { imexshopid: kaizenShopsIDs }); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("kaizen-shopsToProcess-generated", "DEBUG", "api", null, null);
@@ -67,27 +68,18 @@ exports.default = async (req, res) => {
logger.log("kaizen-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await processShopData(shopsToProcess, start, end, skipUpload, allXMLResults, allErrors);
await sendServerEmail({
subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
allXMLResults.map((x) => ({
imexshopid: x.imexshopid,
filename: x.filename,
count: x.count,
result: x.result
})),
null,
2
)}`
@@ -99,8 +91,8 @@ exports.default = async (req, res) => {
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
async function processShopData(shopsToProcess, start, end, skipUpload, allXMLResults, allErrors) {
for (const bodyshop of shopsToProcess) {
const erroredJobs = [];
try {
logger.log("kaizen-start-shop-extract", "DEBUG", "api", bodyshop.id, {
@@ -133,12 +125,26 @@ async function processBatch(batch, start, end) {
});
}
const ret = builder.create({}, kaizenObject).end({ allowEmptyTags: true });
const xmlObj = {
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
xml: builder.create({}, kaizenObject).end({ allowEmptyTags: true }),
filename: `${bodyshop.shopname}-${moment().format("YYYYMMDDTHHMMss")}.xml`,
count: kaizenObject.DataFeed.ShopInfo.Jobs.length
};
allxmlsToUpload.push({
count: kaizenObject.DataFeed.ShopInfo.Jobs.length,
xml: ret,
filename: `${bodyshop.shopname}-${moment().format("YYYYMMDDTHHMMss")}.xml`
if (skipUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
} else {
await uploadViaSFTP(xmlObj);
}
allXMLResults.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
count: xmlObj.count,
filename: xmlObj.filename,
result: xmlObj.result
});
logger.log("kaizen-end-shop-extract", "DEBUG", "api", bodyshop.id, {
@@ -170,33 +176,35 @@ async function processBatch(batch, start, end) {
}
}
async function uploadViaSFTP(allxmlsToUpload) {
async function uploadViaSFTP(xmlObj) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("kaizen-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
logger.log("kaizen-sftp-connection-error", "ERROR", "api", xmlObj.bodyshopid, {
error: errors.message,
stack: errors.stack
})
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("kaizen-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("kaizen-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("kaizen-sftp-upload", "DEBUG", "api", xmlObj.bodyshopid, {
imexshopid: xmlObj.imexshopid,
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("kaizen-sftp-upload-error", "ERROR", "api", xmlObj.bodyshopid, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
} catch (error) {
logger.log("kaizen-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
logger.log("kaizen-sftp-error", "ERROR", "api", xmlObj.bodyshopid, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();

View File

@@ -253,7 +253,7 @@ const sendNotification = async (req, res) => {
admin
.messaging()
.send({
topic: "PRD_PATRICK-messaging",
topic: req.body.topic,
notification: {
title: `ImEX Online Message - `,
body: "Test Noti."

View File

@@ -4,6 +4,7 @@ query FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID($mssid: String!, $phone: String!) {
id
conversations(where: { phone_num: { _eq: $phone } }) {
id
created_at
}
}
}`;
@@ -87,6 +88,21 @@ mutation RECEIVE_MESSAGE($msg: [messages_insert_input!]!) {
updated_at
unreadcnt
phone_num
label
job_conversations {
job {
id
ro_number
ownr_fn
ownr_ln
ownr_co_nm
}
}
messages_aggregate (where: { read: { _eq: false }, isoutbound: { _eq: false } }){
aggregate {
count
}
}
}
conversationid
created_at
@@ -116,6 +132,7 @@ mutation INSERT_MESSAGE($msg: [messages_insert_input!]!, $conversationid: uuid!)
id
archived
bodyshop {
id
imexshopid
}
created_at
@@ -144,6 +161,11 @@ mutation UPDATE_MESSAGE($msid: String!, $fields: messages_set_input!) {
update_messages(where: { msid: { _eq: $msid } }, _set: $fields) {
returning {
id
status
conversationid
conversation{
bodyshopid
}
}
}
}`;
@@ -840,6 +862,7 @@ exports.CHATTER_QUERY = `query CHATTER_EXPORT($start: timestamptz, $bodyshopid:
timezone
}
jobs(where: {_and: [{converted: {_eq: true}}, {actual_delivery: {_gt: $start}}, {actual_delivery: {_lte: $end}}, {shopid: {_eq: $bodyshopid}}, {_or: [{ownr_ph1: {_is_null: false}}, {ownr_ea: {_is_null: false}}]}]}) {
actual_delivery
id
created_at
ro_number
@@ -1365,6 +1388,7 @@ exports.GET_JOB_BY_PK = `query GET_JOB_BY_PK($id: uuid!) {
cieca_pfl
cieca_pft
cieca_pfo
cieca_ttl
vehicle {
id
notes
@@ -1494,7 +1518,8 @@ exports.GET_JOB_BY_PK = `query GET_JOB_BY_PK($id: uuid!) {
}`;
//TODO:AIO The above query used to have parts order lines in it. Validate that this doesn't need it.
exports.QUERY_JOB_COSTING_DETAILS = ` query QUERY_JOB_COSTING_DETAILS($id: uuid!) {
exports.QUERY_JOB_COSTING_DETAILS = `
query QUERY_JOB_COSTING_DETAILS($id: uuid!) {
jobs_by_pk(id: $id) {
ro_number
clm_total
@@ -2544,3 +2569,44 @@ exports.GET_JOBS_BY_PKS = `query GET_JOBS_BY_PKS($ids: [uuid!]!) {
}
}
`;
// GraphQL mutation: marks every message in a conversation as read.
// Sets read = true on ALL rows matching the conversationid (inbound and
// outbound, including rows already read — NOTE(review): an additional
// read: { _eq: false } filter would avoid rewriting already-read rows;
// confirm whether the unconditional update is intentional).
// Returns the updated ids plus affected_rows for the caller to verify.
exports.MARK_MESSAGES_AS_READ = `mutation MARK_MESSAGES_AS_READ($conversationId: uuid!) {
update_messages(where: { conversationid: { _eq: $conversationId } }, _set: { read: true }) {
returning {
id
}
affected_rows
}
}
`;
// GraphQL mutation: inserts one or more conversation rows.
// The returning set selects, for each created conversation: its core fields
// (phone_num, archived, label, unreadcnt, timestamps), any linked jobs via
// job_conversations (id, ro_number, owner name fields), and an aggregate
// count of unread inbound messages (read = false, isoutbound = false) —
// presumably so the client can render the new conversation without a
// follow-up query; verify against the consuming UI.
exports.CREATE_CONVERSATION = `mutation CREATE_CONVERSATION($conversation: [conversations_insert_input!]!) {
insert_conversations(objects: $conversation) {
returning {
id
phone_num
archived
label
unreadcnt
job_conversations {
jobid
conversationid
job {
id
ro_number
ownr_fn
ownr_ln
ownr_co_nm
}
}
messages_aggregate(where: { read: { _eq: false }, isoutbound: { _eq: false } }) {
aggregate {
count
}
}
created_at
updated_at
}
}
}
`;

View File

@@ -149,6 +149,58 @@ exports.generate_payment_url = async (req, res) => {
}
};
//Reference: https://intellipay.com/dist/webapi26.html#operation/fee
exports.checkfee = async (req, res) => {
// Requires amount, bodyshop.imexshopid, and state? to get data.
logger.log("intellipay-fee-check", "DEBUG", req.user?.email, null, null);
//If there's no amount, there can't be a fee. Skip the call.
if (!req.body.amount || req.body.amount <= 0) {
res.json({ fee: 0 });
return;
}
const shopCredentials = await getShopCredentials(req.body.bodyshop);
try {
const options = {
method: "POST",
headers: { "content-type": "application/x-www-form-urlencoded" },
//TODO: Move these to environment variables/database.
data: qs.stringify(
{
method: "fee",
...shopCredentials,
amount: req.body.amount,
paymenttype: `CC`,
cardnum: "4111111111111111", //Not needed per documentation, but incorrect values come back without it.
state:
req.body.bodyshop?.state && req.body.bodyshop.state?.length === 2
? req.body.bodyshop.state.toUpperCase()
: "ZZ" //Same as above
},
{ sort: false } //ColdFusion Query Strings depend on order. This preserves it.
),
url: `https://${domain}.cpteller.com/api/26/webapi.cfc`
};
const response = await axios(options);
if (response.data?.error) {
res.status(400).json({ error: response.data.error });
} else if (response.data < 0) {
res.json({ error: "Fee amount negative. Check API credentials & account configuration." });
} else {
res.json({ fee: response.data });
}
} catch (error) {
//console.log(error);
logger.log("intellipay-fee-check-error", "ERROR", req.user?.email, null, {
error: error.message
});
res.status(400).json({ error });
}
};
exports.postback = async (req, res) => {
try {
logger.log("intellipay-postback", "DEBUG", req.user?.email, null, req.body);

View File

@@ -848,6 +848,41 @@ function GenerateCostingData(job) {
gppercent: formatGpPercent(0)
});
}
//Push adjustments to bottom line.
if (job.job_totals?.totals?.ttl_adjustment) {
//Add to totals.
const Adjustment = Dinero(job.job_totals.totals.ttl_adjustment); //Need to invert, since this is being assigned as a cost.
summaryData.totalAdditionalSales = summaryData.totalAdditionalSales.add(Adjustment);
summaryData.totalSales = summaryData.totalSales.add(Adjustment);
//Add to lines.
costCenterData.push({
id: "AdjEst",
cost_center: "Adjustment (Est. Match)",
sale_labor: Dinero().toFormat(),
sale_labor_dinero: Dinero(),
sale_parts: Dinero().toFormat(),
sale_parts_dinero: Dinero(),
sale_additional: Adjustment.toFormat(),
sale_additional_dinero: Adjustment,
sale_sublet: Dinero(),
sale_sublet_dinero: Dinero(),
sales: Adjustment.toFormat(),
sales_dinero: Adjustment,
cost_parts: Dinero().toFormat(),
cost_parts_dinero: Dinero(),
cost_labor: Dinero().toFormat(), //Adjustment.toFormat(),
cost_labor_dinero: Dinero(), // Adjustment,
cost_additional: Dinero(),
cost_additional_dinero: Dinero(),
cost_sublet: Dinero(),
cost_sublet_dinero: Dinero(),
costs: Dinero().toFormat(),
costs_dinero: Dinero(),
gpdollars_dinero: Dinero(),
gpdollars: Dinero().toFormat(),
gppercent: formatGpPercent(0)
});
}
//Final summary data massaging.

View File

@@ -49,7 +49,7 @@ exports.totalsSsu = async function (req, res) {
} catch (error) {
logger.log("job-totals-ssu-USA-error", "ERROR", req?.user?.email, id, {
jobid: id,
error
error: error.message
});
res.status(503).send();
}
@@ -68,6 +68,54 @@ async function TotalsServerSide(req, res) {
ret.additional = CalculateAdditional(job);
ret.totals = CalculateTaxesTotals(job, ret);
// Sub total scrubbbing.
const emsTotal =
job.cieca_ttl.data.n_ttl_amt === job.cieca_ttl.data.g_ttl_amt //It looks like sometimes, gross and net are the same, but they shouldn't be.
? job.cieca_ttl.data.n_ttl_amt - job.cieca_ttl.data.g_tax
: job.cieca_ttl.data.g_ttl_amt - job.cieca_ttl.data.g_tax; //If they are, adjust the gross total down by the tax amount.
const ttlDifference =
emsTotal -
ret.totals.subtotal
.add(
Dinero({
amount: Math.round((job.adjustment_bottom_line || 0) * 100)
}).multiply(-1) //Add back in the adjustment to the subtotal. We don't want to scrub it twice.
)
.getAmount() /
100;
if (Math.abs(ttlDifference) > 0.0) {
//If there is any difference at all, adjust the totals to match the estimate.
ret.totals.ttl_adjustment = Dinero({ amount: Math.round(ttlDifference * 100) });
ret.totals.subtotal = ret.totals.subtotal.add(ret.totals.ttl_adjustment);
ret.totals.total_repairs = ret.totals.total_repairs.add(ret.totals.ttl_adjustment);
ret.totals.net_repairs = ret.totals.net_repairs.add(ret.totals.ttl_adjustment);
logger.log("job-totals-USA-ttl-adj", "DEBUG", null, job.id, {
adjAmount: ttlDifference
});
}
//Taxes Scrubbing
const emsTaxTotal = job.cieca_ttl.data.g_tax;
const totalUsTaxes =
(ret.totals.us_sales_tax_breakdown.ty1Tax.getAmount() +
ret.totals.us_sales_tax_breakdown.ty2Tax.getAmount() +
ret.totals.us_sales_tax_breakdown.ty3Tax.getAmount() +
ret.totals.us_sales_tax_breakdown.ty4Tax.getAmount() +
ret.totals.us_sales_tax_breakdown.ty5Tax.getAmount()) /
100;
const ttlTaxDifference = emsTaxTotal - totalUsTaxes;
if (Math.abs(ttlTaxDifference) > 0.0) {
//If there is any tax difference at all, adjust the totals to match the estimate.
ret.totals.ttl_tax_adjustment = Dinero({ amount: Math.round(ttlTaxDifference * 100) });
ret.totals.total_repairs = ret.totals.total_repairs.add(ret.totals.ttl_tax_adjustment);
ret.totals.net_repairs = ret.totals.net_repairs.add(ret.totals.ttl_tax_adjustment);
logger.log("job-totals-USA-ttl-tax-adj", "DEBUG", null, job.id, {
adjAmount: ttlTaxDifference
});
}
return ret;
} catch (error) {
logger.log("job-totals-ssu-USA-error", "ERROR", req.user?.email, job.id, {
@@ -842,17 +890,21 @@ function CalculateTaxesTotals(job, otherTotals) {
}
});
//Add towing and storage taxable amounts
const stlTowing = job.cieca_stl?.data.find((c) => c.ttl_typecd === "OTTW");
const stlStorage = job.cieca_stl?.data.find((c) => c.ttl_typecd === "OTST");
const stlTowing = job.cieca_stl?.data.find((c) => c.ttl_typecd === "OTTW" || c.ttl_type === "OTTW");
const stlStorage = job.cieca_stl?.data.find((c) => c.ttl_typecd === "OTST" || c.ttl_type === "OTST");
if (stlTowing)
taxableAmounts.TOW = Dinero({
amount: Math.round(stlTowing.t_amt * 100)
});
taxableAmounts.TOW = taxableAmounts.TOW.add(
Dinero({
amount: Math.round(stlTowing.t_amt * 100)
})
);
if (stlStorage)
taxableAmounts.TOW = Dinero({
amount: Math.round(stlStorage.t_amt * 100)
});
taxableAmounts.TOW = taxableAmounts.TOW.add(
(taxableAmounts.TOW = Dinero({
amount: Math.round(stlStorage.t_amt * 100)
}))
);
const pfp = job.parts_tax_rates;
@@ -959,7 +1011,7 @@ function CalculateTaxesTotals(job, otherTotals) {
}
}
} catch (error) {
logger.log("job-totals-USA Key with issue", "error", null, null, {
logger.log("job-totals-USA Key with issue", "error", null, job.id, {
key
});
}
@@ -989,7 +1041,7 @@ function CalculateTaxesTotals(job, otherTotals) {
for (let threshCounter = 1; threshCounter <= 5; threshCounter++) {
const thresholdAmount = parseFloat(job.cieca_pft[`ty${tyCounter}_thres${threshCounter}`]) || 0;
const thresholdTaxRate = parseFloat(job.cieca_pft[`ty${tyCounter}_rate${threshCounter}`]) || 0;
// console.log(taxTierKey, tyCounter, threshCounter, thresholdAmount, thresholdTaxRate);
let taxableAmountInThisThreshold;
if (
thresholdAmount === 9999.99 ||
@@ -1012,11 +1064,8 @@ function CalculateTaxesTotals(job, otherTotals) {
taxableAmountInThisThreshold = Dinero({
amount: Math.round(thresholdAmount * 100)
});
remainingTaxableAmounts[taxTierKey] = remainingTaxableAmounts[taxTierKey].subtract(
Dinero({
amount: Math.round(taxableAmountInThisThreshold * 100)
})
);
remainingTaxableAmounts[taxTierKey] =
remainingTaxableAmounts[taxTierKey].subtract(taxableAmountInThisThreshold);
}
}
@@ -1025,8 +1074,8 @@ function CalculateTaxesTotals(job, otherTotals) {
totalTaxByTier[taxTierKey] = totalTaxByTier[taxTierKey].add(taxAmountToAdd);
}
} catch (error) {
logger.log("job-totals-USA - PFP Calculation Error", "error", null, null, {
error
logger.log("job-totals-USA - PFP Calculation Error", "error", null, job.id, {
error: error.message
});
}
});

View File

@@ -1,11 +1,12 @@
const express = require("express");
const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { lightbox_credentials, payment_refund, generate_payment_url, postback } = require("../intellipay/intellipay");
const { lightbox_credentials, payment_refund, generate_payment_url, postback, checkfee } = require("../intellipay/intellipay");
router.post("/lightbox_credentials", validateFirebaseIdTokenMiddleware, lightbox_credentials);
router.post("/payment_refund", validateFirebaseIdTokenMiddleware, payment_refund);
router.post("/generate_payment_url", validateFirebaseIdTokenMiddleware, generate_payment_url);
router.post("/checkfee", validateFirebaseIdTokenMiddleware, checkfee);
router.post("/postback", postback);
module.exports = router;

View File

@@ -13,6 +13,7 @@ const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLCl
const { taskAssignedEmail, tasksRemindEmail } = require("../email/tasksEmails");
const { canvastest } = require("../render/canvas-handler");
const { alertCheck } = require("../alerts/alertcheck");
const uuid = require("uuid").v4;
//Test route to ensure Express is responding.
router.get("/test", eventAuthorizationMiddleware, async function (req, res) {
@@ -57,6 +58,59 @@ router.get("/test-logs", eventAuthorizationMiddleware, (req, res) => {
return res.status(500).send("Logs tested.");
});
router.get("/wstest", eventAuthorizationMiddleware, (req, res) => {
const { ioRedis } = req;
ioRedis.to(`bodyshop-broadcast-room:bfec8c8c-b7f1-49e0-be4c-524455f4e582`).emit("new-message-summary", {
isoutbound: true,
conversationId: "2b44d692-a9e4-4ed4-9c6b-7d8b0c44a0f6",
msid: "SM5d053957bc0da29399b768c23bffcc0f",
summary: true
});
// TODO: Do we need to add more content here?
ioRedis
.to(`bodyshop-conversation-room:bfec8c8c-b7f1-49e0-be4c-524455f4e582:2b44d692-a9e4-4ed4-9c6b-7d8b0c44a0f6`)
.emit("new-message-detailed", {
//
// msid: "SMbbd7703a898fef7f2c07c148ade8a6cd",
// text: "test2",
// conversationid: "2b44d692-a9e4-4ed4-9c6b-7d8b0c44a0f6",
// isoutbound: true,
// userid: "patrick@imex.dev",
// image: false,
// image_path: [],
newMessage: {
conversation: {
id: uuid(),
archived: false,
bodyshop: {
id: "bfec8c8c-b7f1-49e0-be4c-524455f4e582",
imexshopid: "APPLE"
},
created_at: "2024-11-19T19:46:38.984633+00:00",
updated_at: "2024-11-19T22:40:48.346875+00:00",
unreadcnt: 0,
phone_num: "+16138676684"
},
conversationid: "2b44d692-a9e4-4ed4-9c6b-7d8b0c44a0f6",
created_at: "2024-11-19T22:40:48.346875+00:00",
id: "68604ea9-c411-43ec-ab83-899868e58819",
image_path: [],
image: false,
isoutbound: true,
msid: "SMbbd7703a898fef7f2c07c148ade8a6cd",
read: false,
text: `This is a test ${Math.round(Math.random() * 100)}`,
updated_at: "2024-11-19T22:40:48.346875+00:00",
status: "posted",
userid: "patrick@imex.dev"
},
conversationId: "2b44d692-a9e4-4ed4-9c6b-7d8b0c44a0f6",
summary: false
}); // TODO: Do we need to add more content here?
return res.status(500).send("Logs tested.");
});
// Search
router.post("/search", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, os.search);

View File

@@ -1,11 +1,12 @@
const express = require("express");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { subscribe, unsubscribe } = require("../firebase/firebase-handler");
const { subscribe, unsubscribe, sendNotification } = require("../firebase/firebase-handler");
const router = express.Router();
router.use(validateFirebaseIdTokenMiddleware);
router.post("/subscribe", subscribe);
router.post("/unsubscribe", unsubscribe);
router.post("/sendtestnotification", sendNotification);
module.exports = router;

View File

@@ -11,186 +11,161 @@ const logger = require("../utils/logger");
const InstanceManager = require("../utils/instanceMgr").default;
exports.receive = async (req, res) => {
//Perform request validation
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
logger.log("sms-inbound", "DEBUG", "api", null, {
const loggerData = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body)
});
};
if (!!!req.body || !!!req.body.MessagingServiceSid || !!!req.body.SmsMessageSid) {
logger.log("sms-inbound", "DEBUG", "api", null, loggerData);
if (!req.body || !req.body.MessagingServiceSid || !req.body.SmsMessageSid) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
...loggerData,
type: "malformed-request"
});
return res.status(400).json({ success: false, error: "Malformed Request" });
}
try {
// Step 1: Find the bodyshop and existing conversation
const response = await client.request(queries.FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
mssid: req.body.MessagingServiceSid,
phone: phone(req.body.From).phoneNumber
});
if (!response.bodyshops[0]) {
return res.status(400).json({ success: false, error: "No matching bodyshop" });
}
const bodyshop = response.bodyshops[0];
// Sort conversations by `updated_at` (or `created_at`) and pick the last one
const sortedConversations = bodyshop.conversations.sort((a, b) => new Date(a.created_at) - new Date(b.created_at));
const existingConversation = sortedConversations.length
? sortedConversations[sortedConversations.length - 1]
: null;
let conversationid;
let newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
type: "malformed-request"
});
res.status(400);
res.json({ success: false, error: "Malformed Request" });
} else {
try {
const response = await client.request(queries.FIND_BODYSHOP_BY_MESSAGING_SERVICE_SID, {
mssid: req.body.MessagingServiceSid,
phone: phone(req.body.From).phoneNumber
});
isoutbound: false,
userid: null // Add additional fields as necessary
};
let newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body)
};
if (response.bodyshops[0]) {
//Found a bodyshop - should always happen.
if (response.bodyshops[0].conversations.length === 0) {
//No conversation Found, create one.
//console.log("[SMS Receive] No conversation found. Creating one.");
newMessage.conversation = {
data: {
bodyshopid: response.bodyshops[0].id,
phone_num: phone(req.body.From).phoneNumber
}
};
} else if (response.bodyshops[0].conversations.length === 1) {
//Just add it to the conversation
//console.log("[SMS Receive] Conversation found. Added ID.");
newMessage.conversationid = response.bodyshops[0].conversations[0].id;
} else {
//We should never get here.
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
type: "duplicate-phone"
});
}
try {
let insertresp;
if (response.bodyshops[0].conversations[0]) {
insertresp = await client.request(queries.INSERT_MESSAGE, {
msg: newMessage,
conversationid: response.bodyshops[0].conversations[0] && response.bodyshops[0].conversations[0].id
});
} else {
insertresp = await client.request(queries.RECEIVE_MESSAGE, {
msg: newMessage
});
}
const message = insertresp.insert_messages.returning[0];
const data = {
type: "messaging-inbound",
conversationid: message.conversationid || "",
text: message.text || "",
messageid: message.id || "",
phone_num: message.conversation.phone_num || ""
};
if (existingConversation) {
// Use the existing conversation
conversationid = existingConversation.id;
const fcmresp = await admin.messaging().send({
topic: `${message.conversation.bodyshop.imexshopid}-messaging`,
notification: {
title: InstanceManager({
imex: `ImEX Online Message - ${data.phone_num}`,
rome: `Rome Online Message - ${data.phone_num}`
}),
body: message.image_path ? `Image ${message.text}` : message.text
//imageUrl: "https://thinkimex.com/img/io-fcm.png", //TODO:AIO Resolve addresses for other instances
},
data
});
logger.log("sms-inbound-success", "DEBUG", "api", null, {
newMessage,
fcmresp
});
res.status(200).send("");
} catch (e2) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
error: e2
});
res.sendStatus(500).json(e2);
}
// Unarchive the conversation if necessary
if (existingConversation.archived) {
await client.request(queries.UNARCHIVE_CONVERSATION, {
id: conversationid,
archived: false
});
}
} catch (e1) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
error: e1
} else {
// Create a new conversation
const newConversationResponse = await client.request(queries.CREATE_CONVERSATION, {
conversation: {
bodyshopid: bodyshop.id,
phone_num: phone(req.body.From).phoneNumber,
archived: false
}
});
res.sendStatus(500).json(e1);
const createdConversation = newConversationResponse.insert_conversations.returning[0];
conversationid = createdConversation.id;
}
// Ensure `conversationid` is added to the message
newMessage.conversationid = conversationid;
// Step 3: Insert the message into the conversation
const insertresp = await client.request(queries.INSERT_MESSAGE, {
msg: newMessage,
conversationid: conversationid
});
const message = insertresp?.insert_messages?.returning?.[0];
const conversation = message?.conversation || null;
if (!conversation) {
throw new Error("Conversation data is missing from the response.");
}
// Step 4: Notify clients through Redis
const broadcastRoom = getBodyshopRoom(conversation.bodyshop.id);
const conversationRoom = getBodyshopConversationRoom({
bodyshopId: conversation.bodyshop.id,
conversationId: conversation.id
});
const commonPayload = {
isoutbound: false,
conversationId: conversation.id,
updated_at: message.updated_at,
msid: message.sid
};
ioRedis.to(broadcastRoom).emit("new-message-summary", {
...commonPayload,
existingConversation: !!existingConversation,
newConversation: !existingConversation ? conversation : null,
summary: true
});
ioRedis.to(conversationRoom).emit("new-message-detailed", {
newMessage: message,
...commonPayload,
newConversation: !existingConversation ? conversation : null,
existingConversation: !!existingConversation,
summary: false
});
// Step 5: Send FCM notification
const fcmresp = await admin.messaging().send({
topic: `${message.conversation.bodyshop.imexshopid}-messaging`,
notification: {
title: InstanceManager({
imex: `ImEX Online Message - ${message.conversation.phone_num}`,
rome: `Rome Online Message - ${message.conversation.phone_num}`,
}),
body: message.image_path ? `Image ${message.text}` : message.text
},
data: {
type: "messaging-inbound",
conversationid: message.conversationid || "",
text: message.text || "",
messageid: message.id || "",
phone_num: message.conversation.phone_num || ""
}
});
logger.log("sms-inbound-success", "DEBUG", "api", null, {
newMessage,
fcmresp
});
res.status(200).send("");
} catch (e) {
handleError(req, e, res, "RECEIVE_MESSAGE");
}
};
// const sampleMessage: {
// "ToCountry": "CA",
// "ToState": "BC",
// "SmsMessageSid": "SMad7bddaf3454c0904999d6018b1e8f49",
// "NumMedia": "0",
// "ToCity": "Vancouver",
// "FromZip": "",
// "SmsSid": "SMad7bddaf3454c0904999d6018b1e8f49",
// "FromState": "BC",
// "SmsStatus": "received",
// "FromCity": "VANCOUVER",
// "Body": "Hi",
// "FromCountry": "CA",
// "To": "+16043301606",
// "MessagingServiceSid": "MG6e259e2add04ffa0d0aa355038670ee1",
// "ToZip": "",
// "NumSegments": "1",
// "MessageSid": "SMad7bddaf3454c0904999d6018b1e8f49",
// "AccountSid": "AC6c09d337d6b9c68ab6488c2052bd457c",
// "From": "+16049992002",
// "ApiVersion": "2010-04-01"
// }
// ] req.body {
// [0] ToCountry: 'CA',
// [0] MediaContentType0: 'image/jpeg',
// [0] ToState: 'BC',
// [0] SmsMessageSid: 'MM14fa2851ba26e0dc2b62073f8e7cdf27',
// [0] NumMedia: '1',
// [0] ToCity: 'Vancouver',
// [0] FromZip: '',
// [0] SmsSid: 'MM14fa2851ba26e0dc2b62073f8e7cdf27',
// [0] FromState: 'BC',
// [0] SmsStatus: 'received',
// [0] FromCity: 'VANCOUVER',
// [0] Body: '',
// [0] FromCountry: 'CA',
// [0] To: '+16043301606',
// [0] MessagingServiceSid: 'MG6e259e2add04ffa0d0aa355038670ee1',
// [0] ToZip: '',
// [0] NumSegments: '1',
// [0] MessageSid: 'MM14fa2851ba26e0dc2b62073f8e7cdf27',
// [0] AccountSid: 'AC6c09d337d6b9c68ab6488c2052bd457c',
// [0] From: '+16049992002',
// [0] MediaUrl0: 'https://api.twilio.com/2010-04-01/Accounts/AC6c09d337d6b9c68ab6488c2052bd457c/Messages/MM14fa2851ba26e0dc2b62073f8e7cdf27/Media/MEf129dd37979852f395eb29ffb126e19e',
// [0] ApiVersion: '2010-04-01'
// [0] }
// [0] MediaContentType0: 'image/jpeg',
// MediaContentType0: 'video/3gpp',
const generateMediaArray = (body) => {
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
//stuff
const ret = [];
for (var i = 0; i < parseInt(NumMedia); i++) {
for (let i = 0; i < parseInt(NumMedia); i++) {
ret.push(body[`MediaUrl${i}`]);
}
return ret;
@@ -198,3 +173,17 @@ const generateMediaArray = (body) => {
return null;
}
};
/**
 * Log an inbound-SMS failure together with the Twilio webhook context,
 * then answer the request with a 500 JSON error.
 * @param {object} req - Express request carrying the Twilio webhook body.
 * @param {Error} error - The failure being reported.
 * @param {object} res - Express response to finish.
 * @param {string} context - Label for the pipeline step that failed.
 */
const handleError = (req, error, res, context) => {
  const { SmsMessageSid, Body, MediaUrl0, MessagingServiceSid } = req.body;
  logger.log("sms-inbound-error", "ERROR", "api", null, {
    msid: SmsMessageSid,
    text: Body,
    image: !!MediaUrl0,
    image_path: generateMediaArray(req.body),
    messagingServiceSid: MessagingServiceSid,
    context,
    error
  });
  res.status(500).json({ error: error.message || "Internal Server Error" });
};

View File

@@ -8,15 +8,17 @@ const { phone } = require("phone");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const client = twilio(process.env.TWILIO_AUTH_TOKEN, process.env.TWILIO_AUTH_KEY);
const { admin } = require("../firebase/firebase-handler");
const gqlClient = require("../graphql-client/graphql-client").client;
exports.send = (req, res) => {
exports.send = async (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
logger.log("sms-outbound", "DEBUG", req.user.email, null, {
messagingServiceSid: messagingServiceSid,
messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src),
text: body,
@@ -27,66 +29,10 @@ exports.send = (req, res) => {
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
if (!!to && !!messagingServiceSid && (!!body || !!selectedMedia.length > 0) && !!conversationid) {
client.messages
.create({
body: body,
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src)
})
.then((message) => {
let newMessage = {
msid: message.sid,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
};
gqlClient
.request(queries.INSERT_MESSAGE, { msg: newMessage, conversationid })
.then((r2) => {
//console.log("Responding GQL Message ID", JSON.stringify(r2));
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
conversationid
});
const data = {
type: "messaging-outbound",
conversationid: newMessage.conversationid || ""
};
admin.messaging().send({
topic: `${imexshopid}-messaging`,
data
});
res.sendStatus(200);
})
.catch((e2) => {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
msid: message.sid,
conversationid,
error: e2
});
//res.json({ success: false, message: e2 });
});
})
.catch((e1) => {
//res.json({ success: false, message: error });
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
conversationid,
error: e1
});
});
} else {
if (!to || !messagingServiceSid || (!body && selectedMedia.length === 0) || !conversationid) {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
type: "missing-parameters",
messagingServiceSid: messagingServiceSid,
messagingServiceSid,
to: phone(to).phoneNumber,
text: body,
conversationid,
@@ -96,5 +42,72 @@ exports.send = (req, res) => {
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
});
res.status(400).json({ success: false, message: "Missing required parameter(s)." });
return;
}
try {
const message = await client.messages.create({
body,
messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src)
});
const newMessage = {
msid: message.sid,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path: req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : []
};
try {
const gqlResponse = await gqlClient.request(queries.INSERT_MESSAGE, { msg: newMessage, conversationid });
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
conversationid
});
const insertedMessage = gqlResponse.insert_messages.returning[0];
const broadcastRoom = getBodyshopRoom(insertedMessage.conversation.bodyshop.id);
const conversationRoom = getBodyshopConversationRoom({
bodyshopId: insertedMessage.conversation.bodyshop.id,
conversationId: insertedMessage.conversation.id
});
ioRedis.to(broadcastRoom).emit("new-message-summary", {
isoutbound: true,
conversationId: conversationid,
updated_at: insertedMessage.updated_at,
msid: message.sid,
summary: true
});
ioRedis.to(conversationRoom).emit("new-message-detailed", {
newMessage: insertedMessage,
conversationId: conversationid,
summary: false
});
res.sendStatus(200);
} catch (gqlError) {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
msid: message.sid,
conversationid,
error: gqlError.message,
stack: gqlError.stack
});
res.status(500).json({ success: false, message: "Failed to insert message into database." });
}
} catch (twilioError) {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
conversationid,
error: twilioError.message,
stack: twilioError.stack
});
res.status(500).json({ success: false, message: "Failed to send message through Twilio." });
}
};

View File

@@ -5,59 +5,102 @@ require("dotenv").config({
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const { phone } = require("phone");
const logger = require("../utils/logger");
const { admin } = require("../firebase/firebase-handler");
exports.status = (req, res) => {
exports.status = async (req, res) => {
const { SmsSid, SmsStatus } = req.body;
client
.request(queries.UPDATE_MESSAGE_STATUS, {
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
try {
// Ignore status 'queued'
if (SmsStatus === "queued") {
return res.status(200).json({ message: "Status 'queued' disregarded." });
}
// Update message status in the database
const response = await client.request(queries.UPDATE_MESSAGE_STATUS, {
msid: SmsSid,
fields: { status: SmsStatus }
})
.then((response) => {
});
const message = response.update_messages.returning[0];
if (message) {
logger.log("sms-status-update", "DEBUG", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus }
});
})
.catch((error) => {
logger.log("sms-status-update-error", "ERROR", "api", null, {
// Emit WebSocket event to notify the change in message status
const conversationRoom = getBodyshopConversationRoom({
bodyshopId: message.conversation.bodyshopid,
conversationId: message.conversationid
});
ioRedis.to(conversationRoom).emit("message-changed", {
...message,
status: SmsStatus,
type: "status-changed"
});
} else {
logger.log("sms-status-update-warning", "WARN", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
error
warning: "No message returned from the database update."
});
}
res.sendStatus(200);
} catch (error) {
logger.log("sms-status-update-error", "ERROR", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
stack: error.stack,
message: error.message
});
res.sendStatus(200);
res.status(500).json({ error: "Failed to update message status." });
}
};
exports.markConversationRead = async (req, res) => {
const { conversationid, imexshopid } = req.body;
admin.messaging().send({
topic: `${imexshopid}-messaging`,
// notification: {
// title: `ImEX Online Message - ${data.phone_num}`,
// body: message.image_path ? `Image ${message.text}` : message.text,
// imageUrl: "https://thinkimex.com/img/logo512.png",
// },
data: {
type: "messaging-mark-conversation-read",
conversationid: conversationid || ""
}
});
res.send(200);
};
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
} = req;
const { conversation, imexshopid, bodyshopid } = req.body;
// Inbound Sample
// {
// "SmsSid": "SM5205ea340e06437799d9345e7283457c",
// "SmsStatus": "queued",
// "MessageStatus": "queued",
// "To": "+16049992002",
// "MessagingServiceSid": "MG6e259e2add04ffa0d0aa355038670ee1",
// "MessageSid": "SM5205ea340e06437799d9345e7283457c",
// "AccountSid": "AC6c09d337d6b9c68ab6488c2052bd457c",
// "From": "+16043301606",
// "ApiVersion": "2010-04-01"
// }
// Alternatively, support both payload formats
const conversationId = conversation?.id || req.body.conversationId;
if (!conversationId || !imexshopid || !bodyshopid) {
return res.status(400).json({ error: "Invalid conversation data provided." });
}
try {
const response = await client.request(queries.MARK_MESSAGES_AS_READ, {
conversationId
});
const updatedMessageIds = response.update_messages.returning.map((message) => message.id);
const broadcastRoom = getBodyshopRoom(bodyshopid);
ioRedis.to(broadcastRoom).emit("conversation-changed", {
type: "conversation-marked-read",
conversationId,
affectedMessages: response.update_messages.affected_rows,
messageIds: updatedMessageIds
});
res.status(200).json({
success: true,
message: "Conversation marked as read."
});
} catch (error) {
console.error("Error marking conversation as read:", error);
res.status(500).json({ error: "Failed to mark conversation as read." });
}
};

View File

@@ -0,0 +1,26 @@
// Load environment variables THIS MUST BE AT THE TOP
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const { networkInterfaces, hostname } = require("node:os");
/**
 * Identify this replica for status reporting.
 * Prefers the OS hostname; falls back to the first external IPv4
 * address, and finally to the loopback address.
 * @returns {string} Hostname, external IPv4 address, or "127.0.0.1".
 */
const getHostNameOrIP = () => {
  // The hostname is the most stable identifier when the OS provides one.
  const name = hostname();
  if (name) return name;
  // Otherwise scan all interfaces for a non-internal IPv4 address.
  for (const ifaceList of Object.values(networkInterfaces())) {
    for (const { family, internal, address } of ifaceList) {
      if (family === "IPv4" && !internal) return address;
    }
  }
  // Last resort: loopback.
  return "127.0.0.1";
};
module.exports = getHostNameOrIP;

View File

@@ -1,8 +1,12 @@
const applyIOHelpers = ({ app, api, io, logger }) => {
const getBodyshopRoom = (bodyshopID) => `bodyshop-broadcast-room:${bodyshopID}`;
// Messaging - conversation specific room to handle detailed messages when the user has a conversation open.
const getBodyshopConversationRoom = ({bodyshopId, conversationId}) =>
`bodyshop-conversation-room:${bodyshopId}:${conversationId}`;
const ioHelpersAPI = {
getBodyshopRoom
getBodyshopRoom,
getBodyshopConversationRoom
};
// Helper middleware

View File

@@ -8,10 +8,10 @@ const InstanceManager = require("../utils/instanceMgr").default;
const winston = require("winston");
const WinstonCloudWatch = require("winston-cloudwatch");
const { isString, isEmpty } = require("lodash");
const { networkInterfaces, hostname } = require("node:os");
const { uploadFileToS3 } = require("./s3");
const { v4 } = require("uuid");
const { InstanceRegion } = require("./instanceMgr");
const getHostNameOrIP = require("./getHostNameOrIP");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
@@ -76,22 +76,6 @@ const createLogger = () => {
})();
};
const getHostNameOrIP = () => {
// Try to get the hostname first
const hostName = hostname();
if (hostName) return hostName;
const interfaces = networkInterfaces();
for (const name of Object.keys(interfaces)) {
for (const iface of interfaces[name]) {
if (iface.family === "IPv4" && !iface.internal) {
return iface.address;
}
}
}
return "127.0.0.1";
};
const createProductionTransport = (level, logStreamName, filters) => {
return new WinstonCloudWatch({
level,

View File

@@ -0,0 +1,52 @@
// Load environment variables THIS MUST BE AT THE TOP
const path = require("path");
const getHostNameOrIP = require("./getHostNameOrIP");
const logger = require("./logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const CrispStatusReporter = require("crisp-status-reporter").CrispStatusReporter;
const InstanceManager = require("../utils/instanceMgr").default;
/**
 * Start the Crisp status reporter for this replica (ImEX instances only;
 * InstanceManager runs the `imex` branch and is a no-op for other instances).
 * @returns {CrispStatusReporter|undefined} The running reporter, or undefined
 *   when configuration is missing or construction fails.
 */
function StartStatusReporter() {
  //For ImEX Online.
  return InstanceManager({
    executeFunction: true,
    args: [],
    imex: () => {
      // All four settings must be present before reporting can start.
      const configMissing =
        process.env.NODE_ENV === undefined ||
        !process.env.CRISP_SECRET_TOKEN ||
        !process.env.CRISP_SERVICE_IDENTIFIER ||
        !process.env.CRISP_NODE_IDENTIFIER;
      if (configMissing) {
        logger.log("crisp-status-update-error", "DEBUG", null, null, { message: "Environment Variables not set." });
        return;
      }
      try {
        // The reporter pings Crisp every `interval` seconds to mark this replica healthy.
        return new CrispStatusReporter({
          token: process.env.CRISP_SECRET_TOKEN, // Reporter token (issued by Crisp)
          service_id: process.env.CRISP_SERVICE_IDENTIFIER, // Service holding the parent node
          node_id: process.env.CRISP_NODE_IDENTIFIER, // Node containing this replica
          replica_id: getHostNameOrIP(), // Unique per-instance identifier (hostname or LAN IP)
          interval: 30 // Reporting interval in seconds
        });
      } catch (error) {
        logger.log("crisp-status-update-error", "DEBUG", null, null, { error: error.message });
      }
    }
  });
}
module.exports = StartStatusReporter;

View File

@@ -3,7 +3,7 @@ const { admin } = require("../firebase/firebase-handler");
const redisSocketEvents = ({
io,
redisHelpers: { setSessionData, clearSessionData }, // Note: Used if we persist user to Redis
ioHelpers: { getBodyshopRoom },
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom },
logger
}) => {
// Logging helper functions
@@ -46,18 +46,25 @@ const redisSocketEvents = ({
// Token Update Events
const registerUpdateEvents = (socket) => {
let latestTokenTimestamp = 0;
const updateToken = async (newToken) => {
const currentTimestamp = Date.now();
latestTokenTimestamp = currentTimestamp;
try {
// noinspection UnnecessaryLocalVariableJS
// Verify token with Firebase Admin SDK
const user = await admin.auth().verifyIdToken(newToken, true);
// Skip outdated token validations
if (currentTimestamp < latestTokenTimestamp) {
createLogEvent(socket, "warn", "Outdated token validation skipped.");
return;
}
socket.user = user;
// If We ever want to persist user Data across workers
// await setSessionData(socket.id, "user", user);
// Uncomment for further testing
// createLogEvent(socket, "debug", "Token updated successfully");
createLogEvent(socket, "debug", `Token updated successfully for socket ID: ${socket.id}`);
socket.emit("token-updated", { success: true });
} catch (error) {
if (error.code === "auth/id-token-expired") {
@@ -66,16 +73,20 @@ const redisSocketEvents = ({
success: false,
error: "Stale token."
});
} else {
createLogEvent(socket, "error", `Token update failed: ${error.message}`);
socket.emit("token-updated", { success: false, error: error.message });
// For any other errors, optionally disconnect the socket
socket.disconnect();
return; // Avoid disconnecting for expired tokens
}
createLogEvent(socket, "error", `Token update failed for socket ID: ${socket.id}, Error: ${error.message}`);
socket.emit("token-updated", { success: false, error: error.message });
// Optionally disconnect for invalid tokens or other errors
socket.disconnect();
}
};
socket.on("update-token", updateToken);
};
// Room Broadcast Events
const registerRoomAndBroadcastEvents = (socket) => {
const joinBodyshopRoom = (bodyshopUUID) => {
@@ -113,6 +124,7 @@ const redisSocketEvents = ({
socket.on("leave-bodyshop-room", leaveBodyshopRoom);
socket.on("broadcast-to-bodyshop", broadcastToBodyshopRoom);
};
// Disconnect Events
const registerDisconnectEvents = (socket) => {
const disconnect = () => {
@@ -130,9 +142,64 @@ const redisSocketEvents = ({
socket.on("disconnect", disconnect);
};
// Messaging Events
// Wires per-socket handlers for conversation messaging:
//  - "join-bodyshop-conversation" / "leave-bodyshop-conversation": membership in a
//    per-conversation room (scoped by bodyshop + conversation)
//  - "conversation-modified": fan-out of a "conversation-changed" notification
// Errors in any handler are logged via `logger` and never thrown to socket.io.
const registerMessagingEvents = (socket) => {
  // Join the room dedicated to a single conversation within a bodyshop.
  // (Was declared `async` with no `await`; now sync to match leaveConversationRoom.)
  const joinConversationRoom = ({ bodyshopId, conversationId }) => {
    try {
      const room = getBodyshopConversationRoom({ bodyshopId, conversationId });
      socket.join(room);
    } catch (error) {
      logger.log("Failed to Join Conversation Room", "error", "io-redis", null, {
        bodyshopId,
        conversationId,
        error: error.message,
        stack: error.stack
      });
    }
  };

  // Leave the per-conversation room (mirror of joinConversationRoom).
  const leaveConversationRoom = ({ bodyshopId, conversationId }) => {
    try {
      const room = getBodyshopConversationRoom({ bodyshopId, conversationId });
      socket.leave(room);
    } catch (error) {
      logger.log("Failed to Leave Conversation Room", "error", "io-redis", null, {
        bodyshopId,
        conversationId,
        error: error.message,
        stack: error.stack
      });
    }
  };

  // Broadcast a conversation change to every client in the BODYSHOP room —
  // not just the per-conversation room — so e.g. conversation lists refresh.
  // NOTE(review): the use of getBodyshopRoom (rather than
  // getBodyshopConversationRoom) looks intentional for whole-shop fan-out;
  // confirm that scope is desired.
  const conversationModified = ({ bodyshopId, conversationId, ...fields }) => {
    try {
      const room = getBodyshopRoom(bodyshopId);
      // Emit the updated data to all clients in the bodyshop room
      io.to(room).emit("conversation-changed", {
        conversationId,
        ...fields
      });
    } catch (error) {
      logger.log("Failed to handle conversation modification", "error", "io-redis", null, {
        bodyshopId,
        conversationId,
        fields,
        error: error.message,
        stack: error.stack
      });
    }
  };

  socket.on("conversation-modified", conversationModified);
  socket.on("join-bodyshop-conversation", joinConversationRoom);
  socket.on("leave-bodyshop-conversation", leaveConversationRoom);
};
// Call Handlers
registerRoomAndBroadcastEvents(socket);
registerUpdateEvents(socket);
registerMessagingEvents(socket);
registerDisconnectEvents(socket);
};