Merged in release/2024-11-08 (pull request #1893)

Release/2024-11-08 into master-AIO - IO-2921, IO-2969, IO-3001, IO-3015, IO-3017, IO-3018, IO-3025

Approved-by: Allan Carr
This commit is contained in:
Dave Richer
2024-11-09 04:52:01 +00:00
20 changed files with 384 additions and 248 deletions

View File

@@ -0,0 +1,24 @@
#!/bin/bash
# Install required packages
dnf install -y fontconfig freetype
# Move to the /tmp directory for temporary download and extraction
cd /tmp
# Download the Montserrat font zip file
wget https://images.imex.online/fonts/montserrat.zip -O montserrat.zip
# Unzip the downloaded font file
unzip montserrat.zip -d montserrat
# Move the font files to the system fonts directory
mv montserrat/*.ttf /usr/share/fonts
# Rebuild the font cache
fc-cache -fv
# Clean up
rm -rf /tmp/montserrat /tmp/montserrat.zip
echo "Montserrat fonts installed and cached successfully."

View File

@@ -1,10 +1,10 @@
import { Card, Table, Tag } from "antd";
-import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
-import { useTranslation } from "react-i18next";
-import React, { useEffect, useState } from "react";
-import dayjs from "../../../utils/day";
-import DashboardRefreshRequired from "../refresh-required.component";
+import axios from "axios";
+import React, { useEffect, useState } from "react";
+import { useTranslation } from "react-i18next";
+import dayjs from "../../../utils/day";
+import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
+import DashboardRefreshRequired from "../refresh-required.component";
const fortyFiveDaysAgo = () => dayjs().subtract(45, "day").toLocaleString();
@@ -46,6 +46,11 @@ export default function JobLifecycleDashboardComponent({ data, bodyshop, ...card
dataIndex: "humanReadable",
key: "humanReadable"
},
+  {
+    title: t("job_lifecycle.columns.average_human_readable"),
+    dataIndex: "averageHumanReadable",
+    key: "averageHumanReadable"
+  },
{
title: t("job_lifecycle.columns.status_count"),
key: "statusCount",

View File

@@ -1338,6 +1338,8 @@
},
"job_lifecycle": {
"columns": {
"average_human_readable": "Average Human Readable",
"average_value": "Average Value",
"duration": "Duration",
"end": "End",
"human_readable": "Human Readable",

View File

@@ -1338,6 +1338,8 @@
},
"job_lifecycle": {
"columns": {
"average_human_readable": "",
"average_value": "",
"duration": "",
"end": "",
"human_readable": "",

View File

@@ -1338,6 +1338,8 @@
},
"job_lifecycle": {
"columns": {
"average_human_readable": "",
"average_value": "",
"duration": "",
"end": "",
"human_readable": "",

View File

@@ -167,6 +167,27 @@ services:
# volumes:
# - redis-insight-data:/db
# ##Optional Container for SFTP/SSH Server for testing
+  # ssh-sftp-server:
+  #   image: atmoz/sftp:alpine # Using an image with SFTP support
+  #   container_name: ssh-sftp-server
+  #   hostname: ssh-sftp-server
+  #   networks:
+  #     - redis-cluster-net
+  #   ports:
+  #     - "2222:22" # Expose port 22 for SSH/SFTP (mapped to 2222 on the host)
+  #   volumes:
+  #     - ./certs/id_rsa.pub:/home/user/.ssh/keys/id_rsa.pub:ro # Mount the SSH public key
+  #     - ./upload:/home/user/upload # Mount a local directory for SFTP uploads
+  #   environment:
+  #     - SFTP_USERS=user:password:1001:100:upload
+  #   command: >
+  #     /bin/sh -c "
+  #     echo 'Match User user' >> /etc/ssh/sshd_config &&
+  #     sed -i -e 's#ForceCommand internal-sftp#ForceCommand internal-sftp -d /upload#' /etc/ssh/sshd_config &&
+  #     /usr/sbin/sshd -D
+  #     "
networks:
redis-cluster-net:
driver: bridge
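
If the optional container above is ever uncommented, it can be exercised from the host with a stock OpenSSH client (assuming the user/password pair from SFTP_USERS):

sftp -P 2222 user@localhost

Because ForceCommand drops sessions into /upload, files put during the session land in ./upload on the host via the bind mount.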

View File

@@ -0,0 +1,3 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_timetickets_date ON timetickets (date );
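
Since the tool could not auto-generate it, the down migration is simply the inverse of the CREATE; a minimal sketch (the same pattern fits the other empty down migrations in this release):

DROP INDEX IF EXISTS idx_timetickets_date;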

View File

@@ -0,0 +1 @@
CREATE INDEX idx_timetickets_date ON timetickets (date );

View File

@@ -0,0 +1,9 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
-- CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
-- CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
-- CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
-- CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
-- CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
-- CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);

View File

@@ -0,0 +1,7 @@
CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);
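
Note that gin_trgm_ops is provided by the pg_trgm extension, so this migration assumes pg_trgm is already enabled on the target database; if that is not guaranteed, it would need to be preceded by:

CREATE EXTENSION IF NOT EXISTS pg_trgm;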

View File

@@ -0,0 +1,3 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);

View File

@@ -0,0 +1 @@
CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);
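
A descending index on created_at mainly serves newest-first listings; a hypothetical query shape it would accelerate:

SELECT * FROM exportlog ORDER BY created_at DESC LIMIT 50;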

View File

@@ -0,0 +1,4 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
-- WHERE read = false AND isoutbound = false;

View File

@@ -0,0 +1,2 @@
CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
WHERE read = false AND isoutbound = false;
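
The WHERE clause makes this a partial index: it only contains unread inbound rows, so it stays small and matches the unread-count aggregate exactly. A hypothetical query it would serve:

SELECT count(*) FROM messages WHERE read = false AND isoutbound = false;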

View File

@@ -167,7 +167,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
const Vendor = {
-    DisplayName: bill.vendor.name
+    DisplayName: StandardizeName(bill.vendor.name)
};
try {
const result = await oauthClient.makeApiCall({

View File

@@ -10,7 +10,7 @@ function urlBuilder(realmId, object, query = null) {
}
function StandardizeName(str) {
-  return str.replace(new RegExp(/'/g), "\\'");
+  return str.replace(new RegExp(/'/g), "\\'").trim();
}
exports.urlBuilder = urlBuilder;
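
For context: QuickBooks Online query strings delimit literals with single quotes, so embedded quotes must be escaped. A sketch of the new behavior (the input is a made-up example):

StandardizeName("O'Brien Auto "); // returns "O\\'Brien Auto" (quote escaped, trailing space trimmed)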

View File

@@ -13,6 +13,7 @@ let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");
const AHDineroFormat = "0.00";
const AhDateFormat = "MMDDYYYY";
@@ -26,170 +27,176 @@ const ftpSetup = {
password: process.env.AUTOHOUSE_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"]
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid]
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
//Query for the List of Bodyshop Clients.
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const autoHouseObject = {
AutoHouseExport: {
RepairOrder: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
};
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
const batchSize = 10;
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
autoHouseObject
)
.end({ allowEmptyTags: true });
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("autohouse-shopsToProcess-generated", "DEBUG", "api", null, null);
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autuhouseid: bodyshop.autuhouseid,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
if (shopsToProcess.length === 0) {
logger.log("autohouse-shopsToProcess-empty", "DEBUG", "api", null, null);
res.sendStatus(200);
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
...errors
})
);
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("autohouse-end", "DEBUG", "api", null, null);
res.sendStatus(200);
}
} catch (error) {
logger.log("autohouse-shopsToProcess-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
res.status(500).json({ error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
//Connect to the FTP and upload all.
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
await sftp.connect(ftpSetup);
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
for (const xmlObj of allxmlsToUpload) {
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename
});
const autoHouseObject = {
AutoHouseExport: {
RepairOrder: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
};
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
uploadResult
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
const ret = builder.create({}, autoHouseObject).end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
...error
//Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autuhouseid: bodyshop.autuhouseid,
fatal: true,
errors: [error.toString()]
});
} finally {
sftp.end();
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autuhouseid: bodyshop.autuhouseid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
}
async function uploadViaSFTP(allxmlsToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("autohouse-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, { filename: xmlObj.filename });
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("autohouse-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
}
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
}
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -287,8 +294,8 @@ const CreateRepairOrderTag = (job, errorCallback) => {
InsuranceCo: job.ins_co_nm || "",
CompanyName: job.ins_co_nm || "",
Address: job.ins_addr1 || "",
-      City: job.ins_addr1 || "",
-      State: job.ins_city || "",
+      City: job.ins_city || "",
+      State: job.ins_st || "",
Zip: job.ins_zip || "",
Phone: job.ins_ph1 || "",
Fax: job.ins_fax || "",
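
A sketch of how this endpoint might be exercised manually (the route path is a placeholder, since it is not shown in this diff; the header and body fields come from the handler above):

curl -X POST https://<api-host>/autohouse \
  -H "x-imex-auth: $AUTOHOUSE_AUTH_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"bodyshopIds": ["<uuid>"], "start": "2024-11-01", "end": "2024-11-08", "skipUpload": true}'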

View File

@@ -22,135 +22,128 @@ const ftpSetup = {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
+const allcsvsToUpload = [];
+const allErrors = [];
exports.default = async (req, res) => {
  // Only process if in production environment.
  if (process.env.NODE_ENV !== "production") {
    res.sendStatus(403);
    return;
  }
  // Only process if the appropriate token is provided.
  if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
    res.sendStatus(401);
    return;
  }
-  //Query for the List of Bodyshop Clients.
-  logger.log("chatter-start", "DEBUG", "api", null, null);
-  const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS);
-  const specificShopIds = req.body.bodyshopIds; // ['uuid']
-  const { start, end, skipUpload } = req.body; //YYYY-MM-DD
-  const allcsvsToUpload = [];
-  const allErrors = [];
  try {
-    for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
-      logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, {
-        shopname: bodyshop.shopname
-      });
-      try {
-        const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, {
-          bodyshopid: bodyshop.id,
-          start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"),
-          ...(end && { end: moment(end).endOf("day") })
-        });
-        const chatterObject = jobs.map((j) => {
-          return {
-            poc_trigger_code: bodyshops_by_pk.chatterid,
-            firstname: j.ownr_co_nm ? null : j.ownr_fn,
-            lastname: j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
-            transaction_id: j.ro_number,
-            email: j.ownr_ea,
-            phone_number: j.ownr_ph1
-          };
-        });
-        const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" });
-        allcsvsToUpload.push({
-          count: chatterObject.length,
-          csv: ret,
-          filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv`
-        });
-        logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, {
-          shopname: bodyshop.shopname
-        });
-      } catch (error) {
-        //Error at the shop level.
-        logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, {
-          ...error
-        });
-        allErrors.push({
-          bodyshopid: bodyshop.id,
-          imexshopid: bodyshop.imexshopid,
-          shopname: bodyshop.shopname,
-          fatal: true,
-          errors: [error.toString()]
-        });
-      } finally {
-        allErrors.push({
-          bodyshopid: bodyshop.id,
-          imexshopid: bodyshop.imexshopid,
-          shopname: bodyshop.shopname
-        });
-      }
-    }
-    if (skipUpload) {
-      for (const csvObj of allcsvsToUpload) {
-        fs.writeFile(`./logs/${csvObj.filename}`, csvObj.csv);
-      }
-      sendServerEmail({
-        subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
-        text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
-        Uploaded: ${JSON.stringify(
-          allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
-          null,
-          2
-        )}
-        `
-      });
-      res.json(allcsvsToUpload);
-      return;
-    }
-    const sftp = new Client();
-    sftp.on("error", (errors) => logger.log("chatter-sftp-error", "ERROR", "api", null, { ...errors }));
-    try {
-      //Get the private key from AWS Secrets Manager.
-      ftpSetup.privateKey = await getPrivateKey();
-      //Connect to the FTP and upload all.
-      await sftp.connect(ftpSetup);
-      for (const csvObj of allcsvsToUpload) {
-        logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename });
-        const uploadResult = await sftp.put(Buffer.from(csvObj.xml), `/${csvObj.filename}`);
-        logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, { uploadResult });
-      }
-    } catch (error) {
-      logger.log("chatter-sftp-error", "ERROR", "api", null, { ...error });
-    } finally {
-      sftp.end();
-    }
-    sendServerEmail({
-      subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
-      text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
-      Uploaded: ${JSON.stringify(
-        allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
-        null,
-        2
-      )}`
-    });
-    res.sendStatus(200);
+    //Query for the List of Bodyshop Clients.
+    logger.log("chatter-start", "DEBUG", "api", null, null);
+    const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS);
+    const specificShopIds = req.body.bodyshopIds; // ['uuid']
+    const { start, end, skipUpload } = req.body; //YYYY-MM-DD
+    const batchSize = 10;
+    const shopsToProcess =
+      specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
+    logger.log("chatter-shopsToProcess-generated", "DEBUG", "api", null, null);
+    if (shopsToProcess.length === 0) {
+      logger.log("chatter-shopsToProcess-empty", "DEBUG", "api", null, null);
+      res.sendStatus(200);
+      return;
+    }
+    for (let i = 0; i < shopsToProcess.length; i += batchSize) {
+      const batch = shopsToProcess.slice(i, i + batchSize);
+      await processBatch(batch, start, end);
+    }
+    if (skipUpload) {
+      for (const csvObj of allcsvsToUpload) {
+        // writeFileSync here: fs.writeFile without a callback throws in current Node versions
+        fs.writeFileSync(`./logs/${csvObj.filename}`, csvObj.csv);
+      }
+    } else {
+      await uploadViaSFTP(allcsvsToUpload);
+    }
+    sendServerEmail({
+      subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
+      text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
+      Uploaded: ${JSON.stringify(
+        allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
+        null,
+        2
+      )}`
+    });
+    logger.log("chatter-end", "DEBUG", "api", null, null);
+    res.sendStatus(200);
  } catch (error) {
-    res.status(200).json(error);
+    logger.log("chatter-shopsToProcess-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
+    res.status(500).json({ error: error.message, stack: error.stack });
  }
};
+async function processBatch(batch, start, end) {
+  for (const bodyshop of batch) {
+    try {
+      logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, {
+        shopname: bodyshop.shopname
+      });
+      const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, {
+        bodyshopid: bodyshop.id,
+        start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"),
+        ...(end && { end: moment(end).endOf("day") })
+      });
+      const chatterObject = jobs.map((j) => {
+        return {
+          poc_trigger_code: bodyshops_by_pk.chatterid,
+          firstname: j.ownr_co_nm ? null : j.ownr_fn,
+          lastname: j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
+          transaction_id: j.ro_number,
+          email: j.ownr_ea,
+          phone_number: j.ownr_ph1
+        };
+      });
+      const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" });
+      allcsvsToUpload.push({
+        count: chatterObject.length,
+        csv: ret,
+        filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv`
+      });
+      logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, {
+        shopname: bodyshop.shopname
+      });
+    } catch (error) {
+      //Error at the shop level.
+      logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
+      allErrors.push({
+        bodyshopid: bodyshop.id,
+        imexshopid: bodyshop.imexshopid,
+        shopname: bodyshop.shopname,
+        fatal: true,
+        errors: [error.toString()]
+      });
+    } finally {
+      allErrors.push({
+        bodyshopid: bodyshop.id,
+        imexshopid: bodyshop.imexshopid,
+        shopname: bodyshop.shopname
+      });
+    }
+  }
+}
async function getPrivateKey() {
// Connect to AWS Secrets Manager
const client = new SecretsManagerClient({ region: "ca-central-1" });
@@ -160,10 +153,49 @@ async function getPrivateKey() {
  try {
    const { SecretString, SecretBinary } = await client.send(command);
    if (SecretString || SecretBinary) logger.log("chatter-retrieved-private-key", "DEBUG", "api", null, null);
-    const chatterPrivateKey = SecretString ? JSON.parse(SecretString) : JSON.parse(Buffer.from(SecretBinary, "base64").toString("ascii"));
-    return chatterPrivateKey.private_key;
+    const chatterPrivateKey = SecretString
+      ? SecretString
+      : Buffer.from(SecretBinary, "base64").toString("ascii");
+    return chatterPrivateKey;
  } catch (error) {
-    logger.log("chatter-get-private-key", "ERROR", "api", null, error);
-    throw err;
+    logger.log("chatter-get-private-key", "ERROR", "api", null, { error: error.message, stack: error.stack });
+    throw error;
  }
}
+async function uploadViaSFTP(allcsvsToUpload) {
+  const sftp = new Client();
+  sftp.on("error", (errors) =>
+    logger.log("chatter-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
+  );
+  try {
+    //Get the private key from AWS Secrets Manager.
+    const privateKey = await getPrivateKey();
+    //Connect to the FTP and upload all.
+    await sftp.connect({ ...ftpSetup, privateKey });
+    for (const csvObj of allcsvsToUpload) {
+      try {
+        logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename });
+        csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
+        logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, {
+          filename: csvObj.filename,
+          result: csvObj.result
+        });
+      } catch (error) {
+        logger.log("chatter-sftp-upload-error", "ERROR", "api", null, {
+          filename: csvObj.filename,
+          error: error.message,
+          stack: error.stack
+        });
+        throw error;
+      }
+    }
+  } catch (error) {
+    logger.log("chatter-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
+    throw error;
+  } finally {
+    sftp.end();
+  }
+}

View File

@@ -78,16 +78,20 @@ const jobLifecycle = async (req, res) => {
Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
-      const percentage = (value / finalTotal) * 100;
+      const percentage = finalTotal > 0 ? (value / finalTotal) * 100 : 0;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
+      const averageValue = _.size(jobIDs) > 0 ? value / jobIDs.length : 0;
+      const averageHumanReadable = durationToHumanReadable(moment.duration(averageValue));
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
-        roundedPercentage
+        roundedPercentage,
+        averageValue,
+        averageHumanReadable
});
});
@@ -100,7 +104,12 @@ const jobLifecycle = async (req, res) => {
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
-      humanReadable: durationToHumanReadable(moment.duration(finalTotal))
+      humanReadable: durationToHumanReadable(moment.duration(finalTotal)),
+      averageValue: _.size(jobIDs) > 0 ? finalTotal / jobIDs.length : 0,
+      averageHumanReadable:
+        _.size(jobIDs) > 0
+          ? durationToHumanReadable(moment.duration(finalTotal / jobIDs.length))
+          : durationToHumanReadable(moment.duration(0))
}
});
};
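
A quick sanity check of the new math (values are made up): with three job IDs and a finalTotal of 90 minutes for a status, averageValue is finalTotal / jobIDs.length, i.e. 30 minutes, and the _.size(jobIDs) > 0 and finalTotal > 0 guards make an empty result set yield 0 instead of NaN:

const finalTotal = moment.duration(90, "minutes").asMilliseconds(); // 5400000 ms
const jobIDs = ["a", "b", "c"];
const averageValue = jobIDs.length > 0 ? finalTotal / jobIDs.length : 0; // 1800000 ms = 30 minutes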

upload/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
*
!.gitignore