Merged in release/2024-11-08 (pull request #1893)

Release/2024-11-08 into master-AIO - IO-2921, IO-2969, IO-3001, IO-3015, IO-3017, IO-3018, IO-3025

Approved-by: Allan Carr
This commit is contained in:
Dave Richer
2024-11-09 04:52:01 +00:00
20 changed files with 384 additions and 248 deletions

View File

@@ -0,0 +1,24 @@
#!/bin/bash
# Install the Montserrat font family system-wide and rebuild the font cache.
#
# Abort on any command failure, unset variable, or failed pipeline so a
# partial install (e.g. a failed download or unzip) cannot fall through to
# the success message at the bottom.
set -euo pipefail

# Install packages required for font rendering and cache management.
dnf install -y fontconfig freetype

# Work in /tmp for the temporary download and extraction.
cd /tmp

# Download the Montserrat font zip file.
wget https://images.imex.online/fonts/montserrat.zip -O montserrat.zip

# Unzip the downloaded font archive (-o: overwrite without prompting, so a
# re-run of this script cannot hang waiting for interactive input).
unzip -o montserrat.zip -d montserrat

# Install the font files into the system fonts directory.
mv montserrat/*.ttf /usr/share/fonts

# Rebuild the font cache so the new fonts are visible immediately.
fc-cache -fv

# Clean up the temporary download and extraction artifacts.
rm -rf /tmp/montserrat /tmp/montserrat.zip
echo "Montserrat fonts installed and cached successfully."

View File

@@ -1,10 +1,10 @@
import { Card, Table, Tag } from "antd"; import { Card, Table, Tag } from "antd";
import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
import { useTranslation } from "react-i18next";
import React, { useEffect, useState } from "react";
import dayjs from "../../../utils/day";
import DashboardRefreshRequired from "../refresh-required.component";
import axios from "axios"; import axios from "axios";
import React, { useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import dayjs from "../../../utils/day";
import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
import DashboardRefreshRequired from "../refresh-required.component";
const fortyFiveDaysAgo = () => dayjs().subtract(45, "day").toLocaleString(); const fortyFiveDaysAgo = () => dayjs().subtract(45, "day").toLocaleString();
@@ -46,6 +46,11 @@ export default function JobLifecycleDashboardComponent({ data, bodyshop, ...card
dataIndex: "humanReadable", dataIndex: "humanReadable",
key: "humanReadable" key: "humanReadable"
}, },
{
title: t("job_lifecycle.columns.average_human_readable"),
dataIndex: "averageHumanReadable",
key: "averageHumanReadable"
},
{ {
title: t("job_lifecycle.columns.status_count"), title: t("job_lifecycle.columns.status_count"),
key: "statusCount", key: "statusCount",

View File

@@ -1338,6 +1338,8 @@
}, },
"job_lifecycle": { "job_lifecycle": {
"columns": { "columns": {
"average_human_readable": "Average Human Readable",
"average_value": "Average Value",
"duration": "Duration", "duration": "Duration",
"end": "End", "end": "End",
"human_readable": "Human Readable", "human_readable": "Human Readable",

View File

@@ -1338,6 +1338,8 @@
}, },
"job_lifecycle": { "job_lifecycle": {
"columns": { "columns": {
"average_human_readable": "",
"average_value": "",
"duration": "", "duration": "",
"end": "", "end": "",
"human_readable": "", "human_readable": "",

View File

@@ -1338,6 +1338,8 @@
}, },
"job_lifecycle": { "job_lifecycle": {
"columns": { "columns": {
"average_human_readable": "",
"average_value": "",
"duration": "", "duration": "",
"end": "", "end": "",
"human_readable": "", "human_readable": "",

View File

@@ -167,6 +167,27 @@ services:
# volumes: # volumes:
# - redis-insight-data:/db # - redis-insight-data:/db
# ##Optional Container for SFTP/SSH Server for testing
# ssh-sftp-server:
# image: atmoz/sftp:alpine # Using an image with SFTP support
# container_name: ssh-sftp-server
# hostname: ssh-sftp-server
# networks:
# - redis-cluster-net
# ports:
# - "2222:22" # Expose port 22 for SSH/SFTP (mapped to 2222 on the host)
# volumes:
# - ./certs/id_rsa.pub:/home/user/.ssh/keys/id_rsa.pub:ro # Mount the SSH public key
# - ./upload:/home/user/upload # Mount a local directory for SFTP uploads
# environment:
# - SFTP_USERS=user:password:1001:100:upload
# command: >
# /bin/sh -c "
# echo 'Match User user' >> /etc/ssh/sshd_config &&
# sed -i -e 's#ForceCommand internal-sftp#ForceCommand internal-sftp -d /upload#' /etc/ssh/sshd_config &&
# /usr/sbin/sshd -D
# "
networks: networks:
redis-cluster-net: redis-cluster-net:
driver: bridge driver: bridge

View File

@@ -0,0 +1,3 @@
-- Down migration for:
--   CREATE INDEX idx_timetickets_date ON timetickets (date);
-- Drops the index so the up migration can be cleanly reverted.
-- IF EXISTS keeps the rollback idempotent if the index was already removed.
DROP INDEX IF EXISTS idx_timetickets_date;

View File

@@ -0,0 +1 @@
-- Index timetickets by date to support date-filtered queries on the table.
CREATE INDEX idx_timetickets_date ON timetickets (date );

View File

@@ -0,0 +1,9 @@
-- Down migration for the trigram GIN indexes on the jobs search columns.
-- Drops each index created by the corresponding up migration so it can be
-- cleanly reverted. IF EXISTS keeps the rollback idempotent.
DROP INDEX IF EXISTS idx_jobs_ownr_fn;
DROP INDEX IF EXISTS idx_jobs_ownr_ln;
DROP INDEX IF EXISTS idx_jobs_ownr_co_nm;
DROP INDEX IF EXISTS idx_jobs_clm_no;
DROP INDEX IF EXISTS idx_jobs_v_make_desc;
DROP INDEX IF EXISTS idx_jobs_v_model_desc;
DROP INDEX IF EXISTS idx_jobs_plate_no;

View File

@@ -0,0 +1,7 @@
-- Trigram (gin_trgm_ops) GIN indexes on the jobs search columns to speed up
-- substring / ILIKE pattern matching against owner names, claim number,
-- vehicle make/model, and plate number.
-- NOTE(review): gin_trgm_ops requires the pg_trgm extension
-- (CREATE EXTENSION pg_trgm) — confirm it is enabled by an earlier migration.
CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);

View File

@@ -0,0 +1,3 @@
-- Down migration for:
--   CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);
-- Drops the index so the up migration can be cleanly reverted.
-- IF EXISTS keeps the rollback idempotent if the index was already removed.
DROP INDEX IF EXISTS idx_exportlog_createdat_desc;

View File

@@ -0,0 +1 @@
-- Descending index on exportlog.created_at to serve newest-first queries
-- (ORDER BY created_at DESC) without a sort step.
CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);

View File

@@ -0,0 +1,4 @@
-- Down migration for:
--   CREATE INDEX idx_messages_unread_agg ON messages (read, isoutbound)
--   WHERE read = false AND isoutbound = false;
-- Drops the partial index so the up migration can be cleanly reverted.
-- IF EXISTS keeps the rollback idempotent if the index was already removed.
DROP INDEX IF EXISTS idx_messages_unread_agg;

View File

@@ -0,0 +1,2 @@
-- Partial index restricted to unread inbound messages
-- (read = false AND isoutbound = false), so unread-count style queries only
-- touch the small matching subset of rows.
-- NOTE(review): both indexed columns are constant within the partial
-- predicate, so the key adds little selectivity beyond the WHERE clause —
-- confirm the aggregate query actually matches this index as intended.
CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
WHERE read = false AND isoutbound = false;

View File

@@ -167,7 +167,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) { async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
const Vendor = { const Vendor = {
DisplayName: bill.vendor.name DisplayName: StandardizeName(bill.vendor.name)
}; };
try { try {
const result = await oauthClient.makeApiCall({ const result = await oauthClient.makeApiCall({

View File

@@ -10,7 +10,7 @@ function urlBuilder(realmId, object, query = null) {
} }
function StandardizeName(str) { function StandardizeName(str) {
return str.replace(new RegExp(/'/g), "\\'"); return str.replace(new RegExp(/'/g), "\\'").trim();
} }
exports.urlBuilder = urlBuilder; exports.urlBuilder = urlBuilder;

View File

@@ -13,6 +13,7 @@ let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client; const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail"); const { sendServerEmail } = require("../email/sendemail");
const AHDineroFormat = "0.00"; const AHDineroFormat = "0.00";
const AhDateFormat = "MMDDYYYY"; const AhDateFormat = "MMDDYYYY";
@@ -26,36 +27,83 @@ const ftpSetup = {
password: process.env.AUTOHOUSE_PASSWORD, password: process.env.AUTOHOUSE_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data), debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: { algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"] serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
} }
}; };
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => { exports.default = async (req, res) => {
// Only process if in production environment. // Only process if in production environment.
if (process.env.NODE_ENV !== "production") { if (process.env.NODE_ENV !== "production") {
res.sendStatus(403); res.sendStatus(403);
return; return;
} }
// Only process if the appropriate token is provided.
//Query for the List of Bodyshop Clients.
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid]
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) { if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401); res.sendStatus(401);
return; return;
} }
const allxmlsToUpload = [];
const allErrors = [];
try { try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) { //Query for the List of Bodyshop Clients.
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("autohouse-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("autohouse-shopsToProcess-empty", "DEBUG", "api", null, null);
res.sendStatus(200);
return;
}
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("autohouse-end", "DEBUG", "api", null, null);
res.sendStatus(200);
}
} catch (error) {
logger.log("autohouse-shopsToProcess-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
res.status(500).json({ error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, { logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname shopname: bodyshop.shopname
}); });
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, { const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id, bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"), start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
@@ -79,16 +127,7 @@ exports.default = async (req, res) => {
}); });
} }
var ret = builder const ret = builder.create({}, autoHouseObject).end({ allowEmptyTags: true });
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
autoHouseObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({ allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length, count: autoHouseObject.AutoHouseExport.RepairOrder.length,
@@ -101,9 +140,7 @@ exports.default = async (req, res) => {
}); });
} catch (error) { } catch (error) {
//Error at the shop level. //Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, { logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
...error
});
allErrors.push({ allErrors.push({
bodyshopid: bodyshop.id, bodyshopid: bodyshop.id,
@@ -116,7 +153,7 @@ exports.default = async (req, res) => {
allErrors.push({ allErrors.push({
bodyshopid: bodyshop.id, bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid, imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid, autuhouseid: bodyshop.autuhouseid,
errors: erroredJobs.map((ej) => ({ errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number, ro_number: ej.job?.ro_number,
jobid: ej.job?.id, jobid: ej.job?.id,
@@ -125,71 +162,41 @@ exports.default = async (req, res) => {
}); });
} }
} }
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
} }
res.json(allxmlsToUpload); async function uploadViaSFTP(allxmlsToUpload) {
sendServerEmail({ const sftp = new Client();
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) => sftp.on("error", (errors) =>
logger.log("autohouse-sftp-error", "ERROR", "api", null, { logger.log("autohouse-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
...errors
})
); );
try { try {
//Connect to the FTP and upload all. //Connect to the FTP and upload all.
await sftp.connect(ftpSetup); await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) { for (const xmlObj of allxmlsToUpload) {
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, { try {
filename: xmlObj.filename logger.log("autohouse-sftp-upload", "DEBUG", "api", null, { filename: xmlObj.filename });
}); xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, { logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
uploadResult filename: xmlObj.filename,
result: xmlObj.result
}); });
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
} catch (error) { } catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, { logger.log("autohouse-sftp-upload-error", "ERROR", "api", null, {
...error filename: xmlObj.filename,
error: error.message,
stack: error.stack
}); });
throw error;
}
}
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally { } finally {
sftp.end(); sftp.end();
} }
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
} }
};
const CreateRepairOrderTag = (job, errorCallback) => { const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2 //Level 2
@@ -287,8 +294,8 @@ const CreateRepairOrderTag = (job, errorCallback) => {
InsuranceCo: job.ins_co_nm || "", InsuranceCo: job.ins_co_nm || "",
CompanyName: job.ins_co_nm || "", CompanyName: job.ins_co_nm || "",
Address: job.ins_addr1 || "", Address: job.ins_addr1 || "",
City: job.ins_addr1 || "", City: job.ins_city || "",
State: job.ins_city || "", State: job.ins_st || "",
Zip: job.ins_zip || "", Zip: job.ins_zip || "",
Phone: job.ins_ph1 || "", Phone: job.ins_ph1 || "",
Fax: job.ins_fax || "", Fax: job.ins_fax || "",

View File

@@ -22,31 +22,79 @@ const ftpSetup = {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"] serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
} }
}; };
const allcsvsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => { exports.default = async (req, res) => {
// Only process if in production environment. // Only process if in production environment.
if (process.env.NODE_ENV !== "production") { if (process.env.NODE_ENV !== "production") {
res.sendStatus(403); res.sendStatus(403);
return; return;
} }
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) { if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401); res.sendStatus(401);
return; return;
} }
try {
//Query for the List of Bodyshop Clients. //Query for the List of Bodyshop Clients.
logger.log("chatter-start", "DEBUG", "api", null, null); logger.log("chatter-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS); const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid] const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const allcsvsToUpload = []; const batchSize = 10;
const allErrors = [];
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("chatter-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("chatter-shopsToProcess-empty", "DEBUG", "api", null, null);
res.sendStatus(200);
return;
}
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
await processBatch(batch, start, end);
if (skipUpload) {
for (const csvObj of allcsvsToUpload) {
fs.writeFile(`./logs/${csvObj.filename}`, csvObj.csv);
}
} else {
await uploadViaSFTP(allcsvsToUpload);
}
sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("chatter-end", "DEBUG", "api", null, null);
res.sendStatus(200);
}
} catch (error) {
logger.log("chatter-shopsToProcess-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
res.status(500).json({ error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
try { try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, { logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname shopname: bodyshop.shopname
}); });
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, { const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, {
bodyshopid: bodyshop.id, bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"), start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"),
@@ -77,9 +125,7 @@ exports.default = async (req, res) => {
}); });
} catch (error) { } catch (error) {
//Error at the shop level. //Error at the shop level.
logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, { logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
...error
});
allErrors.push({ allErrors.push({
bodyshopid: bodyshop.id, bodyshopid: bodyshop.id,
@@ -96,61 +142,8 @@ exports.default = async (req, res) => {
}); });
} }
} }
if (skipUpload) {
for (const csvObj of allcsvsToUpload) {
fs.writeFile(`./logs/${csvObj.filename}`, csvObj.csv);
} }
sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.json(allcsvsToUpload);
return;
}
const sftp = new Client();
sftp.on("error", (errors) => logger.log("chatter-sftp-error", "ERROR", "api", null, { ...errors }));
try {
//Get the private key from AWS Secrets Manager.
ftpSetup.privateKey = await getPrivateKey();
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const csvObj of allcsvsToUpload) {
logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename });
const uploadResult = await sftp.put(Buffer.from(csvObj.xml), `/${csvObj.filename}`);
logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, { uploadResult });
}
} catch (error) {
logger.log("chatter-sftp-error", "ERROR", "api", null, { ...error });
} finally {
sftp.end();
}
sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
async function getPrivateKey() { async function getPrivateKey() {
// Connect to AWS Secrets Manager // Connect to AWS Secrets Manager
const client = new SecretsManagerClient({ region: "ca-central-1" }); const client = new SecretsManagerClient({ region: "ca-central-1" });
@@ -160,10 +153,49 @@ async function getPrivateKey() {
try { try {
const { SecretString, SecretBinary } = await client.send(command); const { SecretString, SecretBinary } = await client.send(command);
if (SecretString || SecretBinary) logger.log("chatter-retrieved-private-key", "DEBUG", "api", null, null); if (SecretString || SecretBinary) logger.log("chatter-retrieved-private-key", "DEBUG", "api", null, null);
const chatterPrivateKey = SecretString ? JSON.parse(SecretString) : JSON.parse(Buffer.from(SecretBinary, "base64").toString("ascii")); const chatterPrivateKey = SecretString
return chatterPrivateKey.private_key; ? SecretString
: Buffer.from(SecretBinary, "base64").toString("ascii");
return chatterPrivateKey;
} catch (error) { } catch (error) {
logger.log("chatter-get-private-key", "ERROR", "api", null, error); logger.log("chatter-get-private-key", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw err; throw error;
}
}
async function uploadViaSFTP(allcsvsToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("chatter-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
);
try {
//Get the private key from AWS Secrets Manager.
const privateKey = await getPrivateKey();
//Connect to the FTP and upload all.
await sftp.connect({ ...ftpSetup, privateKey });
for (const csvObj of allcsvsToUpload) {
try {
logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename });
csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, {
filename: csvObj.filename,
result: csvObj.result
});
} catch (error) {
logger.log("chatter-sftp-upload-error", "ERROR", "api", null, {
filename: csvObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
}
} catch (error) {
logger.log("chatter-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
} }
} }

View File

@@ -78,16 +78,20 @@ const jobLifecycle = async (req, res) => {
Object.keys(flatGroupedAllDurations).forEach((status) => { Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0); const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value)); const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = (value / finalTotal) * 100; const percentage = finalTotal > 0 ? (value / finalTotal) * 100 : 0;
const color = getLifecycleStatusColor(status); const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`; const roundedPercentage = `${Math.round(percentage)}%`;
const averageValue = _.size(jobIDs) > 0 ? value / jobIDs.length : 0;
const averageHumanReadable = durationToHumanReadable(moment.duration(averageValue));
finalSummations.push({ finalSummations.push({
status, status,
value, value,
humanReadable, humanReadable,
percentage, percentage,
color, color,
roundedPercentage roundedPercentage,
averageValue,
averageHumanReadable
}); });
}); });
@@ -100,7 +104,12 @@ const jobLifecycle = async (req, res) => {
totalStatuses: finalSummations.length, totalStatuses: finalSummations.length,
total: finalTotal, total: finalTotal,
statusCounts: finalStatusCounts, statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal)) humanReadable: durationToHumanReadable(moment.duration(finalTotal)),
averageValue: _.size(jobIDs) > 0 ? finalTotal / jobIDs.length : 0,
averageHumanReadable:
_.size(jobIDs) > 0
? durationToHumanReadable(moment.duration(finalTotal / jobIDs.length))
: durationToHumanReadable(moment.duration(0))
} }
}); });
}; };

2
upload/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
# Ignore everything in this directory except this .gitignore itself, so the
# (otherwise empty) upload/ directory stays tracked while its runtime
# contents are kept out of version control.
*
!.gitignore