Merged in feature/IO-3210-Podium-Datapump (pull request #2273)

IO-3210 Podium Datapump

Approved-by: Dave Richer
This commit is contained in:
Allan Carr
2025-04-17 17:40:59 +00:00
committed by Dave Richer
9 changed files with 254 additions and 3 deletions

View File

@@ -2,7 +2,6 @@ const path = require("path");
const queries = require("../graphql-client/queries");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const _ = require("lodash");
const logger = require("../utils/logger");
const fs = require("fs");
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");

View File

@@ -3,4 +3,5 @@ exports.autohouse = require("./autohouse").default;
exports.chatter = require("./chatter").default;
exports.claimscorp = require("./claimscorp").default;
exports.kaizen = require("./kaizen").default;
// Bug fix: usageReport was exported twice (duplicate line left over from a
// merge); one assignment is sufficient — require() is cached anyway.
exports.usageReport = require("./usageReport").default;
exports.podium = require("./podium").default;

211
server/data/podium.js Normal file
View File

@@ -0,0 +1,211 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");
// SFTP connection options for Podium's file drop; credentials come from the
// environment file loaded by dotenv above (.env.<NODE_ENV>).
const ftpSetup = {
  host: process.env.PODIUM_HOST,
  port: process.env.PODIUM_PORT,
  username: process.env.PODIUM_USER,
  password: process.env.PODIUM_PASSWORD,
  // Route ssh2's verbose debug chatter through the app logger outside production;
  // in production it is a no-op.
  debug:
    process.env.NODE_ENV !== "production"
      ? (message, ...data) => logger.log(message, "DEBUG", "api", null, data)
      : () => {},
  // NOTE(review): ssh-rsa and ssh-dss are legacy host-key algorithms —
  // presumably required by the remote server's key type; confirm before trimming.
  algorithms: {
    serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
  }
};
/**
 * HTTP handler: kicks off the Podium contact-data export.
 * Responds 202 immediately, then processes shops asynchronously and emails a
 * summary of uploads and errors when done.
 *
 * Request body (all optional): bodyshopIds (array of shop UUIDs to restrict
 * the run), start/end (YYYY-MM-DD date window), skipUpload (write CSVs to
 * ./logs instead of SFTP).
 */
exports.default = async (req, res) => {
  // Only process if in production environment.
  if (process.env.NODE_ENV !== "production") {
    res.sendStatus(403);
    return;
  }
  // Only process if the appropriate token is provided.
  // NOTE(review): this reuses AUTOHOUSE_AUTH_TOKEN for the Podium endpoint —
  // confirm that is intentional rather than a copy/paste from the autohouse handler.
  if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
    res.sendStatus(401);
    return;
  }
  // Send immediate response and continue processing (the extract can be slow);
  // all returns below this point happen after the client already got its 202.
  res.status(202).json({
    success: true,
    message: "Processing request ...",
    timestamp: new Date().toISOString()
  });
  try {
    logger.log("podium-start", "DEBUG", "api", null, null);
    const allCSVResults = [];
    const allErrors = [];
    const { bodyshops } = await client.request(queries.GET_PODIUM_SHOPS); // Query for the list of Podium-enrolled bodyshops.
    const specificShopIds = req.body.bodyshopIds; // optional array of bodyshop UUIDs to restrict the run
    const { start, end, skipUpload } = req.body; // start/end are YYYY-MM-DD strings
    // Restrict to the requested shops when any were given, otherwise run all.
    const shopsToProcess =
      specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
    logger.log("podium-shopsToProcess-generated", "DEBUG", "api", null, null);
    if (shopsToProcess.length === 0) {
      logger.log("podium-shopsToProcess-empty", "DEBUG", "api", null, null);
      return;
    }
    await processShopData(shopsToProcess, start, end, skipUpload, allCSVResults, allErrors);
    // Summary email: raw error list plus one line per uploaded CSV.
    await sendServerEmail({
      subject: `Podium Report ${moment().format("MM-DD-YY")}`,
      text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
        allCSVResults.map((x) => ({
          imexshopid: x.imexshopid,
          filename: x.filename,
          count: x.count,
          result: x.result
        })),
        null,
        2
      )}`
    });
    logger.log("podium-end", "DEBUG", "api", null, null);
  } catch (error) {
    // Response was already sent, so failures are only logged.
    logger.log("podium-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
  }
};
/**
 * Extract delivered-job contact records for each bodyshop, convert them to
 * CSV and upload via SFTP (or write to ./logs when skipUpload is set).
 * A shop-level failure is recorded and the loop continues with the next shop.
 *
 * @param {Array<object>} shopsToProcess - bodyshops from GET_PODIUM_SHOPS
 * @param {string|undefined} start - YYYY-MM-DD; defaults to two days ago
 * @param {string|undefined} end - YYYY-MM-DD; omitted => no upper bound variable sent
 * @param {boolean|undefined} skipUpload - write CSV locally instead of SFTP
 * @param {Array<object>} allCSVResults - accumulator for per-shop upload results
 * @param {Array<object>} allErrors - accumulator for per-shop errors
 */
async function processShopData(shopsToProcess, start, end, skipUpload, allCSVResults, allErrors) {
  for (const bodyshop of shopsToProcess) {
    // Per-job failures for this shop. NOTE(review): nothing currently pushes
    // into this array, so the failed-jobs logging below is inert.
    const erroredJobs = [];
    try {
      logger.log("podium-start-shop-extract", "DEBUG", "api", bodyshop.id, {
        shopname: bodyshop.shopname
      });
      const { jobs, bodyshops_by_pk } = await client.request(queries.PODIUM_QUERY, {
        bodyshopid: bodyshop.id,
        // Default window: from the start of the day two days ago.
        start: start ? moment(start).startOf("day") : moment().subtract(2, "days").startOf("day"),
        ...(end && { end: moment(end).endOf("day") })
      });
      // Shape each job into the column layout Podium expects; a company name
      // takes precedence over a personal first/last name.
      const podiumObject = jobs.map((j) => {
        return {
          "Podium Account ID": bodyshops_by_pk.podiumid,
          "First Name": j.ownr_co_nm ? null : j.ownr_fn,
          "Last Name": j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
          "SMS Number": null,
          "Phone 1": j.ownr_ph1,
          "Phone 2": j.ownr_ph2,
          Email: j.ownr_ea,
          "Delivered Date":
            (j.actual_delivery && moment(j.actual_delivery).tz(bodyshop.timezone).format("MM/DD/YYYY")) || ""
        };
      });
      if (erroredJobs.length > 0) {
        logger.log("podium-failed-jobs", "ERROR", "api", bodyshop.id, {
          count: erroredJobs.length,
          jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
        });
      }
      const csvObj = {
        bodyshopid: bodyshop.id,
        imexshopid: bodyshop.imexshopid,
        csv: converter.json2csv(podiumObject, { emptyFieldValue: "" }),
        // Bug fix: "mm" = minutes; the previous "MM" repeated the month in the
        // time portion of the timestamped filename.
        filename: `${bodyshop.podiumid}-${moment().format("YYYYMMDDTHHmmss")}.csv`,
        count: podiumObject.length
      };
      if (skipUpload) {
        fs.writeFileSync(`./logs/${csvObj.filename}`, csvObj.csv);
      } else {
        await uploadViaSFTP(csvObj); // sets csvObj.result on success
      }
      allCSVResults.push({
        bodyshopid: bodyshop.id,
        imexshopid: bodyshop.imexshopid,
        podiumid: bodyshop.podiumid,
        count: csvObj.count,
        filename: csvObj.filename,
        result: csvObj.result
      });
      logger.log("podium-end-shop-extract", "DEBUG", "api", bodyshop.id, {
        shopname: bodyshop.shopname
      });
    } catch (error) {
      // Error at the shop level — record it and continue with the next shop.
      logger.log("podium-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
      allErrors.push({
        bodyshopid: bodyshop.id,
        imexshopid: bodyshop.imexshopid,
        podiumid: bodyshop.podiumid,
        fatal: true,
        errors: [error.toString()]
      });
    } finally {
      // Bug fix: only record per-job failures when there are any; previously an
      // entry with an empty errors list was pushed for every shop, cluttering
      // the summary email.
      if (erroredJobs.length > 0) {
        allErrors.push({
          bodyshopid: bodyshop.id,
          imexshopid: bodyshop.imexshopid,
          podiumid: bodyshop.podiumid,
          errors: erroredJobs.map((ej) => ({
            ro_number: ej.job?.ro_number,
            jobid: ej.job?.id,
            error: ej.error
          }))
        });
      }
    }
  }
}
/**
 * Upload a shop's generated CSV to Podium's SFTP drop.
 * On success assigns the sftp.put() response to csvObj.result.
 *
 * @param {{bodyshopid: string, imexshopid: string, csv: string, filename: string}} csvObj
 * @throws re-throws connection/upload errors after logging so the caller can
 *         record a shop-level failure.
 */
async function uploadViaSFTP(csvObj) {
  const sftp = new Client();
  sftp.on("error", (errors) =>
    logger.log("podium-sftp-connection-error", "ERROR", "api", csvObj.bodyshopid, {
      error: errors.message,
      stack: errors.stack
    })
  );
  try {
    // Connect to the SFTP server and upload the file.
    await sftp.connect(ftpSetup);
    try {
      // Bug fix: the payload lives on csvObj.csv (see processShopData's csvObj);
      // csvObj.xml was never assigned, so Buffer.from(undefined) threw on every
      // upload attempt.
      csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
      logger.log("podium-sftp-upload", "DEBUG", "api", csvObj.bodyshopid, {
        imexshopid: csvObj.imexshopid,
        filename: csvObj.filename,
        result: csvObj.result
      });
    } catch (error) {
      logger.log("podium-sftp-upload-error", "ERROR", "api", csvObj.bodyshopid, {
        filename: csvObj.filename,
        error: error.message,
        stack: error.stack
      });
      throw error; // also logged (more generically) by the outer catch
    }
  } catch (error) {
    logger.log("podium-sftp-error", "ERROR", "api", csvObj.bodyshopid, {
      error: error.message,
      stack: error.stack
    });
    throw error;
  } finally {
    // ssh2-sftp-client's end() returns a promise; await it so the socket is
    // fully closed before the next shop's upload connects.
    await sftp.end();
  }
}

View File

@@ -1323,6 +1323,27 @@ exports.KAIZEN_QUERY = `query KAIZEN_EXPORT($start: timestamptz, $bodyshopid: uu
}
}`;
// Delivered-job contact extract for one shop: jobs marked converted, delivered
// after $start and up to $end, having at least a primary phone or an email.
// NOTE(review): $end is nullable — when the caller omits it, {_lte: $end} is
// evaluated against null; confirm the Hasura instance treats null comparisons
// as no-ops (older default) rather than matching nothing.
exports.PODIUM_QUERY = `query PODIUM_EXPORT($start: timestamptz, $bodyshopid: uuid!, $end: timestamptz) {
bodyshops_by_pk(id: $bodyshopid){
id
shopname
podiumid
timezone
}
jobs(where: {_and: [{converted: {_eq: true}}, {actual_delivery: {_gt: $start}}, {actual_delivery: {_lte: $end}}, {shopid: {_eq: $bodyshopid}}, {_or: [{ownr_ph1: {_is_null: false}}, {ownr_ea: {_is_null: false}}]}]}) {
actual_delivery
id
created_at
ro_number
ownr_fn
ownr_ln
ownr_co_nm
ownr_ph1
ownr_ph2
ownr_ea
}
}`;
exports.UPDATE_JOB = `
mutation UPDATE_JOB($jobId: uuid!, $job: jobs_set_input!) {
update_jobs(where: { id: { _eq: $jobId } }, _set: $job) {
@@ -1848,6 +1869,16 @@ exports.GET_KAIZEN_SHOPS = `query GET_KAIZEN_SHOPS($imexshopid: [String]) {
}
}`;
// Shops enrolled in Podium: podiumid present and non-empty.
// NOTE(review): the single-branch _or is redundant — the where clause is
// effectively podiumid IS NOT NULL AND podiumid != "".
exports.GET_PODIUM_SHOPS = `query GET_PODIUM_SHOPS {
bodyshops(where: {podiumid: {_is_null: false}, _or: {podiumid: {_neq: ""}}}){
id
shopname
podiumid
imexshopid
timezone
}
}`;
exports.DELETE_ALL_DMS_VEHICLES = `mutation DELETE_ALL_DMS_VEHICLES{
delete_dms_vehicles(where: {}) {
affected_rows

View File

@@ -1,11 +1,12 @@
const express = require("express");
const router = express.Router();
// Bug fix: the pre-merge require line (without podium) was left alongside the
// new one, redeclaring the same consts — a SyntaxError. Keep only the current
// destructuring of the export handlers from ../data/data.js.
const { autohouse, claimscorp, chatter, kaizen, usageReport, podium } = require("../data/data");
router.post("/ah", autohouse);
router.post("/cc", claimscorp);
router.post("/chatter", chatter);
router.post("/kaizen", kaizen);
router.post("/usagereport", usageReport);
router.post("/podium", podium);
module.exports = router;