feature/IO-2769-Job-Totals-Testing

This commit is contained in:
Dave Richer
2025-04-08 11:42:27 -04:00
parent 5f66488410
commit a18ce18d72
12 changed files with 295 additions and 53 deletions

View File

@@ -133,6 +133,16 @@ export function JobsDetailHeaderActions({
const { socket } = useSocket();
const notification = useNotification();
const isDevEnv = import.meta.env.DEV;
const isProdEnv = import.meta.env.PROD;
const userEmail = currentUser?.email || "";
const devEmails = ["imex.dev", "rome.dev"];
const prodEmails = ["imex.prod", "rome.prod", "imex.test", "rome.test"];
const hasValidEmail = (emails) => emails.some((email) => userEmail.endsWith(email));
const canSubmitForTesting = (isDevEnv && hasValidEmail(devEmails)) || (isProdEnv && hasValidEmail(prodEmails));
const {
treatments: { ImEXPay }
} = useSplitTreatments({
@@ -171,7 +181,7 @@ export function JobsDetailHeaderActions({
{ defaultOpenStatus: bodyshop.md_ro_statuses.default_imported },
(newJobId) => {
history(`/manage/jobs/${newJobId}`);
notification["success"]({
notification.success({
message: t("jobs.successes.duplicated")
});
},
@@ -181,7 +191,7 @@ export function JobsDetailHeaderActions({
const handleDuplicateConfirm = () =>
DuplicateJob(client, job.id, { defaultOpenStatus: bodyshop.md_ro_statuses.default_imported }, (newJobId) => {
history(`/manage/jobs/${newJobId}`);
notification["success"]({
notification.success({
message: t("jobs.successes.duplicated")
});
});
@@ -217,13 +227,13 @@ export function JobsDetailHeaderActions({
const result = await deleteJob({ variables: { id: job.id } });
if (!result.errors) {
notification["success"]({
notification.success({
message: t("jobs.successes.delete")
});
//go back to jobs list.
history(`/manage/`);
} else {
notification["error"]({
notification.error({
message: t("jobs.errors.deleted", {
error: JSON.stringify(result.errors)
})
@@ -275,9 +285,9 @@ export function JobsDetailHeaderActions({
});
if (!result.errors) {
notification["success"]({ message: t("csi.successes.created") });
notification.success({ message: t("csi.successes.created") });
} else {
notification["error"]({
notification.error({
message: t("csi.errors.creating", {
message: JSON.stringify(result.errors)
})
@@ -316,7 +326,7 @@ export function JobsDetailHeaderActions({
`${window.location.protocol}//${window.location.host}/csi/${result.data.insert_csi.returning[0].id}`
);
} else {
notification["error"]({
notification.error({
message: t("messaging.error.invalidphone")
});
}
@@ -328,7 +338,7 @@ export function JobsDetailHeaderActions({
);
}
} else {
notification["error"]({
notification.error({
message: t("csi.errors.notconfigured")
});
}
@@ -358,7 +368,7 @@ export function JobsDetailHeaderActions({
});
setMessage(`${window.location.protocol}//${window.location.host}/csi/${job.csiinvites[0].id}`);
} else {
notification["error"]({
notification.error({
message: t("messaging.error.invalidphone")
});
}
@@ -398,7 +408,7 @@ export function JobsDetailHeaderActions({
});
if (!result.errors) {
notification["success"]({
notification.success({
message: t("jobs.successes.voided")
});
insertAuditTrail({
@@ -409,7 +419,7 @@ export function JobsDetailHeaderActions({
//go back to jobs list.
history(`/manage/`);
} else {
notification["error"]({
notification.error({
message: t("jobs.errors.voiding", {
error: JSON.stringify(result.errors)
})
@@ -442,7 +452,7 @@ export function JobsDetailHeaderActions({
console.log("handle -> XML", QbXmlResponse);
} catch (error) {
console.log("Error getting QBXML from Server.", error);
notification["error"]({
notification.error({
message: t("jobs.errors.exporting", {
error: "Unable to retrieve QBXML. " + JSON.stringify(error.message)
})
@@ -460,7 +470,7 @@ export function JobsDetailHeaderActions({
});
} catch (error) {
console.log("Error connecting to quickbooks or partner.", error);
notification["error"]({
notification.error({
message: t("jobs.errors.exporting-partner")
});
@@ -556,7 +566,7 @@ export function JobsDetailHeaderActions({
}
});
if (!jobUpdate.errors) {
notification["success"]({
notification.success({
message: t("appointments.successes.canceled")
});
insertAuditTrail({
@@ -931,11 +941,11 @@ export function JobsDetailHeaderActions({
});
if (!result.errors) {
notification["success"]({
notification.success({
message: t("jobs.successes.partsqueue")
});
} else {
notification["error"]({
notification.error({
message: t("jobs.errors.saving", {
error: JSON.stringify(result.errors)
})
@@ -1111,6 +1121,27 @@ export function JobsDetailHeaderActions({
});
}
if (canSubmitForTesting) {
  menuItems.push({
    key: "submitfortesting",
    id: "job-actions-submitfortesting",
    label: t("menus.jobsactions.submit-for-testing"),
    // Posts the job id to the totals-recorder endpoint, which snapshots the
    // job's totals input/output to S3 for regression testing.
    onClick: async () => {
      try {
        await axios.post("/job/totals-recorder", { id: job.id });
        notification.success({
          message: t("general.messages.submit-for-testing")
        });
      } catch (err) {
        console.error(`Error submitting job for testing: ${err?.message}`);
        notification.error({
          // Fixed i18n key: was "genera.errors.submit-for-testing-error" —
          // "general" was misspelled and the "-error" suffix does not match the
          // "submit-for-testing" key added to the locale files' errors section,
          // so the toast would have shown a raw missing-translation key.
          message: t("general.errors.submit-for-testing")
        });
      }
    }
  });
}
const menu = {
items: menuItems,
key: "popovermenu"

View File

@@ -1220,7 +1220,8 @@
"errors": {
"fcm": "You must allow notification permissions to have real time messaging. Click to try again.",
"notfound": "No record was found.",
"sizelimit": "The selected items exceed the size limit."
"sizelimit": "The selected items exceed the size limit.",
"submit-for-testing": "Error submitting Job for testing."
},
"itemtypes": {
"contract": "CC Contract",
@@ -1322,7 +1323,8 @@
"partnernotrunning": "{{app}} has detected that the partner is not running. Please ensure it is running to enable full functionality.",
"rbacunauth": "You are not authorized to view this content. Please reach out to your shop manager to change your access level.",
"unsavedchanges": "You have unsaved changes.",
"unsavedchangespopup": "You have unsaved changes. Are you sure you want to leave?"
"unsavedchangespopup": "You have unsaved changes. Are you sure you want to leave?",
"submit-for-testing": "Submitted Job for testing successfully."
},
"validation": {
"dateRangeExceeded": "The date range has been exceeded.",
@@ -2314,7 +2316,8 @@
"duplicate": "Duplicate this Job",
"duplicatenolines": "Duplicate this Job without Repair Data",
"newcccontract": "Create Courtesy Car Contract",
"void": "Void Job"
"void": "Void Job",
"submit-for-testing": "Submit for Testing"
},
"jobsdetail": {
"claimdetail": "Claim Details",

View File

@@ -1220,7 +1220,8 @@
"errors": {
"fcm": "",
"notfound": "",
"sizelimit": ""
"sizelimit": "",
"submit-for-testing": ""
},
"itemtypes": {
"contract": "",
@@ -1322,7 +1323,8 @@
"partnernotrunning": "",
"rbacunauth": "",
"unsavedchanges": "Usted tiene cambios no guardados.",
"unsavedchangespopup": ""
"unsavedchangespopup": "",
"submit-for-testing": ""
},
"validation": {
"dateRangeExceeded": "",
@@ -2314,7 +2316,8 @@
"duplicate": "",
"duplicatenolines": "",
"newcccontract": "",
"void": ""
"void": "",
"submit-for-testing": ""
},
"jobsdetail": {
"claimdetail": "Detalles de la reclamación",

View File

@@ -1220,7 +1220,8 @@
"errors": {
"fcm": "",
"notfound": "",
"sizelimit": ""
"sizelimit": "",
"submit-for-testing": ""
},
"itemtypes": {
"contract": "",
@@ -1322,7 +1323,9 @@
"partnernotrunning": "",
"rbacunauth": "",
"unsavedchanges": "Vous avez des changements non enregistrés.",
"unsavedchangespopup": ""
"unsavedchangespopup": "",
"submit-for-testing": ""
},
"validation": {
"dateRangeExceeded": "",
@@ -2314,7 +2317,8 @@
"duplicate": "",
"duplicatenolines": "",
"newcccontract": "",
"void": ""
"void": "",
"submit-for-testing": ""
},
"jobsdetail": {
"claimdetail": "Détails de la réclamation",

View File

@@ -117,6 +117,7 @@ services:
aws --endpoint-url=http://localstack:4566 secretsmanager create-secret --name CHATTER_PRIVATE_KEY --secret-string file:///tmp/certs/io-ftp-test.key
aws --endpoint-url=http://localstack:4566 logs create-log-group --log-group-name development --region ca-central-1
aws --endpoint-url=http://localstack:4566 s3api create-bucket --bucket imex-large-log --create-bucket-configuration LocationConstraint=ca-central-1
aws --endpoint-url=http://localstack:4566 s3api create-bucket --bucket imex-job-totals --create-bucket-configuration LocationConstraint=ca-central-1
"
# Node App: The Main IMEX API
node-app:

View File

@@ -0,0 +1,77 @@
const fs = require("fs");
const path = require("path");
const logger = require("./server/utils/logger"); // Assuming same logger utility
const s3Client = require("./server/utils/s3"); // Using the S3 client utilities with LocalStack support
// S3 bucket holding the captured job-totals JSON files. Locally this bucket is
// created in the LocalStack stack (see the docker-compose s3api create-bucket step).
const S3_BUCKET_NAME = "imex-job-totals";
// Destination for downloaded fixtures: server/job/test/fixtures/job-totals
const FIXTURES_DIR = path.join(__dirname, "server", "job", "test", "fixtures", "job-totals");
/**
 * Creates the local fixtures directory (recursively) if it does not exist yet.
 * Logs only when a directory is actually created.
 */
const ensureFixturesDirectory = () => {
  if (fs.existsSync(FIXTURES_DIR)) return;
  fs.mkdirSync(FIXTURES_DIR, { recursive: true });
  logger.log(`Created fixtures directory: ${FIXTURES_DIR}`, "info");
};
/**
 * Downloads every .json object from the job-totals bucket into the local
 * fixtures directory, creating the directory first if needed.
 * @param {{email: string}} [userInfo] - identity used for log attribution
 * @returns {Promise<void>}
 * @throws rethrows any S3/filesystem error after logging it
 */
const downloadJsonFiles = async (userInfo = { email: "system" }) => {
  logger.log(`Starting download of JSON files from bucket: ${S3_BUCKET_NAME}`, "debug", userInfo.email);
  try {
    ensureFixturesDirectory();
    const contents = await s3Client.listFilesInS3Bucket(S3_BUCKET_NAME);
    if (!contents.length) {
      logger.log("No files found in bucket", "info", userInfo.email);
      return;
    }
    logger.log(`Found ${contents.length} files in bucket`, "info", userInfo.email);
    for (const entry of contents) {
      if (entry.Key.endsWith(".json")) {
        logger.log(`Downloading: ${entry.Key}`, "debug", userInfo.email);
        const s3Object = await s3Client.downloadFileFromS3({
          bucketName: S3_BUCKET_NAME,
          key: entry.Key
        });
        const text = await s3Object.transformToString();
        const destination = path.join(FIXTURES_DIR, path.basename(entry.Key));
        fs.writeFileSync(destination, text);
        logger.log(`Saved: ${destination}`, "info", userInfo.email);
      } else {
        logger.log(`Skipping non-JSON file: ${entry.Key}`, "debug", userInfo.email);
      }
    }
    logger.log("Download completed successfully", "info", userInfo.email);
  } catch (error) {
    logger.log("Failed to download JSON files", "error", userInfo.email, null, {
      error: error?.message,
      stack: error?.stack
    });
    throw error; // Re-throw so the CLI wrapper can exit with an error code
  }
};
// When executed directly (node download-job-totals-fixtures.js) run the
// download and exit with an explicit status code; when required, just export.
if (require.main === module) {
  downloadJsonFiles()
    .then(() => {
      console.log("Script completed successfully");
      process.exit(0); // Explicit success exit
    })
    .catch((error) => {
      console.error("Fatal error downloading files:", error);
      process.exit(1); // Explicit failure exit
    });
}
module.exports = downloadJsonFiles;

View File

@@ -12,7 +12,8 @@
"start": "node server.js",
"makeitpretty": "prettier --write \"**/*.{css,js,json,jsx,scss}\"",
"test:unit": "vitest run",
"test:watch": "vitest"
"test:watch": "vitest",
"job-totals-fixtures:local": "docker exec node-app /usr/bin/node /app/download-job-totals-fixtures.js"
},
"dependencies": {
"@aws-sdk/client-cloudwatch-logs": "^3.782.0",

View File

@@ -1,7 +1,6 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const { captureFixture } = require("./utils/seralizeHelper");
const InstanceMgr = require("../utils/instanceMgr").default;
//****************************************************** */
@@ -31,16 +30,8 @@ exports.totalsSsu = async function (req, res) {
const job = await client.setHeaders({ Authorization: BearerToken }).request(queries.GET_JOB_BY_PK, {
id: id
});
// Extract the job data (the input for TotalsServerSide)
const inputForTotals = job.jobs_by_pk;
const newTotals = await TotalsServerSide({ body: { job: inputForTotals, client: client } }, res, true);
// Capture fixture data (input and output), using job.id for the filename.
if (process.env?.SAVE_TOTALS_DATA === "true") {
captureFixture(inputForTotals, newTotals);
}
p;
const newTotals = await TotalsServerSide({ body: { job: job.jobs_by_pk, client: client } }, res, true);
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.UPDATE_JOB, {
jobId: id,

View File

@@ -0,0 +1,133 @@
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
const moment = require("moment");
const { captureFixture } = require("./utils/seralizeHelper");
const { TotalsServerSide: totalsServerSideCA } = require("./job-totals"); // Canadian version (imex)
const { TotalsServerSide: totalsServerSideUS } = require("./job-totals-USA");
const InstanceMgr = require("../utils/instanceMgr").default;
const { uploadFileToS3 } = require("../utils/s3");
// requires two buckets be made per env, job-totals-test, job-totals-production, locally it will
// use `job-totals` in the owncloud stack
/**
 * Resolves the bucket-name environment prefix from NODE_ENV.
 * Only "production" maps to "production"; every other value (including
 * "test", "development", or unset) falls back to "test".
 * @returns {string}
 */
const getEnvPrefix = () => {
  const nodeEnv = process.env?.NODE_ENV;
  return nodeEnv === "production" ? "production" : "test";
};
const envPrefix = getEnvPrefix();
// Local development targets the LocalStack bucket "imex-job-totals"; deployed
// environments use job-totals-test / job-totals-production per the prefix.
const S3_BUCKET_NAME = process.env?.NODE_ENV === "development" ? "imex-job-totals" : `job-totals-${envPrefix}`;
/**
 * Builds a unique, timestamped S3 object key for a job's totals snapshot.
 * @param {string} jobId - The job ID
 * @returns {string} - Key of the form "<jobId>-<ISO timestamp>.json"
 */
const generateS3Key = (jobId) => {
  const timestamp = moment().toISOString();
  return `${jobId}-${timestamp}.json`;
};
/**
 * Serializes the captured totals data and uploads it to the job-totals bucket.
 * @param {object} data - The data to upload
 * @param {string} jobId - The job ID (used in the object key and logs)
 * @param {object} userInfo - User information for logging
 * @returns {Promise<string>} - The S3 object key that was written
 * @throws rethrows the upload error after logging it
 */
const uploadJobTotalsToS3 = async (data, jobId, userInfo) => {
  const objectKey = generateS3Key(jobId);
  try {
    await uploadFileToS3({
      bucketName: S3_BUCKET_NAME,
      key: objectKey,
      content: JSON.stringify(data, null, 2),
      contentType: "application/json"
    });
    logger.log(`Job totals uploaded successfully to ${objectKey}`, "info", userInfo.email, jobId);
    return objectKey;
  } catch (error) {
    logger.log("Failed to upload job totals to S3", "error", userInfo.email, jobId, {
      error: error?.message,
      stack: error?.stack
    });
    throw error; // Let the route handler decide on the HTTP response
  }
};
/**
 * Fetches a job record by primary key via the user's GraphQL client.
 * @param {object} client - GraphQL client
 * @param {string} token - Bearer token for the Authorization header
 * @param {string} jobId - Job ID to fetch
 * @returns {Promise<object>} - The jobs_by_pk payload
 */
const fetchJobData = async (client, token, jobId) => {
  const response = await client
    .setHeaders({ Authorization: token })
    .request(queries.GET_JOB_BY_PK, { id: jobId });
  return response.jobs_by_pk;
};
/**
 * Express handler for POST /job/totals-recorder: fetches the job, runs the
 * instance-appropriate server-side totals calculation, pairs input and output
 * via captureFixture, and uploads the snapshot JSON to S3.
 * @param {object} req - Express request ({ body: { id }, BearerToken, userGraphQLClient, user })
 * @param {object} res - Express response
 * @returns {Promise<void>}
 */
const jobTotalsRecorder = async (req, res) => {
  const { id: jobId } = req.body;
  const userEmail = req?.user?.email;
  logger.log("Starting job totals recording", "debug", userEmail, jobId);
  try {
    const job = await fetchJobData(req.userGraphQLClient, req.BearerToken, jobId);
    // Select the CA (imex) or US (rome) totals implementation for this instance.
    const computeTotals = InstanceMgr({
      imex: totalsServerSideCA,
      rome: totalsServerSideUS
    });
    const totals = await computeTotals({ body: { job: job, client: req.userGraphQLClient }, req }, res, true);
    // captureFixture pairs the raw job input with the computed totals output.
    const payload = captureFixture(job, totals);
    await uploadJobTotalsToS3(payload, jobId, { email: userEmail });
    res.status(200).json({ success: true, message: "Job totals recorded successfully" });
  } catch (err) {
    logger.log("Failed to record job totals", "error", userEmail, jobId, {
      error: err?.message,
      stack: err?.stack
    });
    // The totals function also receives `res`; guard against double-sending.
    if (!res.headersSent) {
      res.status(503).json({
        success: false,
        message: "Error processing job totals",
        error: err.message
      });
    }
  }
};
module.exports = jobTotalsRecorder;

View File

@@ -1,7 +1,6 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const { captureFixture } = require("./utils/seralizeHelper");
//****************************************************** */
//****************************************************** */
@@ -31,16 +30,8 @@ exports.totalsSsu = async function (req, res) {
id: id
});
// Extract the job data (the input for TotalsServerSide)
const inputForTotals = job.jobs_by_pk;
// Capture the output of TotalsServerSide
const newTotals = await TotalsServerSide({ body: { job: inputForTotals, client: client } }, res, true);
// Capture fixture data (input and output), using job.id for the filename.
if (process.env?.SAVE_TOTALS_DATA === "true") {
captureFixture(inputForTotals, newTotals);
}
const newTotals = await TotalsServerSide({ body: { job: job.jobs_by_pk, client: client } }, res, true);
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.UPDATE_JOB, {
jobId: id,

View File

@@ -21,12 +21,9 @@ const serializeDinero = (key, value) => {
* Capture a fixture for job totals.
* @param inputData
* @param outputData
* @param saveLocally
*/
const captureFixture = (inputData, outputData) => {
if (!fs.existsSync(fixtureDir)) {
fs.mkdirSync(fixtureDir, { recursive: true });
}
const captureFixture = (inputData, outputData, saveLocally) => {
const fileName = `${inputData.id}.json`;
const filePath = path.join(fixtureDir, fileName);
@@ -44,7 +41,15 @@ const captureFixture = (inputData, outputData) => {
};
// Save the file using our custom serializer.
if (saveLocally) {
if (!fs.existsSync(fixtureDir)) {
fs.mkdirSync(fixtureDir, { recursive: true });
}
fs.writeFileSync(filePath, JSON.stringify(dataToSave, serializeDinero, 2), "utf8");
}
return dataToSave;
};
module.exports = {

View File

@@ -7,6 +7,7 @@ const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMI
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const { totals, statustransition, totalsSsu, costing, lifecycle, costingmulti, jobUpdated } = require("../job/job");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const jobTotalsRecorder = require("../job/job-totals-recorder");
router.post("/totals", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, totals);
router.post("/statustransition", eventAuthorizationMiddleware, statustransition);
@@ -17,5 +18,6 @@ router.post("/costingmulti", validateFirebaseIdTokenMiddleware, withUserGraphQLC
router.post("/partsscan", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, partsScan);
router.post("/ppc", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, ppc.generatePpc);
router.post("/job-updated", eventAuthorizationMiddleware, jobUpdated);
router.post("/totals-recorder", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, jobTotalsRecorder);
module.exports = router;