Reformat all project files to use the prettier config file.
This commit is contained in:
@@ -2,142 +2,124 @@ const path = require("path");
|
||||
const _ = require("lodash");
|
||||
const xml2js = require("xml2js");
|
||||
const queries = require("../graphql-client/queries");
|
||||
const logger = require('../utils/logger');
|
||||
const logger = require("../utils/logger");
|
||||
|
||||
// Load environment-specific settings (e.g. `.env.development`,
// `.env.production`) from the process working directory.
const envSuffix = process.env.NODE_ENV || "development";
require("dotenv").config({
  path: path.resolve(process.cwd(), `.env.${envSuffix}`),
});
|
||||
|
||||
exports.mixdataUpload = async (req, res) => {
|
||||
const {bodyshopid} = req.body;
|
||||
const { bodyshopid } = req.body;
|
||||
|
||||
const client = req.userGraphQLClient;
|
||||
const client = req.userGraphQLClient;
|
||||
|
||||
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
|
||||
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
|
||||
|
||||
try {
|
||||
for (const element of req.files) {
|
||||
const b = Buffer.from(element.buffer);
|
||||
|
||||
try {
|
||||
for (const element of req.files) {
|
||||
const b = Buffer.from(element.buffer);
|
||||
const inboundRequest = await xml2js.parseStringPromise(b.toString(), {
|
||||
explicitArray: false
|
||||
});
|
||||
|
||||
const inboundRequest = await xml2js.parseStringPromise(b.toString(), {
|
||||
explicitArray: false,
|
||||
});
|
||||
logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest);
|
||||
|
||||
logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest);
|
||||
const ScaleType = DetermineScaleType(inboundRequest);
|
||||
const RoNumbersFromInboundRequest = GetListOfRos(inboundRequest, ScaleType);
|
||||
|
||||
const ScaleType = DetermineScaleType(inboundRequest);
|
||||
const RoNumbersFromInboundRequest = GetListOfRos(
|
||||
inboundRequest,
|
||||
ScaleType
|
||||
);
|
||||
if (RoNumbersFromInboundRequest.length > 0) {
|
||||
//Query the list of ROs based on the RO number.
|
||||
const { jobs } = await client.request(queries.QUERY_JOB_ID_MIXDATA, {
|
||||
roNumbers: RoNumbersFromInboundRequest
|
||||
});
|
||||
|
||||
if (RoNumbersFromInboundRequest.length > 0) {
|
||||
//Query the list of ROs based on the RO number.
|
||||
const {jobs} = await client.request(queries.QUERY_JOB_ID_MIXDATA, {
|
||||
roNumbers: RoNumbersFromInboundRequest,
|
||||
});
|
||||
|
||||
//Create the hash for faster processing for inserts/updates.
|
||||
const jobHash = {};
|
||||
jobs.forEach((j) => {
|
||||
jobHash[j.ro_number] = {
|
||||
jobid: j.id,
|
||||
mixdataid: j.mixdata.length > 0 ? j.mixdata[0].id : null,
|
||||
};
|
||||
});
|
||||
const MixDataArray = GenerateMixDataArray(
|
||||
inboundRequest,
|
||||
ScaleType,
|
||||
jobHash
|
||||
);
|
||||
const foundJobs = MixDataArray.filter((m) => m.jobid);
|
||||
const MixDataQuery = `
|
||||
//Create the hash for faster processing for inserts/updates.
|
||||
const jobHash = {};
|
||||
jobs.forEach((j) => {
|
||||
jobHash[j.ro_number] = {
|
||||
jobid: j.id,
|
||||
mixdataid: j.mixdata.length > 0 ? j.mixdata[0].id : null
|
||||
};
|
||||
});
|
||||
const MixDataArray = GenerateMixDataArray(inboundRequest, ScaleType, jobHash);
|
||||
const foundJobs = MixDataArray.filter((m) => m.jobid);
|
||||
const MixDataQuery = `
|
||||
mutation UPSERT_MIXDATA{
|
||||
${foundJobs
|
||||
.map((md, idx) => GenerateGqlForMixData(md, idx))
|
||||
.join(" ")}
|
||||
${foundJobs.map((md, idx) => GenerateGqlForMixData(md, idx)).join(" ")}
|
||||
}
|
||||
`;
|
||||
if (foundJobs.length > 1) {
|
||||
const resp = await client.request(MixDataQuery);
|
||||
}
|
||||
|
||||
//Process the list of ROs and return an object to generate the queries.
|
||||
}
|
||||
if (foundJobs.length > 1) {
|
||||
const resp = await client.request(MixDataQuery);
|
||||
}
|
||||
res.sendStatus(200);
|
||||
} catch (error) {
|
||||
res.status(500).json(error);
|
||||
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
|
||||
error: error.message,
|
||||
...error,
|
||||
});
|
||||
|
||||
//Process the list of ROs and return an object to generate the queries.
|
||||
}
|
||||
}
|
||||
res.sendStatus(200);
|
||||
} catch (error) {
|
||||
res.status(500).json(error);
|
||||
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
|
||||
error: error.message,
|
||||
...error
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Identifies the scale/protocol that produced an inbound mix-data payload.
 *
 * @param {object} inboundRequest - Parsed XML document (xml2js, explicitArray:false).
 * @returns {{type: string, company: string, version: string}|undefined}
 *   Descriptor for a recognized protocol, or undefined when the payload is
 *   not recognized (callers must handle the undefined case).
 */
function DetermineScaleType(inboundRequest) {
  // Dead local `const ret = { type: "", verson: 0 }` (typo'd key, never
  // returned) removed.

  //PPG Mix Data
  // Optional chaining keeps a malformed PPG payload (missing Header/Protocol)
  // from throwing here; it falls through to undefined instead.
  if (inboundRequest.PPG?.Header?.Protocol?.Name === "PPG") {
    return {
      type: inboundRequest.PPG.Header.Protocol.Name,
      company: "PPG",
      version: inboundRequest.PPG.Header.Protocol.Version,
    };
  }
  // Unrecognized payload: implicit undefined (unchanged contract).
}
|
||||
|
||||
/**
 * Extracts the RO (repair order) numbers from an inbound mix-data payload
 * for a recognized scale protocol.
 *
 * @param {object} inboundRequest - Parsed XML (xml2js, explicitArray:false).
 * @param {{company: string, version: string}|undefined} ScaleType - Result of
 *   DetermineScaleType; may be undefined for unrecognized payloads.
 * @returns {string[]} RO numbers; empty when the protocol is unsupported.
 */
function GetListOfRos(inboundRequest, ScaleType) {
  if (ScaleType?.company === "PPG" && ScaleType.version === "1.3.0") {
    const ro = inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO;
    // BUGFIX: xml2js with explicitArray:false yields a bare object (not an
    // array) when the document contains a single <RO>, so `.map` threw.
    const roList = Array.isArray(ro) ? ro : [ro];
    return roList.map((r) => r.RONumber);
  }
  // Unsupported protocol/version: return an empty list instead of undefined
  // so the caller's `.length` check is always safe.
  return [];
}
|
||||
|
||||
/**
 * Maps each repair order in the inbound payload to a row ready for the
 * mixdata upsert mutation.
 *
 * @param {object} inboundRequest - Parsed XML (xml2js, explicitArray:false).
 * @param {{company: string, version: string}|undefined} ScaleType - Result of
 *   DetermineScaleType.
 * @param {Object<string, {jobid: *, mixdataid: *}>} jobHash - RO number ->
 *   matched job/mixdata ids; unmatched ROs yield undefined jobid/id.
 * @returns {Array<object>} Upsert rows; empty when the protocol is unsupported.
 */
function GenerateMixDataArray(inboundRequest, ScaleType, jobHash) {
  if (ScaleType?.company === "PPG" && ScaleType.version === "1.3.0") {
    const ro = inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO;
    // BUGFIX: xml2js with explicitArray:false yields a bare object (not an
    // array) when the document contains a single <RO>, so `.map` threw.
    const roList = Array.isArray(ro) ? ro : [ro];
    return roList.map((r) => ({
      jobid: jobHash[r.RONumber]?.jobid,
      id: jobHash[r.RONumber]?.mixdataid,
      mixdata: r,
      totalliquidcost: r.TotalLiquidCost,
      totalsundrycost: r.TotalSundryCost,
      company: ScaleType.company,
      version: ScaleType.version,
    }));
  }
  // Unsupported protocol/version: empty set instead of undefined so callers
  // can filter/iterate safely.
  return [];
}
|
||||
|
||||
/**
 * Builds one aliased GraphQL mutation field that upserts a mixdata row:
 * an `update_mixdata_by_pk` when the row already has an id, otherwise an
 * `insert_mixdata_one`.
 *
 * SECURITY(review): field values originate from uploaded XML and are spliced
 * into the query by string interpolation; JSON.stringify is the only
 * escaping applied. Prefer GraphQL variables if the client supports them.
 *
 * @param {object} mixdata - Upsert row from GenerateMixDataArray; `id` (if
 *   set) selects the update path, all other keys become the payload.
 * @param {number|string} key - Unique suffix for the mutation field alias.
 * @returns {string} GraphQL mutation field selecting the affected row's id.
 */
function GenerateGqlForMixData(mixdata, key) {
  const { id, ...restMixData } = mixdata;
  // Hasura-style object literal: JSON with the quotes stripped from keys.
  // (Was duplicated verbatim in both branches; computed once here.)
  const objectLiteral = JSON.stringify(restMixData).replace(/"(\w+)"\s*:/g, "$1:");

  if (id) {
    //Update.
    return `
      update${key}: update_mixdata_by_pk(pk_columns:{id: "${id}"}, _set: ${objectLiteral}){
        id
      }
    `;
  }
  //Insert
  return `
    insert${key}: insert_mixdata_one(object: ${objectLiteral}){
      id
    }
  `;
}
|
||||
|
||||
Reference in New Issue
Block a user