148 lines
4.0 KiB
JavaScript
148 lines
4.0 KiB
JavaScript
const path = require("path");
|
|
const _ = require("lodash");
|
|
const logger = require("../utils/logger");
|
|
const xml2js = require("xml2js");
|
|
const GraphQLClient = require("graphql-request").GraphQLClient;
|
|
const queries = require("../graphql-client/queries");
|
|
|
|
require("dotenv").config({
|
|
path: path.resolve(
|
|
process.cwd(),
|
|
`.env.${process.env.NODE_ENV || "development"}`
|
|
),
|
|
});
|
|
|
|
exports.mixdataUpload = async (req, res) => {
|
|
const { bodyshopid } = req.body;
|
|
|
|
const BearerToken = req.headers.authorization;
|
|
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
|
|
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
|
|
headers: {
|
|
Authorization: BearerToken,
|
|
},
|
|
});
|
|
|
|
try {
|
|
req.files.forEach(async (element) => {
|
|
const b = Buffer.from(element.buffer);
|
|
console.log(b.toString());
|
|
|
|
const inboundRequest = await xml2js.parseStringPromise(b.toString(), {
|
|
explicitArray: false,
|
|
});
|
|
|
|
logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest);
|
|
|
|
const ScaleType = DetermineScaleType(inboundRequest);
|
|
const RoNumbersFromInboundRequest = GetListOfRos(
|
|
inboundRequest,
|
|
ScaleType
|
|
);
|
|
|
|
if (RoNumbersFromInboundRequest.length > 0) {
|
|
//Query the list of ROs based on the RO number.
|
|
const { jobs } = await client.request(queries.QUERY_JOB_ID_MIXDATA, {
|
|
roNumbers: RoNumbersFromInboundRequest,
|
|
});
|
|
|
|
//Create the hash for faster processing for inserts/updates.
|
|
const jobHash = {};
|
|
jobs.forEach((j) => {
|
|
jobHash[j.ro_number] = {
|
|
jobid: j.id,
|
|
mixdataid: j.mixdata.length > 0 ? j.mixdata[0].id : null,
|
|
};
|
|
});
|
|
const MixDataArray = GenerateMixDataArray(
|
|
inboundRequest,
|
|
ScaleType,
|
|
jobHash
|
|
);
|
|
|
|
const MixDataQuery = `
|
|
mutation UPSERT_MIXDATA{
|
|
${MixDataArray.map((md, idx) =>
|
|
GenerateGqlForMixData(md, idx)
|
|
).join(" ")}
|
|
}
|
|
`;
|
|
|
|
const resp = await client.request(MixDataQuery);
|
|
|
|
//Process the list of ROs and return an object to generate the queries.
|
|
}
|
|
});
|
|
res.sendStatus(200);
|
|
} catch (error) {
|
|
res.status(500).JSON(error);
|
|
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
|
|
error: error.message,
|
|
...error,
|
|
});
|
|
}
|
|
};
|
|
|
|
function DetermineScaleType(inboundRequest) {
|
|
const ret = { type: "", verson: 0 };
|
|
|
|
//PPG Mix Data
|
|
if (inboundRequest.PPG && inboundRequest.PPG.Header.Protocol.Name === "PPG") {
|
|
return {
|
|
type: inboundRequest.PPG.Header.Protocol.Name,
|
|
company: "PPG",
|
|
version: inboundRequest.PPG.Header.Protocol.Version,
|
|
};
|
|
}
|
|
}
|
|
|
|
function GetListOfRos(inboundRequest, ScaleType) {
|
|
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
|
|
return inboundRequest.PPG.DataExportInterface.ROData.RepairOrders.RO.map(
|
|
(r) => r.RONumber
|
|
);
|
|
}
|
|
}
|
|
|
|
function GenerateMixDataArray(inboundRequest, ScaleType, jobHash) {
|
|
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
|
|
return inboundRequest.PPG.DataExportInterface.ROData.RepairOrders.RO.map(
|
|
(r) => {
|
|
return {
|
|
jobid: jobHash[r.RONumber].jobid,
|
|
id: jobHash[r.RONumber].mixdataid,
|
|
mixdata: r,
|
|
totalliquidcost: r.TotalLiquidCost,
|
|
totalsundrycost: r.TotalSundryCost,
|
|
company: ScaleType.company,
|
|
version: ScaleType.version,
|
|
};
|
|
}
|
|
);
|
|
}
|
|
}
|
|
|
|
function GenerateGqlForMixData(mixdata, key) {
|
|
const { id, ...restMixData } = mixdata;
|
|
|
|
if (id) {
|
|
//Update.
|
|
return `
|
|
update${key}: update_mixdata_by_pk(pk_columns:{id: "${id}"}, _set: ${JSON.stringify(
|
|
restMixData
|
|
).replace(/"(\w+)"\s*:/g, "$1:")}){
|
|
id
|
|
}
|
|
`;
|
|
} else {
|
|
//Insert
|
|
return `
|
|
insert${key}: insert_mixdata_one(object: ${JSON.stringify(
|
|
restMixData
|
|
).replace(/"(\w+)"\s*:/g, "$1:")}){
|
|
id
|
|
}
|
|
`;
|
|
}
|
|
}
|