IO-233 Mixdata schema updates and API.

This commit is contained in:
Patrick Fic
2022-04-28 09:54:15 -07:00
parent ad6d1202f2
commit 865f4776d0
5 changed files with 199 additions and 4 deletions

View File

@@ -2603,6 +2603,13 @@
insertion_order: null
column_mapping:
id: jobid
- name: mixdata
using:
foreign_key_constraint_on:
column: jobid
table:
schema: public
name: mixdata
- name: notes
using:
foreign_key_constraint_on:
@@ -3604,6 +3611,84 @@
_eq: X-Hasura-User-Id
- active:
_eq: true
# Hasura metadata for public.mixdata: paint-scale cost data, one row per job
# (NOTE(review): indentation below reflects the diff renderer, not the real file).
- table:
schema: public
name: mixdata
# Each mixdata row belongs to exactly one job via the jobid FK.
object_relationships:
- name: job
using:
foreign_key_constraint_on: jobid
# Insert allowed only for users with an ACTIVE association to the job's bodyshop.
insert_permissions:
- role: user
permission:
check:
job:
bodyshop:
associations:
_and:
- active:
_eq: true
- user:
authid:
_eq: X-Hasura-User-Id
columns:
- mixdata
- totalliquidcost
- totalsundrycost
- company
- version
- created_at
- updated_at
- id
- jobid
backend_only: false
# Select: same bodyshop-association filter as insert.
select_permissions:
- role: user
permission:
columns:
- mixdata
- totalliquidcost
- totalsundrycost
- company
- version
- created_at
- updated_at
- id
- jobid
filter:
job:
bodyshop:
associations:
_and:
- active:
_eq: true
- user:
authid:
_eq: X-Hasura-User-Id
# Update: same filter; post-update check is disabled (check: null).
update_permissions:
- role: user
permission:
columns:
- mixdata
- totalliquidcost
- totalsundrycost
- company
- version
- created_at
- updated_at
- id
- jobid
filter:
job:
bodyshop:
associations:
_and:
- active:
_eq: true
- user:
authid:
_eq: X-Hasura-User-Id
check: null
- table:
schema: public
name: notes

View File

@@ -0,0 +1 @@
-- Migration (down): remove the mixdata table created by the paired up migration.
-- NOTE(review): consider DROP TABLE IF EXISTS so re-running the down migration
-- on a database where the table is already gone does not error.
DROP TABLE "public"."mixdata";

View File

@@ -0,0 +1,18 @@
-- Migration (up): create public.mixdata — paint-scale cost totals plus the raw
-- vendor payload (jsonb), one row per job, cascading with the parent job.
--
-- pgcrypto provides gen_random_uuid(); it must be installed BEFORE the table,
-- because the DEFAULT expression is resolved at CREATE TABLE time. In the
-- original migration the extension was created LAST, so the whole migration
-- failed on any database without pgcrypto already present.
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE TABLE "public"."mixdata" ("id" uuid NOT NULL DEFAULT gen_random_uuid(), "created_at" timestamptz NOT NULL DEFAULT now(), "updated_at" timestamptz NOT NULL DEFAULT now(), "jobid" uuid NOT NULL, "company" text NOT NULL, "version" text NOT NULL, "totalliquidcost" numeric NOT NULL, "totalsundrycost" numeric NOT NULL, "mixdata" jsonb, PRIMARY KEY ("id") , FOREIGN KEY ("jobid") REFERENCES "public"."jobs"("id") ON UPDATE cascade ON DELETE cascade);
-- Standard Hasura helper: stamp updated_at on every row update.
CREATE OR REPLACE FUNCTION "public"."set_current_timestamp_updated_at"()
RETURNS TRIGGER AS $$
DECLARE
_new record;
BEGIN
_new := NEW;
_new."updated_at" = NOW();
RETURN _new;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER "set_public_mixdata_updated_at"
BEFORE UPDATE ON "public"."mixdata"
FOR EACH ROW
EXECUTE PROCEDURE "public"."set_current_timestamp_updated_at"();
COMMENT ON TRIGGER "set_public_mixdata_updated_at" ON "public"."mixdata"
IS 'trigger to set value of column "updated_at" to current timestamp on row update';

View File

@@ -1523,3 +1523,15 @@ exports.INSERT_NEW_TRANSITION = `mutation INSERT_NEW_TRANSITION($newTransition:
}
`;
/**
 * GraphQL query: resolve jobs by RO number ($roNumbers, matched with _in on
 * ro_number) and return each job's id plus the ids of any existing mixdata
 * rows, so the upload handler can choose insert vs update per RO.
 */
exports.QUERY_JOB_ID_MIXDATA = `query QUERY_JOB_ID_MIXDATA($roNumbers: [String!]!) {
jobs(where: {ro_number: {_in: $roNumbers}}) {
id
ro_number
mixdata {
id
}
}
}
`;

View File

@@ -2,6 +2,8 @@ const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const xml2js = require("xml2js");
const GraphQLClient = require("graphql-request").GraphQLClient;
const queries = require("../graphql-client/queries");
require("dotenv").config({
path: path.resolve(
@@ -12,7 +14,7 @@ require("dotenv").config({
/**
 * Express handler: accepts a paint-scale data upload, matches its repair
 * orders to known jobs, and upserts one mixdata row per matched RO via a
 * single batched Hasura mutation. Parts of the body are not visible here
 * (diff hunks); comments cover only the visible lines.
 */
exports.mixdataUpload = async (req, res) => {
const { bodyshopid } = req.body;
// NOTE(review): `ids` is not defined in this scope and the event name is
// "media-bulk-download" — this looks like a stale copy-paste line superseded
// by the "job-mixdata-upload" log below; it would throw a ReferenceError.
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
// Forward the caller's own token so Hasura permissions apply to this user.
const BearerToken = req.headers.authorization;
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
@@ -31,15 +33,50 @@ exports.mixdataUpload = async (req, res) => {
});
const ScaleType = DetermineScaleType(inboundRequest);
// NOTE(review): ROList duplicates the GetListOfRos call below and is only
// used in the leftover console.log — candidate for removal.
const ROList = GetListOfRos(inboundRequest, ScaleType);
const RoNumbersFromInboundRequest = GetListOfRos(
inboundRequest,
ScaleType
);
//Query the list of ROs based on the RO number.
if (RoNumbersFromInboundRequest.length > 0) {
//Query the list of ROs based on the RO number.
const { jobs } = await client.request(queries.QUERY_JOB_ID_MIXDATA, {
roNumbers: RoNumbersFromInboundRequest,
});
// NOTE(review): debug leftover — remove before merging.
console.log(ROList);
//Create the hash for faster processing for inserts/updates.
// RO number -> { jobid, existing mixdata id (null when none) }.
const jobHash = {};
jobs.forEach((j) => {
jobHash[j.ro_number] = {
jobid: j.id,
mixdataid: j.mixdata.length > 0 ? j.mixdata[0].id : null,
};
});
const MixDataArray = GenerateMixDataArray(
inboundRequest,
ScaleType,
jobHash
);
// One mutation document containing an aliased insert/update field per RO.
const MixDataQuery = `
mutation UPSERT_MIXDATA{
${MixDataArray.map((md, idx) =>
GenerateGqlForMixData(md, idx)
).join(" ")}
}
`;
const resp = await client.request(MixDataQuery);
//Process the list of ROs and return an object to generate the queries.
}
});
res.sendStatus(200);
} catch (error) {
// NOTE(review): Express exposes res.json(...), not res.JSON(...) — this line
// throws inside the catch block; also the error is logged only after the
// response is sent. Should be: res.status(500).json(error).
res.status(500).JSON(error);
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
error: error.message,
});
}
};
@@ -63,3 +100,45 @@ function GetListOfRos(inboundRequest, ScaleType) {
);
}
}
/**
 * Map the parsed scale-vendor payload to an array of rows for the mixdata
 * upsert mutation.
 *
 * @param {object} inboundRequest - xml2js-parsed upload payload.
 * @param {{company: string, version: string}} ScaleType - detected vendor/version.
 * @param {Object<string, {jobid: string, mixdataid: (string|null)}>} jobHash -
 *   RO number -> matched job id and existing mixdata id (null means insert).
 * @returns {Array<object>} one row per RO that matched a known job; [] for an
 *   unsupported vendor/version (the original returned undefined here, which
 *   crashed the caller's .map).
 */
function GenerateMixDataArray(inboundRequest, ScaleType, jobHash) {
  if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
    const ros = inboundRequest.PPG.DataExportInterface.ROData.RepairOrders.RO;
    // xml2js may yield a single object (not an array) when the payload
    // contains exactly one RO — presumably explicitArray is off; normalize.
    const roList = Array.isArray(ros) ? ros : [ros];
    return (
      roList
        // Skip ROs with no matching job: previously jobHash[r.RONumber] being
        // undefined threw a TypeError and failed the entire upload batch.
        .filter((r) => jobHash[r.RONumber] !== undefined)
        .map((r) => ({
          jobid: jobHash[r.RONumber].jobid,
          id: jobHash[r.RONumber].mixdataid,
          mixdata: r,
          totalliquidcost: r.TotalLiquidCost,
          totalsundrycost: r.TotalSundryCost,
          company: ScaleType.company,
          version: ScaleType.version,
        }))
    );
  }
  // Unsupported scale type: empty batch rather than undefined.
  return [];
}
/**
 * Build one aliased GraphQL mutation field that upserts a mixdata row: an
 * update_mixdata_by_pk when the row already has an id, otherwise an
 * insert_mixdata_one.
 *
 * Fix: the original unquoted object keys by running
 * .replace(/"(\w+)"\s*:/g, "$1:") over the whole JSON.stringify output, which
 * also rewrote any string VALUE containing a '"word":' substring (likely in
 * the raw mixdata payload), corrupting data and producing invalid GraphQL.
 * ToGqlInputLiteral below unquotes only true object keys.
 *
 * @param {object} mixdata - row fields; `id` selects update vs insert.
 * @param {number} key - index used to alias the mutation field (insert0, update1, ...).
 * @returns {string} a GraphQL mutation field selecting the row id.
 */
function GenerateGqlForMixData(mixdata, key) {
  const { id, ...restMixData } = mixdata;
  const objectLiteral = ToGqlInputLiteral(restMixData);
  if (id) {
    //Update.
    return `
      update${key}: update_mixdata_by_pk(pk_columns:{id: "${id}"}, _set: ${objectLiteral}){
        id
      }
    `;
  }
  //Insert.
  return `
      insert${key}: insert_mixdata_one(object: ${objectLiteral}){
        id
      }
    `;
}

/**
 * Recursively serialize a plain JS value as a GraphQL input literal: object
 * keys unquoted, string values JSON-escaped, numbers/booleans/null verbatim.
 * Entries whose value is undefined are dropped (matching JSON.stringify).
 */
function ToGqlInputLiteral(value) {
  if (Array.isArray(value)) {
    return `[${value.map(ToGqlInputLiteral).join(", ")}]`;
  }
  if (value !== null && typeof value === "object") {
    const fields = Object.entries(value)
      .filter(([, v]) => v !== undefined)
      .map(([k, v]) => `${k}: ${ToGqlInputLiteral(v)}`)
      .join(", ");
    return `{${fields}}`;
  }
  // Strings get proper escaping; numbers, booleans and null pass through.
  return JSON.stringify(value);
}