feature/Reynolds-and-Reynolds-DMS-API-Integration - Scaffolding
This commit is contained in:
496
server/rr/rr-job-export.js
Normal file
496
server/rr/rr-job-export.js
Normal file
@@ -0,0 +1,496 @@
|
||||
const GraphQLClient = require("graphql-request").GraphQLClient;
|
||||
const _ = require("lodash");
|
||||
const moment = require("moment-timezone");
|
||||
|
||||
const CalculateAllocations = require("../cdk/cdk-calculate-allocations").default; // reuse allocations
|
||||
const CreateRRLogEvent = require("./rr-logger");
|
||||
const queries = require("../graphql-client/queries");
|
||||
const { MakeRRCall, RRActions, getTransactionType, defaultRRTTL, RRCacheEnums } = require("./rr-helpers");
|
||||
|
||||
// --- Public entry points (similar to Fortellis)
|
||||
/**
 * Entry point for exporting a job to the Reynolds & Reynolds DMS.
 * Caches the transaction envelope and job data, resolves the DMS vehicle by
 * VIN, collects candidate customers, and asks the client to pick one via the
 * "rr-select-customer" socket event. Errors are logged; the flow resumes in
 * RRSelectedCustomer once the client answers.
 *
 * @param {object} args
 * @param {object} args.socket - socket.io connection for the requesting user
 * @param {object} args.redisHelpers - session-cache helpers
 * @param {object} args.txEnvelope - accounting envelope (journal, story, payers)
 * @param {string|number} args.jobid - job primary key
 */
async function RRJobExport({ socket, redisHelpers, txEnvelope, jobid }) {
  const { setSessionTransactionData } = redisHelpers;

  try {
    CreateRRLogEvent(socket, "DEBUG", `[RR] Received Job export request`, { jobid });

    await setSessionTransactionData(
      socket.id,
      getTransactionType(jobid),
      RRCacheEnums.txEnvelope,
      txEnvelope,
      defaultRRTTL
    );

    const JobData = await QueryJobData({ socket, jobid });
    await setSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.JobData, JobData, defaultRRTTL);

    CreateRRLogEvent(socket, "DEBUG", `[RR] Get Vehicle Id via VIN`, { vin: JobData.v_vin });

    const DMSVid = await GetVehicleId({ socket, redisHelpers, JobData });
    await setSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.DMSVid, DMSVid, defaultRRTTL);

    let DMSVehCustomer;
    // Only read the existing vehicle when the DMS returned a real record.
    // (Previously `!DMSVid?.newId` let a nullish DMSVid fall through and
    // crash on `DMSVid.vehiclesVehId` below.)
    if (DMSVid && !DMSVid.newId) {
      const DMSVeh = await ReadVehicleById({ socket, redisHelpers, JobData, vehicleId: DMSVid.vehiclesVehId });
      await setSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.DMSVeh, DMSVeh, defaultRRTTL);

      // The "CURRENT" owner record points at the customer currently tied to the VIN.
      const owner = DMSVeh?.owners && DMSVeh.owners.find((o) => o.id.assigningPartyId === "CURRENT");
      if (owner?.id?.value) {
        DMSVehCustomer = await ReadCustomerById({ socket, redisHelpers, JobData, customerId: owner.id.value });
        await setSessionTransactionData(
          socket.id,
          getTransactionType(jobid),
          RRCacheEnums.DMSVehCustomer,
          DMSVehCustomer,
          defaultRRTTL
        );
      }
    }

    const DMSCustList = await SearchCustomerByName({ socket, redisHelpers, JobData });
    await setSessionTransactionData(
      socket.id,
      getTransactionType(jobid),
      RRCacheEnums.DMSCustList,
      DMSCustList,
      defaultRRTTL
    );

    // The VIN owner (when found) is listed first and flagged so the UI can
    // highlight it as the most likely match.
    socket.emit("rr-select-customer", [
      ...(DMSVehCustomer ? [{ ...DMSVehCustomer, vinOwner: true }] : []),
      ...(Array.isArray(DMSCustList) ? DMSCustList : [])
    ]);
  } catch (error) {
    CreateRRLogEvent(socket, "ERROR", `[RR] RRJobExport failed: ${error.message}`, { stack: error.stack });
  }
}
|
||||
|
||||
/**
 * Second phase of the RR export, invoked after the user picks a customer
 * (or none, meaning "create one"). Resolves the customer and vehicle, opens
 * a WIP accounting transaction, batches and posts the WIP lines, and emits
 * "export-success" on success; failures are logged via InsertFailedExportLog.
 *
 * @param {object} args
 * @param {object} args.socket - socket.io connection
 * @param {object} args.redisHelpers - session-cache helpers
 * @param {string|undefined} args.selectedCustomerId - DMS customer id; falsy => create a new customer
 * @param {string|number} args.jobid - job primary key
 */
async function RRSelectedCustomer({ socket, redisHelpers, selectedCustomerId, jobid }) {
  const { setSessionTransactionData, getSessionTransactionData } = redisHelpers;

  try {
    await setSessionTransactionData(
      socket.id,
      getTransactionType(jobid),
      RRCacheEnums.selectedCustomerId,
      selectedCustomerId,
      defaultRRTTL
    );

    // Rehydrate the state cached by RRJobExport.
    const JobData = await getSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.JobData);
    const txEnvelope = await getSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.txEnvelope);
    const DMSVid = await getSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.DMSVid);

    let DMSCust;
    if (selectedCustomerId) {
      DMSCust = await ReadCustomerById({ socket, redisHelpers, JobData, customerId: selectedCustomerId });
    } else {
      // No existing customer chosen — create one; normalize the id since the
      // create response shape is not finalized yet.
      const createRes = await CreateCustomer({ socket, redisHelpers, JobData });
      DMSCust = { customerId: createRes?.data || createRes?.customerId || createRes?.id };
    }
    await setSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.DMSCust, DMSCust, defaultRRTTL);

    let DMSVeh;
    if (DMSVid?.newId) {
      DMSVeh = await InsertVehicle({ socket, redisHelpers, JobData, txEnvelope, DMSVid, DMSCust });
    } else {
      // Optional chain: a missing/expired DMSVid cache entry must not throw here.
      DMSVeh = await ReadVehicleById({ socket, redisHelpers, JobData, vehicleId: DMSVid?.vehiclesVehId });
      // TODO: implement UpdateVehicle if RR supports updating ownership
      // DMSVeh = await UpdateVehicle({ ... })
    }
    await setSessionTransactionData(socket.id, getTransactionType(jobid), RRCacheEnums.DMSVeh, DMSVeh, defaultRRTTL);

    const DMSTransHeader = await StartWip({ socket, redisHelpers, JobData, txEnvelope });
    await setSessionTransactionData(
      socket.id,
      getTransactionType(jobid),
      RRCacheEnums.DMSTransHeader,
      DMSTransHeader,
      defaultRRTTL
    );

    const DMSBatchTxn = await TransBatchWip({ socket, redisHelpers, JobData });
    await setSessionTransactionData(
      socket.id,
      getTransactionType(jobid),
      RRCacheEnums.DMSBatchTxn,
      DMSBatchTxn,
      defaultRRTTL
    );

    // decide success/err format later; keep parity with Fortellis shape
    if (String(DMSBatchTxn?.rtnCode || "0") === "0") {
      const DmsBatchTxnPost = await PostBatchWip({ socket, redisHelpers, JobData });
      await setSessionTransactionData(
        socket.id,
        getTransactionType(jobid),
        RRCacheEnums.DmsBatchTxnPost,
        DmsBatchTxnPost,
        defaultRRTTL
      );

      if (String(DmsBatchTxnPost?.rtnCode || "0") === "0") {
        await MarkJobExported({ socket, jobid: JobData.id, redisHelpers });

        // Optional service history write
        try {
          const DMSVehHistory = await InsertServiceVehicleHistory({ socket, redisHelpers, JobData });
          await setSessionTransactionData(
            socket.id,
            getTransactionType(jobid),
            RRCacheEnums.DMSVehHistory,
            DMSVehHistory,
            defaultRRTTL
          );
        } catch (e) {
          // Best-effort: a history failure must not undo a successful export.
          CreateRRLogEvent(socket, "WARN", `[RR] ServiceVehicleHistory optional step failed: ${e.message}`);
        }

        socket.emit("export-success", JobData.id);
      } else {
        // Post failed: fetch error detail, delete the WIP, record the failure.
        await HandlePostingError({ socket, redisHelpers, JobData, DMSTransHeader });
      }
    } else {
      await InsertFailedExportLog({
        socket,
        JobData,
        error: `RR DMSBatchTxn not successful: ${JSON.stringify(DMSBatchTxn)}`
      });
    }
  } catch (error) {
    CreateRRLogEvent(socket, "ERROR", `[RR] RRSelectedCustomer failed: ${error.message}`, { stack: error.stack });
    const JobData = await redisHelpers.getSessionTransactionData(
      socket.id,
      getTransactionType(jobid),
      RRCacheEnums.JobData
    );
    if (JobData) await InsertFailedExportLog({ socket, JobData, error });
  }
}
|
||||
|
||||
// --- GraphQL job fetch
|
||||
/**
 * Fetches the job (with bodyshop / export configuration) from GraphQL.
 * Reuses the CDK export query since RR consumes the same job shape.
 *
 * @param {object} args
 * @param {object} args.socket - socket.io connection carrying the auth token
 * @param {string|number} args.jobid - job primary key
 * @returns {Promise<object>} the `jobs_by_pk` result
 */
async function QueryJobData({ socket, jobid }) {
  const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
  // Prefer the refreshed token on socket.data, falling back to the handshake
  // token — same resolution order as MarkJobExported / InsertFailedExportLog
  // (this function previously only checked the handshake).
  const currentToken =
    (socket?.data && socket.data.authToken) || (socket?.handshake?.auth && socket.handshake.auth.token);
  const result = await client
    .setHeaders({ Authorization: `Bearer ${currentToken}` })
    .request(queries.QUERY_JOBS_FOR_CDK_EXPORT, { id: jobid });
  return result.jobs_by_pk;
}
|
||||
|
||||
// --- RR API step stubs (wire to MakeRRCall) -------------------------
|
||||
|
||||
/**
 * Looks up the DMS vehicle id for the job's VIN.
 * Returns the raw MakeRRCall result (expected to carry vehiclesVehId / newId).
 */
async function GetVehicleId({ socket, redisHelpers, JobData }) {
  const callParams = {
    ...RRActions.GetVehicleId,
    requestPathParams: JobData.v_vin,
    redisHelpers,
    socket,
    jobid: JobData.id
  };
  return MakeRRCall(callParams);
}
|
||||
|
||||
/**
 * Reads a DMS vehicle record by its vehicle id.
 */
async function ReadVehicleById({ socket, redisHelpers, JobData, vehicleId }) {
  const callParams = {
    ...RRActions.ReadVehicle,
    requestPathParams: vehicleId,
    redisHelpers,
    socket,
    jobid: JobData.id
  };
  return MakeRRCall(callParams);
}
|
||||
|
||||
/**
 * Reads a DMS customer record by its customer id.
 */
async function ReadCustomerById({ socket, redisHelpers, JobData, customerId }) {
  const callParams = {
    ...RRActions.ReadCustomer,
    requestPathParams: customerId,
    redisHelpers,
    socket,
    jobid: JobData.id
  };
  return MakeRRCall(callParams);
}
|
||||
|
||||
/**
 * Searches DMS customers by the job owner's name. A business owner is
 * searched by company name (sent in the lastName field); an individual by
 * first + last name.
 */
async function SearchCustomerByName({ socket, redisHelpers, JobData }) {
  // align with Rome Search spec later
  const companyName = JobData.ownr_co_nm;
  let ownerNameParams;
  if (companyName && companyName.trim() !== "") {
    ownerNameParams = [["lastName", companyName]];
  } else {
    ownerNameParams = [
      ["firstName", JobData.ownr_fn],
      ["lastName", JobData.ownr_ln]
    ];
  }

  return MakeRRCall({
    ...RRActions.SearchCustomer,
    requestSearchParams: ownerNameParams,
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Creates a new DMS customer for the job owner.
 * The request body is a stub — only customerType is mapped so far.
 */
async function CreateCustomer({ socket, redisHelpers, JobData }) {
  // shape per Rome Customer Insert spec
  const isBusiness = Boolean(JobData.ownr_co_nm);
  const body = {
    customerType: isBusiness ? "BUSINESS" : "INDIVIDUAL"
    // fill minimal required fields later
  };
  return MakeRRCall({
    ...RRActions.CreateCustomer,
    body,
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Inserts the job's vehicle into the DMS (used when GetVehicleId reported a
 * new id). Only the VIN is mapped so far; txEnvelope / DMSVid / DMSCust are
 * accepted now for the eventual full field mapping.
 */
async function InsertVehicle({ socket, redisHelpers, JobData, txEnvelope, DMSVid, DMSCust }) {
  const vehicleBody = {
    // map fields per Rome Insert Service Vehicle spec
    vin: JobData.v_vin
    // owners, make/model, odometer, etc…
  };
  return MakeRRCall({
    ...RRActions.InsertVehicle,
    body: vehicleBody,
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Opens a WIP (work-in-progress) accounting transaction in the DMS.
 * Returns the transaction header; its transID is used by the later batch,
 * post, and delete calls.
 */
async function StartWip({ socket, redisHelpers, JobData, txEnvelope }) {
  // Accounting date is "today" in the bodyshop's local timezone.
  const acctgDate = moment().tz(JobData.bodyshop.timezone).format("YYYY-MM-DD");
  const headerBody = {
    acctgDate,
    desc: txEnvelope.story || "",
    docType: "10",
    m13Flag: "0",
    refer: JobData.ro_number,
    srcCo: JobData.bodyshop?.cdk_configuration?.srcco || "00", // placeholder
    srcJrnl: txEnvelope.journal,
    userID: "BSMS"
  };
  return MakeRRCall({
    ...RRActions.StartWip,
    body: headerBody,
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Generates the WIP accounting lines for the job and sends them to the DMS
 * as one batch transaction.
 */
async function TransBatchWip({ socket, redisHelpers, JobData }) {
  const batchLines = await GenerateTransWips({ socket, redisHelpers, JobData });
  return MakeRRCall({
    ...RRActions.TranBatchWip,
    body: batchLines, // shape per Rome spec
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Posts ("P" opCode) the previously batched WIP transaction, identified by
 * the transID from the cached transaction header.
 */
async function PostBatchWip({ socket, redisHelpers, JobData }) {
  const header = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.DMSTransHeader
  );

  return MakeRRCall({
    ...RRActions.PostBatchWip,
    body: { opCode: "P", transID: header?.transID },
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Reads the DMS error detail for the current WIP transaction (looked up by
 * the transID cached in the transaction header).
 */
async function QueryErrWip({ socket, redisHelpers, JobData }) {
  const header = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.DMSTransHeader
  );
  return MakeRRCall({
    ...RRActions.QueryErrorWip,
    requestPathParams: header?.transID,
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Deletes ("D" opCode) the current WIP transaction. Intentionally reuses the
 * PostBatchWip RR action — only the opCode differs.
 */
async function DeleteWip({ socket, redisHelpers, JobData }) {
  const header = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.DMSTransHeader
  );
  return MakeRRCall({
    ...RRActions.PostBatchWip,
    body: { opCode: "D", transID: header?.transID },
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Writes a service-history comment line for the vehicle (optional step —
 * callers treat failures as non-fatal). Only the envelope "story" is mapped
 * so far.
 */
async function InsertServiceVehicleHistory({ socket, redisHelpers, JobData }) {
  const envelope = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.txEnvelope
  );

  const historyBody = {
    // map to Rome “Service Vehicle History Insert” spec
    comments: envelope?.story || ""
  };
  return MakeRRCall({
    ...RRActions.ServiceHistoryInsert,
    body: historyBody,
    redisHelpers,
    socket,
    jobid: JobData.id
  });
}
|
||||
|
||||
/**
 * Handles a failed batch post: pulls the DMS error detail, deletes the
 * half-posted WIP transaction, logs each pipe-delimited error line, and
 * records a failed-export log row.
 *
 * @param {object} args.DMSTransHeader - currently unused; kept for future error mapping
 */
async function HandlePostingError({ socket, redisHelpers, JobData, DMSTransHeader }) {
  let DmsError;
  try {
    DmsError = await QueryErrWip({ socket, redisHelpers, JobData });
  } catch (e) {
    // Error-detail lookup is best-effort; previously a throw here skipped the
    // WIP cleanup and the failed-export log entirely.
    CreateRRLogEvent(socket, "WARN", `[RR] QueryErrWip failed: ${e.message}`);
  }
  // Always delete the failed WIP so it doesn't linger in the DMS.
  await DeleteWip({ socket, redisHelpers, JobData });

  const errString = DmsError?.errMsg || JSON.stringify(DmsError);
  // The DMS packs multiple errors into a single pipe-delimited string.
  errString?.split("|")?.forEach((e) => e && CreateRRLogEvent(socket, "ERROR", `[RR] Post error: ${e}`));
  await InsertFailedExportLog({ socket, JobData, error: errString });
}
|
||||
|
||||
/**
 * Builds the WIP accounting lines for the job: allocation-engine output
 * (sale / cost / tax lines) plus the payer lines from the transaction
 * envelope. Caches the result under RRCacheEnums.transWips and returns it.
 *
 * Amounts are integer cents; sale-side lines are negated (credits).
 */
async function GenerateTransWips({ socket, redisHelpers, JobData }) {
  // reuse the existing allocator
  const allocations = await CalculateAllocations(socket, JobData.id, true); // true==enable verbose logging
  const DMSTransHeader = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.DMSTransHeader
  );

  // Every line shares the transID and target company; only account, control
  // number, and amount vary — build them through one helper instead of four
  // duplicated object literals.
  const makeWip = (acct, cntl, postAmt) => ({
    acct,
    cntl,
    postAmt,
    transID: DMSTransHeader?.transID,
    trgtCoID: JobData.bodyshop?.cdk_configuration?.srcco
  });

  // Translate allocations -> RR WIP line shape later. For now: keep parity with Fortellis skeleton
  const wips = [];
  allocations.forEach((alloc) => {
    if (alloc.sale.getAmount() > 0 && !alloc.tax) {
      // Non-tax sale: credit the profit center.
      wips.push(
        makeWip(
          alloc.profitCenter.dms_acctnumber,
          alloc.profitCenter.dms_control_override || JobData.ro_number,
          alloc.sale.multiply(-1).getAmount()
        )
      );
    }
    if (alloc.cost.getAmount() > 0 && !alloc.tax) {
      // Cost: debit the cost account and credit the matching WIP account.
      wips.push(
        makeWip(
          alloc.costCenter.dms_acctnumber,
          alloc.costCenter.dms_control_override || JobData.ro_number,
          alloc.cost.getAmount()
        )
      );
      wips.push(
        makeWip(
          alloc.costCenter.dms_wip_acctnumber,
          alloc.costCenter.dms_control_override || JobData.ro_number,
          alloc.cost.multiply(-1).getAmount()
        )
      );
    }
    if (alloc.tax && alloc.sale.getAmount() > 0) {
      // Tax sale: credit the profit center.
      wips.push(
        makeWip(
          alloc.profitCenter.dms_acctnumber,
          alloc.profitCenter.dms_control_override || JobData.ro_number,
          alloc.sale.multiply(-1).getAmount()
        )
      );
    }
  });

  const txEnvelope = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.txEnvelope
  );
  // Payer lines come straight from the envelope; payer.amount arrives in
  // dollars and is converted to integer cents. Note: payer lines use the
  // payer's control number with no ro_number fallback (unlike allocations).
  txEnvelope?.payers?.forEach((payer) => {
    wips.push(makeWip(payer.dms_acctnumber, payer.controlnumber, Math.round(payer.amount * 100)));
  });

  await redisHelpers.setSessionTransactionData(
    socket.id,
    getTransactionType(JobData.id),
    RRCacheEnums.transWips,
    wips,
    defaultRRTTL
  );
  return wips;
}
|
||||
|
||||
// --- DB logging mirrors Fortellis
|
||||
/**
 * Marks the job as exported in the database and writes a success export-log
 * row (with the generated WIP lines as metadata), mirroring the Fortellis
 * flow.
 *
 * NOTE(review): this reads bodyshop/status data from `socket.JobData`, which
 * nothing in this file sets — confirm the socket actually carries JobData;
 * otherwise status falls back to "Exported*" and bodyshopid is undefined.
 */
async function MarkJobExported({ socket, jobid, redisHelpers }) {
  const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
  const currentToken =
    (socket?.data && socket.data.authToken) || (socket?.handshake?.auth && socket.handshake.auth.token);

  const metadata = await redisHelpers.getSessionTransactionData(
    socket.id,
    getTransactionType(jobid),
    RRCacheEnums.transWips
  );

  const variables = {
    jobId: jobid,
    job: {
      status: socket.JobData?.bodyshop?.md_ro_statuses?.default_exported || "Exported*",
      date_exported: new Date()
    },
    log: {
      bodyshopid: socket.JobData?.bodyshop?.id,
      jobid,
      successful: true,
      useremail: socket.user?.email,
      metadata
    },
    bill: { exported: true, exported_at: new Date() }
  };

  return client.setHeaders({ Authorization: `Bearer ${currentToken}` }).request(queries.MARK_JOB_EXPORTED, variables);
}
|
||||
|
||||
/**
 * Writes a failed-export log row. Never throws — a logging failure must not
 * mask the original export error, so it is logged and swallowed.
 */
async function InsertFailedExportLog({ socket, JobData, error }) {
  try {
    const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {});
    const currentToken =
      (socket?.data && socket.data.authToken) || (socket?.handshake?.auth && socket.handshake.auth.token);
    const message = typeof error === "string" ? error : JSON.stringify(error);
    const log = {
      bodyshopid: JobData.bodyshop.id,
      jobid: JobData.id,
      successful: false,
      message,
      useremail: socket.user?.email
    };
    return await client.setHeaders({ Authorization: `Bearer ${currentToken}` }).request(queries.INSERT_EXPORT_LOG, {
      log
    });
  } catch (error2) {
    CreateRRLogEvent(socket, "ERROR", `Error in InsertFailedExportLog - ${error2.message}`, { stack: error2.stack });
  }
}
|
||||
|
||||
// Public API: only the two socket entry points are exported; the RR call
// helpers above are module-private.
module.exports = {
  RRJobExport,
  RRSelectedCustomer
};
|
||||
Reference in New Issue
Block a user