Merge branch 'master-AIO' into feature/IO-3515-ocr-bill-posting
This commit is contained in:
139
server/chatter/chatter-client.js
Normal file
139
server/chatter/chatter-client.js
Normal file
@@ -0,0 +1,139 @@
|
||||
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
|
||||
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
|
||||
const { isString, isEmpty } = require("lodash");
|
||||
|
||||
// Base URL for the Chatter API; overridable via env for staging/testing.
const CHATTER_BASE_URL = process.env.CHATTER_API_BASE_URL || "https://api.chatterresearch.com";
const AWS_REGION = process.env.AWS_REGION || "ca-central-1";

// Configure SecretsManager client with localstack support
const secretsClientOptions = {
  region: AWS_REGION,
  credentials: defaultProvider()
};

// A non-empty LOCALSTACK_HOSTNAME signals a local dev environment.
const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);

if (isLocal) {
  // Point the AWS SDK at the localstack emulator instead of real AWS.
  secretsClientOptions.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
}

// Shared module-level client used by getChatterApiToken below.
const secretsClient = new SecretsManagerClient(secretsClientOptions);
|
||||
|
||||
/**
 * Chatter API Client for making requests to the Chatter API.
 *
 * Uses the global fetch API and authenticates every request with the
 * "Api-Token" header.
 */
class ChatterApiClient {
  /**
   * @param {Object} opts
   * @param {string} opts.baseUrl - API origin; trailing slashes are stripped.
   * @param {string} opts.apiToken - Per-company token (required).
   * @throws {Error} When apiToken is missing.
   */
  constructor({ baseUrl, apiToken }) {
    if (!apiToken) throw new Error("ChatterApiClient requires apiToken");
    // Normalize so request() can safely concatenate "/"-prefixed paths.
    this.baseUrl = String(baseUrl || "").replace(/\/+$/, "");
    this.apiToken = apiToken;
  }

  /** Registers a new location under the given company. */
  async createLocation(companyId, payload) {
    return this.request(`/api/v1/companies/${companyId}/locations`, {
      method: "POST",
      body: payload
    });
  }

  /** Posts a solicitation interaction for the given company. */
  async postInteraction(companyId, payload) {
    return this.request(`/api/v1/companies/${companyId}/solicitation/interaction`, {
      method: "POST",
      body: payload
    });
  }

  /**
   * Performs an HTTP request and parses the response body.
   *
   * @param {string} path - Request path, starting with "/".
   * @param {Object} [opts]
   * @param {string} [opts.method="GET"]
   * @param {Object} [opts.body] - JSON-serialized when present.
   * @returns {Promise<*>} Parsed JSON body, raw text when not JSON, or null on empty body.
   * @throws {Error} On non-2xx responses; the error carries .status, .data
   *   and, when the server sent a Retry-After header, .retryAfterMs.
   */
  async request(path, { method = "GET", body } = {}) {
    const res = await fetch(this.baseUrl + path, {
      method,
      headers: {
        "Api-Token": this.apiToken,
        Accept: "application/json",
        ...(body ? { "Content-Type": "application/json" } : {})
      },
      body: body ? JSON.stringify(body) : undefined
    });

    const text = await res.text();
    const data = text ? safeJson(text) : null;

    if (!res.ok) {
      // safeJson may return the raw text when the body isn't JSON; fall back
      // to it so the error message never reads "... | undefined".
      const detail = data && typeof data === "object" ? data.message : data;
      const err = new Error(`Chatter API error ${res.status} | ${detail ?? ""}`);
      err.status = res.status;
      err.data = data;
      const retryAfterMs = parseRetryAfterMs(res.headers.get("retry-after"));
      if (retryAfterMs != null) err.retryAfterMs = retryAfterMs;
      throw err;
    }
    return data;
  }
}
|
||||
|
||||
/**
 * Safely parse JSON, returning the original text if parsing fails.
 *
 * @param {string} text - Raw response body.
 * @returns {*} The parsed JSON value, or the unmodified input when it is not valid JSON.
 */
function safeJson(text) {
  let parsed;
  try {
    parsed = JSON.parse(text);
  } catch {
    parsed = text;
  }
  return parsed;
}
|
||||
|
||||
/**
 * Converts an HTTP Retry-After header value into milliseconds.
 * Accepts either delta-seconds ("120") or an HTTP date string.
 *
 * @param {string|null} value - Raw header value.
 * @returns {number|null} Delay in ms, or null when absent/unparseable.
 */
function parseRetryAfterMs(value) {
  if (!value) return null;

  const asSeconds = Number(value);
  const isDeltaSeconds = Number.isFinite(asSeconds) && asSeconds >= 0;
  if (isDeltaSeconds) return Math.ceil(asSeconds * 1000);

  // Not numeric: try to interpret the value as an absolute HTTP date.
  const absoluteMs = Date.parse(value);
  if (!Number.isFinite(absoluteMs)) return null;
  return Math.max(0, absoluteMs - Date.now());
}
|
||||
|
||||
/**
 * Fetches Chatter API token from AWS Secrets Manager
 * SecretId format: CHATTER_COMPANY_KEY_<companyId>
 *
 * Prefers the SecretString field; falls back to base64-decoded SecretBinary.
 *
 * @param {string|number} companyId - The company ID
 * @returns {Promise<string>} The API token
 * @throws {Error} When companyId is missing or the stored secret is empty
 */
async function getChatterApiToken(companyId) {
  const key = String(companyId ?? "").trim();
  if (!key) throw new Error("getChatterApiToken: companyId is required");

  // Optional override for development/testing
  if (process.env.CHATTER_API_TOKEN) return process.env.CHATTER_API_TOKEN;

  const secretId = `CHATTER_COMPANY_KEY_${key}`;
  const secret = await secretsClient.send(new GetSecretValueCommand({ SecretId: secretId }));

  const fromString = secret.SecretString ? secret.SecretString.trim() : "";
  const fromBinary = secret.SecretBinary
    ? Buffer.from(secret.SecretBinary, "base64").toString("ascii").trim()
    : "";
  const token = fromString || fromBinary;

  if (!token) throw new Error(`Chatter API token secret is empty: ${secretId}`);
  return token;
}
|
||||
|
||||
/**
 * Creates a Chatter API client instance
 *
 * Fetches the per-company API token from Secrets Manager on every call;
 * no caching happens at this layer.
 *
 * @param {string|number} companyId - The company ID
 * @param {string} [baseUrl] - Optional base URL override
 * @returns {Promise<ChatterApiClient>} Configured API client
 * @throws {Error} When companyId is missing or its token secret is empty
 */
async function createChatterClient(companyId, baseUrl = CHATTER_BASE_URL) {
  const apiToken = await getChatterApiToken(companyId);
  return new ChatterApiClient({ baseUrl, apiToken });
}
|
||||
|
||||
// Public surface of this module.
module.exports = {
  ChatterApiClient,
  getChatterApiToken,
  createChatterClient,
  safeJson,
  CHATTER_BASE_URL
};
|
||||
123
server/chatter/createLocation.js
Normal file
123
server/chatter/createLocation.js
Normal file
@@ -0,0 +1,123 @@
|
||||
// Chatter company all new locations are registered under.
const DEFAULT_COMPANY_ID = process.env.CHATTER_DEFAULT_COMPANY_ID;
const client = require("../graphql-client/graphql-client").client;
const { createChatterClient } = require("./chatter-client");
const InstanceManager = require("../utils/instanceMgr").default;

// Fetches the bodyshop fields needed to build a Chatter location payload.
const GET_BODYSHOP_FOR_CHATTER = `
query GET_BODYSHOP_FOR_CHATTER($id: uuid!) {
bodyshops_by_pk(id: $id) {
id
shopname
address1
city
state
zip_post
imexshopid
chatterid
chatter_company_id
}
}
`;

// Persists the Chatter company/location ids back onto the bodyshop row.
const UPDATE_BODYSHOP_CHATTER_FIELDS = `
mutation UPDATE_BODYSHOP_CHATTER_FIELDS($id: uuid!, $chatter_company_id: String!, $chatterid: String!) {
update_bodyshops_by_pk(pk_columns: {id: $id}, _set: {chatter_company_id: $chatter_company_id, chatterid: $chatterid}) {
id
chatter_company_id
chatterid
}
}
`;
|
||||
|
||||
/**
 * Express handler that registers a bodyshop as a Chatter "location".
 *
 * Expects req.body = { bodyshopID, googlePlaceID }. Validates configuration
 * and input, creates the location via the Chatter API under the default
 * company, then stores the returned location id on the bodyshop row.
 *
 * Always responds with JSON { success, message?, data? }; failures are
 * reported in-band rather than via HTTP error statuses.
 */
const createLocation = async (req, res) => {
  const { logger } = req;
  const { bodyshopID, googlePlaceID } = req.body;

  // NOTE: removed stray `console.dir({ body: req.body })` debug leftover —
  // it dumped the raw request body to stdout, bypassing the structured logger.

  if (!DEFAULT_COMPANY_ID) {
    logger.log("chatter-create-location-no-default-company", "warn", null, null, { bodyshopID });
    return res.json({ success: false, message: "No default company set" });
  }

  if (!googlePlaceID) {
    logger.log("chatter-create-location-no-google-place-id", "warn", null, null, { bodyshopID });
    return res.json({ success: false, message: "No google place id provided" });
  }

  if (!bodyshopID) {
    logger.log("chatter-create-location-invalid-bodyshop", "warn", null, null, { bodyshopID });
    return res.json({ success: false, message: "No bodyshop id" });
  }

  try {
    const { bodyshops_by_pk: bodyshop } = await client.request(GET_BODYSHOP_FOR_CHATTER, { id: bodyshopID });

    if (!bodyshop) {
      logger.log("chatter-create-location-bodyshop-not-found", "warn", null, null, { bodyshopID });
      return res.json({ success: false, message: "Bodyshop not found" });
    }

    // Guard against double registration: both ids must already be present.
    if (bodyshop.chatter_company_id && bodyshop.chatterid) {
      logger.log("chatter-create-location-already-exists", "warn", null, null, {
        bodyshopID
      });
      return res.json({ success: false, message: "This Bodyshop already has a location associated with it" });
    }

    const chatterApi = await createChatterClient(DEFAULT_COMPANY_ID);

    // Stable identifier scheme: <companyId>-<bodyshopId>.
    const locationIdentifier = `${DEFAULT_COMPANY_ID}-${bodyshop.id}`;

    const locationPayload = {
      name: bodyshop.shopname,
      locationIdentifier: locationIdentifier,
      address: bodyshop.address1,
      postalCode: bodyshop.zip_post,
      state: bodyshop.state,
      city: bodyshop.city,
      country: InstanceManager({ imex: "Canada", rome: "US" }),
      googlePlaceId: googlePlaceID,
      status: "active"
    };

    logger.log("chatter-create-location-calling-api", "info", null, null, { bodyshopID, locationIdentifier });

    const response = await chatterApi.createLocation(DEFAULT_COMPANY_ID, locationPayload);

    if (!response.location?.id) {
      logger.log("chatter-create-location-no-location-id", "error", null, null, { bodyshopID, response });
      return res.json({ success: false, message: "No location ID in response", data: response });
    }

    // Persist the Chatter ids so future calls hit the already-exists guard.
    await client.request(UPDATE_BODYSHOP_CHATTER_FIELDS, {
      id: bodyshopID,
      chatter_company_id: DEFAULT_COMPANY_ID,
      chatterid: String(response.location.id)
    });

    logger.log("chatter-create-location-success", "info", null, null, {
      bodyshopID,
      chatter_company_id: DEFAULT_COMPANY_ID,
      chatterid: response.location.id,
      locationIdentifier
    });

    return res.json({ success: true, data: response });
  } catch (error) {
    logger.log("chatter-create-location-error", "error", null, null, {
      bodyshopID,
      error: error.message,
      status: error.status,
      data: error.data
    });

    return res.json({
      success: false,
      message: error.message || "Failed to create location",
      error: error.data
    });
  }
};
|
||||
|
||||
module.exports = createLocation;
|
||||
@@ -221,6 +221,8 @@ const CreateRepairOrderTag = (job, errorCallback) => {
|
||||
|
||||
const repairCosts = CreateCosts(job);
|
||||
|
||||
const LaborDetailLines = generateLaborLines(job.timetickets);
|
||||
|
||||
//Calculate detail only lines.
|
||||
const detailAdjustments = job.joblines
|
||||
.filter((jl) => jl.ah_detail_line && jl.mod_lbr_ty)
|
||||
@@ -606,12 +608,14 @@ const CreateRepairOrderTag = (job, errorCallback) => {
|
||||
// CSIID: null,
|
||||
InsGroupCode: null
|
||||
},
|
||||
|
||||
DetailLines: {
|
||||
DetailLine:
|
||||
job.joblines.length > 0
|
||||
? job.joblines.map((jl) => GenerateDetailLines(job, jl, job.bodyshop.md_order_statuses))
|
||||
: [generateNullDetailLine()]
|
||||
},
|
||||
LaborDetailLines: {
|
||||
LaborDetailLine: LaborDetailLines
|
||||
}
|
||||
};
|
||||
return ret;
|
||||
@@ -787,6 +791,76 @@ const CreateCosts = (job) => {
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Builds LaborDetailLine records from a job's time tickets.
 *
 * Each ticket maps its CIECA labor code to one actual-hours, flag-hours and
 * cost property; all other properties stay 0. Tickets with a missing or
 * unrecognized code fall back to "LA4" (other labor).
 *
 * @param {Array<Object>|null} timetickets - Ticket rows (ciecacode, employee,
 *   actualhrs, productivehrs, rate).
 * @returns {Array<Object>} One labor detail line per ticket; [] when none.
 */
const generateLaborLines = (timetickets) => {
  if (!timetickets || timetickets.length === 0) return [];

  // Maps a ticket's CIECA labor code to the properties it populates.
  const codeToProps = {
    LAB: { actual: "LaborBodyActualHours", flag: "LaborBodyFlagHours", cost: "LaborBodyCost" },
    LAM: { actual: "LaborMechanicalActualHours", flag: "LaborMechanicalFlagHours", cost: "LaborMechanicalCost" },
    LAG: { actual: "LaborGlassActualHours", flag: "LaborGlassFlagHours", cost: "LaborGlassCost" },
    LAS: { actual: "LaborStructuralActualHours", flag: "LaborStructuralFlagHours", cost: "LaborStructuralCost" },
    LAE: { actual: "LaborElectricalActualHours", flag: "LaborElectricalFlagHours", cost: "LaborElectricalCost" },
    LAA: { actual: "LaborAluminumActualHours", flag: "LaborAluminumFlagHours", cost: "LaborAluminumCost" },
    LAR: { actual: "LaborRefinishActualHours", flag: "LaborRefinishFlagHours", cost: "LaborRefinishCost" },
    LAU: { actual: "LaborDetailActualHours", flag: "LaborDetailFlagHours", cost: "LaborDetailCost" },
    LA1: { actual: "LaborOtherActualHours", flag: "LaborOtherFlagHours", cost: "LaborOtherCost" },
    LA2: { actual: "LaborOtherActualHours", flag: "LaborOtherFlagHours", cost: "LaborOtherCost" },
    LA3: { actual: "LaborOtherActualHours", flag: "LaborOtherFlagHours", cost: "LaborOtherCost" },
    LA4: { actual: "LaborOtherActualHours", flag: "LaborOtherFlagHours", cost: "LaborOtherCost" }
  };

  return timetickets.map((ticket, idx) => {
    const { ciecacode, employee } = ticket;
    // Rows may contain explicit nulls, which destructuring defaults do NOT
    // replace — coalesce to 0 so the arithmetic and output stay numeric.
    const actualhrs = ticket.actualhrs ?? 0;
    const productivehrs = ticket.productivehrs ?? 0;
    const rate = ticket.rate ?? 0;

    const isFlatRate = employee?.flat_rate;
    // Flat-rate techs are costed on productive (flagged) hours, others on clock hours.
    const hours = isFlatRate ? productivehrs : actualhrs;
    const cost = rate * hours;

    const laborDetail = {
      LaborDetailLineNumber: idx + 1,
      TechnicianNameFirst: employee?.first_name || "",
      TechnicianNameLast: employee?.last_name || "",
      LaborBodyActualHours: 0,
      LaborMechanicalActualHours: 0,
      LaborGlassActualHours: 0,
      LaborStructuralActualHours: 0,
      LaborElectricalActualHours: 0,
      LaborAluminumActualHours: 0,
      LaborRefinishActualHours: 0,
      LaborDetailActualHours: 0,
      LaborOtherActualHours: 0,
      LaborBodyFlagHours: 0,
      LaborMechanicalFlagHours: 0,
      LaborGlassFlagHours: 0,
      LaborStructuralFlagHours: 0,
      LaborElectricalFlagHours: 0,
      LaborAluminumFlagHours: 0,
      LaborRefinishFlagHours: 0,
      LaborDetailFlagHours: 0,
      LaborOtherFlagHours: 0,
      LaborBodyCost: 0,
      LaborMechanicalCost: 0,
      LaborGlassCost: 0,
      LaborStructuralCost: 0,
      LaborElectricalCost: 0,
      LaborAluminumCost: 0,
      LaborRefinishCost: 0,
      LaborDetailCost: 0,
      LaborOtherCost: 0
    };

    const effectiveCiecacode = ciecacode || "LA4";

    if (codeToProps[effectiveCiecacode]) {
      const { actual, flag, cost: costProp } = codeToProps[effectiveCiecacode];
      laborDetail[actual] = actualhrs;
      laborDetail[flag] = productivehrs;
      laborDetail[costProp] = cost;
    }

    return laborDetail;
  });
};
|
||||
|
||||
const StatusMapping = (status, md_ro_statuses) => {
|
||||
//Possible return statuses EST, SCH, ARR, IPR, RDY, DEL, CLO, CAN, UNDEFINED.
|
||||
const {
|
||||
|
||||
554
server/data/chatter-api.js
Normal file
554
server/data/chatter-api.js
Normal file
@@ -0,0 +1,554 @@
|
||||
/**
|
||||
* Environment variables used by this file
|
||||
* Chatter integration
|
||||
* - CHATTER_API_CONCURRENCY
|
||||
* - Maximum number of jobs/interactions posted concurrently *per shop* (within a single shop's batch).
|
||||
* - Default: 5
|
||||
* - Used by: createConcurrencyLimit(MAX_CONCURRENCY)
|
||||
*
|
||||
* - CHATTER_API_REQUESTS_PER_SECOND
|
||||
* - Per-company outbound request rate (token bucket refill rate).
|
||||
* - Default: 3
|
||||
* - Must be a positive number; otherwise falls back to default.
|
||||
* - Used by: createTokenBucketRateLimiter({ refillPerSecond })
|
||||
*
|
||||
* - CHATTER_API_BURST_CAPACITY
|
||||
* - Per-company token bucket capacity (maximum burst size).
|
||||
* - Default: equals CHATTER_API_REQUESTS_PER_SECOND (i.e., 3 unless overridden)
|
||||
* - Must be a positive number; otherwise falls back to default.
|
||||
* - Used by: createTokenBucketRateLimiter({ capacity })
|
||||
*
|
||||
* - CHATTER_API_MAX_RETRIES
|
||||
* - Maximum number of attempts for posting an interaction before giving up.
|
||||
* - Default: 6
|
||||
* - Must be a positive integer; otherwise falls back to default.
|
||||
* - Used by: postInteractionWithPolicy()
|
||||
*
|
||||
* - CHATTER_API_TOKEN
|
||||
* - Optional override token for emergency/dev scenarios.
|
||||
* - If set, bypasses Secrets Manager/Redis token retrieval and uses this value for all companies.
|
||||
* - Used by: getChatterApiTokenCached()
|
||||
*
|
||||
* Notes
|
||||
* - Per-company API tokens are otherwise fetched via getChatterApiToken(companyId) (Secrets Manager)
|
||||
* and may be cached via `sessionUtils.getChatterToken/setChatterToken` (Redis-backed).
|
||||
*/
|
||||
|
||||
const queries = require("../graphql-client/queries");
|
||||
const moment = require("moment-timezone");
|
||||
const logger = require("../utils/logger");
|
||||
const { ChatterApiClient, getChatterApiToken, CHATTER_BASE_URL } = require("../chatter/chatter-client");
|
||||
|
||||
const client = require("../graphql-client/graphql-client").client;
|
||||
|
||||
// Production posts real "delivery" events; everywhere else uses a test event name.
const CHATTER_EVENT = process.env.NODE_ENV === "production" ? "delivery" : "TEST_INTEGRATION";
const MAX_CONCURRENCY = Number(process.env.CHATTER_API_CONCURRENCY || 5);
const CHATTER_REQUESTS_PER_SECOND = getPositiveNumber(process.env.CHATTER_API_REQUESTS_PER_SECOND, 3);
const CHATTER_BURST_CAPACITY = getPositiveNumber(process.env.CHATTER_API_BURST_CAPACITY, CHATTER_REQUESTS_PER_SECOND);
const CHATTER_MAX_RETRIES = getPositiveInteger(process.env.CHATTER_API_MAX_RETRIES, 6);

// Client caching (in-memory) - tokens are now cached in Redis
const clientCache = new Map(); // companyId -> ChatterApiClient
const tokenInFlight = new Map(); // companyId -> Promise<string> (for in-flight deduplication)
const companyRateLimiters = new Map(); // companyId -> rate limiter
|
||||
|
||||
/**
 * Core processing function for Chatter API jobs.
 * This can be called by the HTTP handler or the BullMQ worker.
 *
 * @param {Object} options - Processing options
 * @param {string} options.start - Start date for the delivery window
 * @param {string} options.end - End date for the delivery window
 * @param {Array<string>} options.bodyshopIds - Optional specific shops to process
 * @param {boolean} options.skipUpload - Dry-run flag
 * @param {Object} options.sessionUtils - Optional session utils for token caching
 * @returns {Promise<Object>} Result with totals, allShopSummaries, and allErrors
 */
async function processChatterApiJob({ start, end, bodyshopIds, skipUpload, sessionUtils }) {
  logger.log("chatter-api-start", "DEBUG", "api", null, null);

  // Accumulators mutated in place by processBatchApi.
  const allErrors = [];
  const allShopSummaries = [];

  // Shops that DO have chatter_company_id
  const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS_WITH_COMPANY);

  // An explicit bodyshopIds list restricts the run to those shops only.
  const shopsToProcess =
    bodyshopIds?.length > 0 ? bodyshops.filter((shop) => bodyshopIds.includes(shop.id)) : bodyshops;

  logger.log("chatter-api-shopsToProcess-generated", "DEBUG", "api", null, { count: shopsToProcess.length });

  if (shopsToProcess.length === 0) {
    logger.log("chatter-api-shopsToProcess-empty", "DEBUG", "api", null, null);
    return {
      totals: { shops: 0, jobs: 0, sent: 0, duplicates: 0, failed: 0 },
      allShopSummaries: [],
      allErrors: []
    };
  }

  await processBatchApi({
    shopsToProcess,
    start,
    end,
    skipUpload,
    allShopSummaries,
    allErrors,
    sessionUtils
  });

  // Roll the per-shop summaries up into a single totals object.
  const totals = allShopSummaries.reduce(
    (acc, s) => {
      acc.shops += 1;
      acc.jobs += s.jobs || 0;
      acc.sent += s.sent || 0;
      acc.duplicates += s.duplicates || 0;
      acc.failed += s.failed || 0;
      return acc;
    },
    { shops: 0, jobs: 0, sent: 0, duplicates: 0, failed: 0 }
  );

  logger.log("chatter-api-end", "DEBUG", "api", null, totals);

  return { totals, allShopSummaries, allErrors };
}
|
||||
|
||||
/**
 * HTTP entry point for the Chatter API job.
 *
 * Production-only, guarded by the x-imex-auth shared-secret header.
 * Responds 202 immediately, then dispatches the actual work to the
 * BullMQ queue out-of-band; dispatch failures are logged, not surfaced
 * to the (already-answered) caller.
 */
exports.default = async (req, res) => {
  if (process.env.NODE_ENV !== "production") return res.sendStatus(403);
  if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) return res.sendStatus(401);

  // ACK before doing the work so the caller isn't held open.
  res.status(202).json({
    success: true,
    message: "Chatter API job queued for processing",
    timestamp: new Date().toISOString()
  });

  try {
    // Required here rather than at the top of the file.
    const { dispatchChatterApiJob } = require("./queues/chatterApiQueue");
    const { start, end, bodyshopIds, skipUpload } = req.body;

    await dispatchChatterApiJob({
      start,
      end,
      bodyshopIds,
      skipUpload
    });
  } catch (error) {
    // Response already sent; all we can do is record the dispatch failure.
    logger.log("chatter-api-queue-dispatch-error", "ERROR", "api", null, {
      error: error.message,
      stack: error.stack
    });
  }
};

// Exposed for the BullMQ worker to call directly.
exports.processChatterApiJob = processChatterApiJob;
|
||||
|
||||
/**
 * Processes every shop in sequence: resolves its company client, queries
 * delivered jobs for the window, and posts an interaction per job with
 * per-shop concurrency limiting and per-company rate limiting.
 *
 * Mutates allShopSummaries (one summary per shop, pushed in the finally
 * block) and allErrors (invalid config, per-job failures, fatal errors).
 *
 * @param {Object} args
 * @param {Array<Object>} args.shopsToProcess - Shop rows with chatter_company_id.
 * @param {string} [args.start] - Window start (defaults to yesterday).
 * @param {string} [args.end] - Optional window end.
 * @param {boolean} args.skipUpload - Dry-run flag; skips the actual POSTs.
 * @param {Array<Object>} args.allShopSummaries - Output accumulator.
 * @param {Array<Object>} args.allErrors - Output accumulator.
 * @param {Object} [args.sessionUtils] - Redis-backed token cache helpers.
 */
async function processBatchApi({ shopsToProcess, start, end, skipUpload, allShopSummaries, allErrors, sessionUtils }) {
  for (const bodyshop of shopsToProcess) {
    const summary = {
      bodyshopid: bodyshop.id,
      imexshopid: bodyshop.imexshopid,
      shopname: bodyshop.shopname,
      chatter_company_id: bodyshop.chatter_company_id,
      chatterid: bodyshop.chatterid,
      jobs: 0,
      sent: 0,
      duplicates: 0,
      failed: 0,
      ok: true
    };

    try {
      logger.log("chatter-api-start-shop", "DEBUG", "api", bodyshop.id, { shopname: bodyshop.shopname });

      const companyId = parseCompanyId(bodyshop.chatter_company_id);
      if (!companyId) {
        summary.ok = false;
        allErrors.push({
          ...pickShop(bodyshop),
          fatal: true,
          errors: [`Invalid chatter_company_id: "${bodyshop.chatter_company_id}"`]
        });
        // BUG FIX: do NOT push the summary here — the `finally` block below
        // runs even on `continue`, so an explicit push double-counted this
        // shop in allShopSummaries (and in the job totals).
        continue;
      }

      const chatterApi = await getChatterApiClient(companyId, sessionUtils);

      const { jobs } = await client.request(queries.CHATTER_QUERY, {
        bodyshopid: bodyshop.id,
        start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"),
        ...(end && { end: moment(end).endOf("day") })
      });

      summary.jobs = jobs.length;

      // concurrency-limited posting
      const limit = createConcurrencyLimit(MAX_CONCURRENCY);
      const results = await Promise.all(
        jobs.map((j) =>
          limit(async () => {
            const payload = buildInteractionPayload(bodyshop, j);

            // keep legacy flag name: skipUpload == dry-run
            if (skipUpload) return { ok: true, dryRun: true };

            const r = await postInteractionWithPolicy(chatterApi, companyId, payload);
            return r;
          })
        )
      );

      // Tally outcomes; duplicates are successful idempotency results.
      for (const r of results) {
        if (r?.dryRun) continue;
        if (r?.ok && r?.duplicate) summary.duplicates += 1;
        else if (r?.ok) summary.sent += 1;
        else summary.failed += 1;
      }

      // record failures with some detail (cap to avoid huge emails)
      const failures = results
        .filter((r) => r && r.ok === false)
        .slice(0, 25)
        .map((r) => ({
          status: r.status,
          error: r.error,
          context: r.context
        }));

      if (failures.length) {
        summary.ok = false;
        allErrors.push({
          ...pickShop(bodyshop),
          fatal: false,
          errors: failures
        });
      }

      logger.log("chatter-api-end-shop", "DEBUG", "api", bodyshop.id, summary);
    } catch (error) {
      summary.ok = false;

      logger.log("chatter-api-error-shop", "ERROR", "api", bodyshop.id, {
        error: error.message,
        stack: error.stack
      });

      allErrors.push({
        ...pickShop(bodyshop),
        fatal: true,
        errors: [error.toString()]
      });
    } finally {
      // Single point where the shop's summary is recorded (also runs on `continue`).
      allShopSummaries.push(summary);
    }
  }
}
|
||||
|
||||
/**
 * Maps a delivered job to a Chatter "interaction" request body.
 *
 * Company owners (ownr_co_nm set) are sent with the company name as
 * lastName and firstName null.
 *
 * @param {Object} bodyshop - Shop row (id, imexshopid, chatter_company_id, timezone).
 * @param {Object} j - Job row from CHATTER_QUERY.
 * @returns {Object} Payload for ChatterApiClient.postInteraction.
 */
function buildInteractionPayload(bodyshop, j) {
  const isCompany = Boolean(j.ownr_co_nm);

  // Same identifier scheme used when the location was registered.
  const locationIdentifier = `${bodyshop.chatter_company_id}-${bodyshop.id}`;
  const timestamp = formatChatterTimestamp(j.actual_delivery, bodyshop.timezone);

  // A delivery date that failed to format is logged but not fatal —
  // the interaction is still posted with timestamp undefined.
  if (j.actual_delivery && !timestamp) {
    logger.log("chatter-api-invalid-delivery-timestamp", "WARN", "api", bodyshop.id, {
      bodyshopId: bodyshop.id,
      jobId: j.id,
      timezone: bodyshop.timezone,
      actualDelivery: j.actual_delivery
    });
  }

  return {
    locationIdentifier: locationIdentifier,
    event: CHATTER_EVENT,
    consent: "true",
    transactionId: j.ro_number != null ? String(j.ro_number) : undefined,
    timestamp,
    firstName: isCompany ? null : j.ownr_fn || null,
    lastName: isCompany ? j.ownr_co_nm : j.ownr_ln || null,
    emailAddress: j.ownr_ea || undefined,
    phoneNumber: j.ownr_ph1 || undefined,
    metadata: {
      imexShopId: bodyshop.imexshopid,
      bodyshopId: bodyshop.id,
      jobId: j.id
    }
  };
}
|
||||
|
||||
/**
 * Posts a single interaction, applying the per-company rate limiter and a
 * retry policy for 429 responses.
 *
 * Outcomes:
 * - { ok: true }                       — posted successfully
 * - { ok: true, duplicate: true }      — 409; treated as idempotent success
 * - { ok: false, status, error, ... }  — non-retryable failure or retries exhausted
 *
 * @param {ChatterApiClient} chatterApi
 * @param {number} companyId
 * @param {Object} payload - Body from buildInteractionPayload.
 * @returns {Promise<Object>} Outcome object (never throws for API errors).
 */
async function postInteractionWithPolicy(chatterApi, companyId, payload) {
  const limiter = getCompanyRateLimiter(companyId);
  // Captured once so every log line / failure result carries the same context.
  const requestContext = {
    companyId,
    locationIdentifier: payload?.locationIdentifier,
    transactionId: payload?.transactionId,
    timestamp: payload?.timestamp ?? null,
    bodyshopId: payload?.metadata?.bodyshopId ?? null,
    jobId: payload?.metadata?.jobId ?? null
  };

  for (let attempt = 0; attempt < CHATTER_MAX_RETRIES; attempt++) {
    // Wait for a rate-limit token before every attempt.
    await limiter.acquire();

    try {
      await chatterApi.postInteraction(companyId, payload);
      return { ok: true };
    } catch (e) {
      // duplicate -> treat as successful idempotency outcome
      if (e.status === 409) return { ok: true, duplicate: true, error: e.data };

      // rate limited -> backoff + retry
      if (e.status === 429) {
        const retryDelayMs = retryDelayMsForError(e, attempt);
        // Pause the shared limiter so sibling requests back off too.
        limiter.pause(retryDelayMs);
        logger.log("chatter-api-request-rate-limited", "WARN", "api", requestContext.bodyshopId, {
          ...requestContext,
          attempt: attempt + 1,
          maxAttempts: CHATTER_MAX_RETRIES,
          status: e.status,
          retryAfterMs: e.retryAfterMs,
          retryDelayMs,
          error: e.data ?? e.message
        });
        await sleep(retryDelayMs);
        continue;
      }

      // Any other status is not retried.
      logger.log("chatter-api-request-failed", "ERROR", "api", requestContext.bodyshopId, {
        ...requestContext,
        attempt: attempt + 1,
        maxAttempts: CHATTER_MAX_RETRIES,
        status: e.status,
        error: e.data ?? e.message
      });
      return { ok: false, status: e.status, error: e.data ?? e.message, context: requestContext };
    }
  }

  // Only reachable when every attempt ended in a 429.
  logger.log("chatter-api-request-failed", "ERROR", "api", requestContext.bodyshopId, {
    ...requestContext,
    maxAttempts: CHATTER_MAX_RETRIES,
    status: 429,
    error: "rate limit retry exhausted"
  });

  return { ok: false, status: 429, error: "rate limit retry exhausted", context: requestContext };
}
|
||||
|
||||
/**
 * Parses a chatter_company_id value into a positive integer.
 *
 * @param {*} val - Raw id (string, number, null, ...).
 * @returns {number|null} Positive integer company id, or null when invalid.
 */
function parseCompanyId(val) {
  const trimmed = String(val ?? "").trim();
  if (trimmed === "") return null;

  const parsed = Number(trimmed);
  const isPositiveInteger = Number.isInteger(parsed) && parsed > 0;
  return isPositiveInteger ? parsed : null;
}
|
||||
|
||||
/**
 * Extracts the shop-identifying fields used in summaries and error reports.
 *
 * @param {Object} bodyshop - Full shop row.
 * @returns {Object} { bodyshopid, imexshopid, shopname, chatter_company_id, chatterid }
 */
function pickShop({ id, imexshopid, shopname, chatter_company_id, chatterid }) {
  return {
    bodyshopid: id,
    imexshopid,
    shopname,
    chatter_company_id,
    chatterid
  };
}
|
||||
|
||||
/**
 * Resolves after the given number of milliseconds.
 * @param {number} ms
 * @returns {Promise<void>}
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
||||
|
||||
/**
 * Exponential backoff with jitter: min(30s, 500ms * 2^attempt) + [0, 250)ms.
 * @param {number} attempt - Zero-based attempt index.
 * @returns {number} Delay in milliseconds.
 */
function backoffMs(attempt) {
  const exponential = 500 * 2 ** attempt;
  const capped = Math.min(30_000, exponential);
  const jitter = Math.floor(Math.random() * 250);
  return capped + jitter;
}
|
||||
|
||||
/**
 * Delay before retrying a rate-limited request: honors the server-provided
 * retryAfterMs hint (jittered, capped at 60s) when present, otherwise falls
 * back to exponential backoff.
 *
 * @param {Error} error - Error possibly carrying .retryAfterMs.
 * @param {number} attempt - Zero-based attempt index.
 * @returns {number} Delay in milliseconds.
 */
function retryDelayMsForError(error, attempt) {
  const serverHint = Number(error?.retryAfterMs);
  const hasUsableHint = Number.isFinite(serverHint) && serverHint > 0;
  if (!hasUsableHint) return backoffMs(attempt);

  const jittered = serverHint + Math.floor(Math.random() * 250);
  return Math.min(60_000, jittered);
}
|
||||
|
||||
/**
 * Formats a delivery time for the Chatter API as "YYYY-MM-DD HH:mm:ssZ" in UTC.
 *
 * NOTE(review): `.tz(timezone)` after parsing only changes the display zone
 * of an already-parsed instant, and the result is then converted to UTC —
 * so the timezone argument may have no effect on the emitted instant;
 * `moment.tz(value, timezone)` may have been intended. Confirm with callers.
 *
 * @param {*} value - Date-like value; falsy returns undefined.
 * @param {string} [timezone] - IANA zone name; ignored when unknown to moment.
 * @returns {string|undefined} Formatted timestamp, or undefined when missing/unparseable.
 */
function formatChatterTimestamp(value, timezone) {
  if (!value) return undefined;

  const hasValidTimezone = Boolean(timezone && moment.tz.zone(timezone));
  const parsed = hasValidTimezone ? moment(value).tz(timezone) : moment(value);
  if (!parsed.isValid()) return undefined;

  // Keep a strict, Chatter-friendly timestamp without fractional seconds.
  return parsed.utc().format("YYYY-MM-DD HH:mm:ss[Z]");
}
|
||||
|
||||
/**
 * Creates a limiter that runs at most `max` queued async functions at once.
 * The returned function enqueues `fn` and resolves/rejects with its outcome.
 *
 * @param {number} max - Maximum simultaneously running functions.
 * @returns {function(Function): Promise<*>}
 */
function createConcurrencyLimit(max) {
  let running = 0;
  const pending = [];

  const drain = () => {
    while (running < max && pending.length > 0) {
      const task = pending.shift();
      running++;
      task()
        .catch(() => {}) // outcome already forwarded via the wrapper below
        .finally(() => {
          running--;
          drain();
        });
    }
  };

  return (fn) =>
    new Promise((resolve, reject) => {
      pending.push(async () => {
        try {
          resolve(await fn());
        } catch (err) {
          reject(err);
        }
      });
      drain();
    });
}
|
||||
|
||||
/**
 * Returns (creating on first use) the token-bucket rate limiter for a company.
 * Limiters live in the module-level companyRateLimiters map for the process lifetime.
 *
 * @param {number|string} companyId
 * @returns {Object} Limiter with acquire() and pause(ms).
 */
function getCompanyRateLimiter(companyId) {
  const key = String(companyId);
  const existing = companyRateLimiters.get(key);
  if (existing) return existing;

  const limiter = createTokenBucketRateLimiter({
    refillPerSecond: CHATTER_REQUESTS_PER_SECOND,
    capacity: CHATTER_BURST_CAPACITY
  });

  companyRateLimiters.set(key, limiter);
  return limiter;
}
|
||||
|
||||
/**
 * Token-bucket rate limiter.
 *
 * - acquire(): resolves when a token is available; acquirers are served in
 *   order because acquisitions are chained on an internal promise.
 * - pause(ms): defers all acquisitions until now+ms (used for 429 backoff);
 *   never shortens an existing pause.
 *
 * @param {Object} opts
 * @param {number} opts.refillPerSecond - Tokens added per second.
 * @param {number} opts.capacity - Maximum tokens (burst size); starts full.
 */
function createTokenBucketRateLimiter({ refillPerSecond, capacity }) {
  let tokens = capacity;
  let lastRefillAt = Date.now();
  let pauseUntil = 0;
  let chain = Promise.resolve();

  // Add tokens for time elapsed since the last refill, capped at capacity.
  const refill = () => {
    const now = Date.now();
    const elapsedSec = (now - lastRefillAt) / 1000;
    if (elapsedSec <= 0) return;
    tokens = Math.min(capacity, tokens + elapsedSec * refillPerSecond);
    lastRefillAt = now;
  };

  // Loops (with sleeps) until a token can be consumed.
  const waitForPermit = async () => {
    for (;;) {
      const now = Date.now();
      if (pauseUntil > now) {
        await sleep(pauseUntil - now);
        continue;
      }

      refill();
      if (tokens >= 1) {
        tokens -= 1;
        return;
      }

      // Sleep roughly until the next token should exist (min 25ms).
      const missing = 1 - tokens;
      const waitMs = Math.max(25, Math.ceil((missing / refillPerSecond) * 1000));
      await sleep(waitMs);
    }
  };

  return {
    acquire() {
      // Chain so concurrent acquirers wait their turn; the second argument
      // keeps the chain alive even if an earlier link rejected.
      chain = chain.then(waitForPermit, waitForPermit);
      return chain;
    },
    pause(ms) {
      const n = Number(ms);
      if (!Number.isFinite(n) || n <= 0) return;
      pauseUntil = Math.max(pauseUntil, Date.now() + n);
    }
  };
}
|
||||
|
||||
/**
 * Coerces `value` to a number; returns `fallback` unless the result is a
 * finite, strictly positive number.
 *
 * @param {*} value
 * @param {number} fallback
 * @returns {number}
 */
function getPositiveNumber(value, fallback) {
  const parsed = Number(value);
  if (!Number.isFinite(parsed)) return fallback;
  return parsed > 0 ? parsed : fallback;
}
|
||||
|
||||
/**
 * Coerces `value` to a number; returns `fallback` unless the result is a
 * strictly positive integer.
 *
 * @param {*} value
 * @param {number} fallback
 * @returns {number}
 */
function getPositiveInteger(value, fallback) {
  const parsed = Number(value);
  if (!Number.isInteger(parsed)) return fallback;
  if (parsed <= 0) return fallback;
  return parsed;
}
|
||||
|
||||
/**
 * Returns a per-company Chatter API client, caching both the token and the client.
 *
 * Clients are cached in the module-level clientCache for the process lifetime;
 * the token is resolved once (via getChatterApiTokenCached) at creation time.
 *
 * @param {number|string} companyId
 * @param {Object} [sessionUtils] - Optional Redis-backed token cache helpers.
 * @returns {Promise<ChatterApiClient>}
 */
async function getChatterApiClient(companyId, sessionUtils) {
  const key = String(companyId);

  const existing = clientCache.get(key);
  if (existing) return existing;

  const apiToken = await getChatterApiTokenCached(companyId, sessionUtils);
  const chatterApi = new ChatterApiClient({ baseUrl: CHATTER_BASE_URL, apiToken });

  clientCache.set(key, chatterApi);
  return chatterApi;
}
|
||||
|
||||
/**
 * Fetches the per-company token from AWS Secrets Manager with Redis caching
 * SecretId: CHATTER_COMPANY_KEY_<companyId>
 *
 * Uses Redis caching + in-flight dedupe to avoid hammering Secrets Manager.
 * Lookup order: CHATTER_API_TOKEN env override -> Redis cache (sessionUtils)
 * -> in-flight promise -> Secrets Manager fetch (then written back to Redis).
 *
 * @param {number|string} companyId
 * @param {Object} [sessionUtils] - Optional { getChatterToken, setChatterToken }.
 * @returns {Promise<string>} The API token.
 * @throws {Error} When companyId is missing or the secret is empty.
 */
async function getChatterApiTokenCached(companyId, sessionUtils) {
  const key = String(companyId ?? "").trim();
  if (!key) throw new Error("getChatterApiToken: companyId is required");

  // Optional override for emergency/dev
  if (process.env.CHATTER_API_TOKEN) return process.env.CHATTER_API_TOKEN;

  // Check Redis cache if sessionUtils is available
  if (sessionUtils?.getChatterToken) {
    const cachedToken = await sessionUtils.getChatterToken(key);
    if (cachedToken) {
      logger.log("chatter-api-get-token-cache-hit", "DEBUG", "api", null, { companyId: key });
      return cachedToken;
    }
  }

  // Check for in-flight requests
  const inflight = tokenInFlight.get(key);
  if (inflight) return inflight;

  const p = (async () => {
    logger.log("chatter-api-get-token-cache-miss", "DEBUG", "api", null, { companyId: key });

    // Fetch token from Secrets Manager using shared function
    const token = await getChatterApiToken(companyId);

    // Store in Redis cache if sessionUtils is available
    if (sessionUtils?.setChatterToken) {
      await sessionUtils.setChatterToken(key, token);
    }

    return token;
  })();

  // Register before awaiting so concurrent callers share the same promise.
  tokenInFlight.set(key, p);

  try {
    return await p;
  } finally {
    // Always clear the slot, even when the fetch rejected.
    tokenInFlight.delete(key);
  }
}
|
||||
@@ -4,6 +4,8 @@ const converter = require("json-2-csv");
|
||||
const logger = require("../utils/logger");
|
||||
const fs = require("fs");
|
||||
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
|
||||
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
|
||||
const { isString, isEmpty } = require("lodash");
|
||||
let Client = require("ssh2-sftp-client");
|
||||
|
||||
const client = require("../graphql-client/graphql-client").client;
|
||||
@@ -144,7 +146,18 @@ async function processBatch(shopsToProcess, start, end, allChatterObjects, allEr
|
||||
|
||||
async function getPrivateKey() {
|
||||
// Connect to AWS Secrets Manager
|
||||
const client = new SecretsManagerClient({ region: "ca-central-1" });
|
||||
const secretsClientOptions = {
|
||||
region: "ca-central-1",
|
||||
credentials: defaultProvider()
|
||||
};
|
||||
|
||||
const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);
|
||||
|
||||
if (isLocal) {
|
||||
secretsClientOptions.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
|
||||
}
|
||||
|
||||
const client = new SecretsManagerClient(secretsClientOptions);
|
||||
const command = new GetSecretValueCommand({ SecretId: "CHATTER_PRIVATE_KEY" });
|
||||
|
||||
logger.log("chatter-get-private-key", "DEBUG", "api", null, null);
|
||||
|
||||
@@ -9,4 +9,5 @@ exports.emsUpload = require("./emsUpload").default;
|
||||
exports.carfax = require("./carfax").default;
|
||||
exports.carfaxRps = require("./carfax-rps").default;
|
||||
exports.vehicletype = require("./vehicletype/vehicletype").default;
|
||||
exports.documentAnalytics = require("./analytics/documents").default;
|
||||
exports.documentAnalytics = require("./analytics/documents").default;
|
||||
exports.chatterApi = require("./chatter-api").default;
|
||||
|
||||
178
server/data/queues/chatterApiQueue.js
Normal file
178
server/data/queues/chatterApiQueue.js
Normal file
@@ -0,0 +1,178 @@
|
||||
const { Queue, Worker } = require("bullmq");
|
||||
const { registerCleanupTask } = require("../../utils/cleanupManager");
|
||||
const getBullMQPrefix = require("../../utils/getBullMQPrefix");
|
||||
const devDebugLogger = require("../../utils/devDebugLogger");
|
||||
const moment = require("moment-timezone");
|
||||
const { sendServerEmail } = require("../../email/sendemail");
|
||||
|
||||
let chatterApiQueue;
|
||||
let chatterApiWorker;
|
||||
|
||||
/**
 * Initializes the Chatter API queue and worker. Idempotent: the queue/worker
 * are created only on the first call; later calls return the existing queue.
 *
 * @param {Object} options - Configuration options for queue initialization.
 * @param {Object} options.pubClient - Redis client instance for queue communication.
 * @param {Object} options.logger - Logger instance for logging events and debugging.
 * @param {Function} options.processJob - Function to process the Chatter API job.
 * @param {Function} options.getChatterToken - Function to get Chatter token from Redis.
 * @param {Function} options.setChatterToken - Function to set Chatter token in Redis.
 * @returns {Queue} The initialized `chatterApiQueue` instance.
 */
const loadChatterApiQueue = async ({ pubClient, logger, processJob, getChatterToken, setChatterToken }) => {
  if (!chatterApiQueue) {
    const prefix = getBullMQPrefix();

    devDebugLogger(`Initializing Chatter API Queue with prefix: ${prefix}`);

    // Jobs retry up to 3 times with exponential backoff; failed jobs are kept
    // for inspection, completed jobs are pruned.
    chatterApiQueue = new Queue("chatterApi", {
      prefix,
      connection: pubClient,
      defaultJobOptions: {
        removeOnComplete: true,
        removeOnFail: false,
        attempts: 3,
        backoff: {
          type: "exponential",
          delay: 60000 // 1 minute base delay
        }
      }
    });

    chatterApiWorker = new Worker(
      "chatterApi",
      async (job) => {
        const { start, end, bodyshopIds, skipUpload } = job.data;

        logger.log("chatter-api-queue-job-start", "INFO", "api", null, {
          jobId: job.id,
          start,
          end,
          bodyshopIds,
          skipUpload
        });

        try {
          // Provide sessionUtils-like object with token caching functions
          const sessionUtils = {
            getChatterToken,
            setChatterToken
          };

          const result = await processJob({
            start,
            end,
            bodyshopIds,
            skipUpload,
            sessionUtils
          });

          logger.log("chatter-api-queue-job-complete", "INFO", "api", null, {
            jobId: job.id,
            totals: result.totals
          });

          // Send email summary
          await sendServerEmail({
            subject: `Chatter API Report ${moment().format("MM-DD-YY")}`,
            text:
              `Totals:\n${JSON.stringify(result.totals, null, 2)}\n\n` +
              `Shop summaries:\n${JSON.stringify(result.allShopSummaries, null, 2)}\n\n` +
              `Errors:\n${JSON.stringify(result.allErrors, null, 2)}\n`
          });

          return result;
        } catch (error) {
          logger.log("chatter-api-queue-job-error", "ERROR", "api", null, {
            jobId: job.id,
            error: error.message,
            stack: error.stack
          });

          // Send error email
          // NOTE(review): if sendServerEmail itself rejects here, its error
          // will replace the original job error — confirm this is intended.
          await sendServerEmail({
            subject: `Chatter API Error ${moment().format("MM-DD-YY")}`,
            text: `Job failed:\n${error.message}\n\n${error.stack}`
          });

          // Rethrow so BullMQ records the failure and applies retry/backoff.
          throw error;
        }
      },
      {
        prefix,
        connection: pubClient,
        concurrency: 1, // Process one job at a time
        lockDuration: 14400000 // 4 hours - allow long-running jobs
      }
    );

    // Event handlers
    chatterApiWorker.on("completed", (job) => {
      devDebugLogger(`Chatter API job ${job.id} completed`);
    });

    chatterApiWorker.on("failed", (job, err) => {
      logger.log("chatter-api-queue-job-failed", "ERROR", "api", null, {
        jobId: job?.id,
        message: err?.message,
        stack: err?.stack
      });
    });

    chatterApiWorker.on("progress", (job, progress) => {
      devDebugLogger(`Chatter API job ${job.id} progress: ${progress}%`);
    });

    // Register cleanup task so the worker shuts down gracefully on exit.
    const shutdown = async () => {
      devDebugLogger("Closing Chatter API queue worker...");
      await chatterApiWorker.close();
      devDebugLogger("Chatter API queue worker closed");
    };
    registerCleanupTask(shutdown);
  }

  return chatterApiQueue;
};
|
||||
|
||||
/**
 * Retrieves the initialized `chatterApiQueue` instance.
 *
 * @returns {Queue} The `chatterApiQueue` instance.
 * @throws {Error} If `chatterApiQueue` is not initialized.
 */
const getQueue = () => {
  if (chatterApiQueue) {
    return chatterApiQueue;
  }
  throw new Error("Chatter API queue not initialized. Ensure loadChatterApiQueue is called during bootstrap.");
};
|
||||
|
||||
/**
 * Dispatches a Chatter API job to the queue, defaulting the delivery window
 * to "yesterday start-of-day .. today end-of-day" when dates are omitted.
 *
 * @param {Object} options - Options for the job.
 * @param {string} options.start - Start date for the delivery window.
 * @param {string} options.end - End date for the delivery window.
 * @param {Array<string>} options.bodyshopIds - Optional specific shops to process.
 * @param {boolean} options.skipUpload - Dry-run flag.
 * @returns {Promise<void>} Resolves when the job is added to the queue.
 */
const dispatchChatterApiJob = async ({ start, end, bodyshopIds, skipUpload }) => {
  const queue = getQueue();

  const defaultStart = moment().subtract(1, "days").startOf("day").toISOString();
  const defaultEnd = moment().endOf("day").toISOString();

  const jobData = {
    start: start || defaultStart,
    end: end || defaultEnd,
    bodyshopIds: bodyshopIds || [],
    skipUpload: skipUpload || false
  };

  // Timestamped jobId keeps concurrent dispatches distinguishable in BullMQ.
  const jobId = `chatter-api-${moment().format("YYYY-MM-DD-HHmmss")}`;
  await queue.add("process-chatter-api", jobData, { jobId });

  devDebugLogger(`Added Chatter API job to queue: ${JSON.stringify(jobData)}`);
};
|
||||
|
||||
module.exports = { loadChatterApiQueue, getQueue, dispatchChatterApiJob };
|
||||
@@ -306,8 +306,7 @@ async function FortellisSelectedCustomer({ socket, redisHelpers, selectedCustome
|
||||
CreateFortellisLogEvent(socket, "ERROR", `{7.1} Error posting vehicle service history. ${error.message}`);
|
||||
}
|
||||
|
||||
//TODO: IF THE VEHICLE SERVICE HISTORY FAILS, WE NEED TO MARK IT AS SUCH AND NOT DELETE THE TRANSACTION.
|
||||
//socket.emit("export-success", JobData.id);
|
||||
socket.emit("export-success", JobData.id);
|
||||
} else {
|
||||
//There was something wrong. Throw an error to trigger clean up.
|
||||
//throw new Error("Error posting DMS Batch Transaction");
|
||||
@@ -431,10 +430,10 @@ async function QueryDmsCustomerByName({ socket, redisHelpers, JobData }) {
|
||||
const ownerName =
|
||||
JobData.ownr_co_nm && JobData.ownr_co_nm.trim() !== ""
|
||||
//? [["firstName", JobData.ownr_co_nm.replace(replaceSpecialRegex, "").toUpperCase()]] // Commented out until we receive direction.
|
||||
? [["email", JobData.ownr_ea.toUpperCase()]]
|
||||
? [["phone", JobData.ownr_ph1?.replace(replaceSpecialRegex, "")]]
|
||||
: [
|
||||
["firstName", JobData.ownr_fn.replace(replaceSpecialRegex, "").toUpperCase()],
|
||||
["lastName", JobData.ownr_ln.replace(replaceSpecialRegex, "").toUpperCase()]
|
||||
["firstName", JobData.ownr_fn?.replace(/[^a-zA-Z-]/g, "").toUpperCase()],
|
||||
["lastName", JobData.ownr_ln?.replace(/[^a-zA-Z-]/g, "").toUpperCase()]
|
||||
];
|
||||
try {
|
||||
const result = await MakeFortellisCall({
|
||||
@@ -475,9 +474,9 @@ async function InsertDmsCustomer({ socket, redisHelpers, JobData }) {
|
||||
} : {
|
||||
customerName: {
|
||||
//"suffix": "Mr.",
|
||||
firstName: JobData.ownr_fn && JobData.ownr_fn.replace(replaceSpecialRegex, "").toUpperCase(),
|
||||
firstName: JobData.ownr_fn && JobData.ownr_fn.replace(/[^a-zA-Z-]/g, "").toUpperCase(),
|
||||
//"middleName": "",
|
||||
lastName: JobData.ownr_ln && JobData.ownr_ln.replace(replaceSpecialRegex, "").toUpperCase()
|
||||
lastName: JobData.ownr_ln && JobData.ownr_ln.replace(/[^a-zA-Z-]/g, "").toUpperCase()
|
||||
//"title": "",
|
||||
//"nickName": ""
|
||||
}
|
||||
|
||||
@@ -827,13 +827,21 @@ exports.AUTOHOUSE_QUERY = `query AUTOHOUSE_EXPORT($start: timestamptz, $bodyshop
|
||||
quantity
|
||||
}
|
||||
}
|
||||
timetickets {
|
||||
timetickets(where: {cost_center: {_neq: "timetickets.labels.shift"}}) {
|
||||
id
|
||||
rate
|
||||
ciecacode
|
||||
cost_center
|
||||
actualhrs
|
||||
productivehrs
|
||||
flat_rate
|
||||
employeeid
|
||||
employee {
|
||||
employee_number
|
||||
flat_rate
|
||||
first_name
|
||||
last_name
|
||||
}
|
||||
}
|
||||
area_of_damage
|
||||
employee_prep_rel {
|
||||
@@ -1612,6 +1620,9 @@ exports.GET_JOB_BY_PK = `query GET_JOB_BY_PK($id: uuid!) {
|
||||
rate_ats
|
||||
flat_rate_ats
|
||||
rate_ats_flat
|
||||
dms_id
|
||||
dms_customer_id
|
||||
dms_advisor_id
|
||||
joblines(where: { removed: { _eq: false } }){
|
||||
id
|
||||
line_no
|
||||
@@ -1725,6 +1736,7 @@ query QUERY_JOB_COSTING_DETAILS($id: uuid!) {
|
||||
profitcenter_part
|
||||
profitcenter_labor
|
||||
act_price_before_ppc
|
||||
manual_line
|
||||
}
|
||||
bills {
|
||||
id
|
||||
@@ -1842,6 +1854,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
|
||||
op_code_desc
|
||||
profitcenter_part
|
||||
profitcenter_labor
|
||||
manual_line
|
||||
}
|
||||
bills {
|
||||
id
|
||||
@@ -1909,10 +1922,35 @@ exports.GET_AUTOHOUSE_SHOPS = `query GET_AUTOHOUSE_SHOPS {
|
||||
}`;
|
||||
|
||||
exports.GET_CHATTER_SHOPS = `query GET_CHATTER_SHOPS {
|
||||
bodyshops(where: {chatterid: {_is_null: false}, _or: {chatterid: {_neq: ""}}}){
|
||||
bodyshops(
|
||||
where: {
|
||||
chatterid: { _is_null: false, _neq: "" }
|
||||
_or: [
|
||||
{ chatter_company_id: { _is_null: true } }
|
||||
{ chatter_company_id: { _eq: "" } }
|
||||
]
|
||||
}
|
||||
) {
|
||||
id
|
||||
shopname
|
||||
chatterid
|
||||
chatter_company_id
|
||||
imexshopid
|
||||
timezone
|
||||
}
|
||||
}`;
|
||||
|
||||
exports.GET_CHATTER_SHOPS_WITH_COMPANY = `query GET_CHATTER_SHOPS_WITH_COMPANY {
|
||||
bodyshops(
|
||||
where: {
|
||||
chatterid: { _is_null: false, _neq: "" }
|
||||
chatter_company_id: { _is_null: false, _neq: "" }
|
||||
}
|
||||
) {
|
||||
id
|
||||
shopname
|
||||
chatterid
|
||||
chatter_company_id
|
||||
imexshopid
|
||||
timezone
|
||||
}
|
||||
@@ -3201,9 +3239,12 @@ exports.UPDATE_USER_FCM_TOKENS_BY_EMAIL = /* GraphQL */ `
|
||||
}
|
||||
`;
|
||||
|
||||
exports.SET_JOB_DMS_ID = `mutation SetJobDmsId($id: uuid!, $dms_id: String!) {
|
||||
update_jobs_by_pk(pk_columns: { id: $id }, _set: { dms_id: $dms_id }) {
|
||||
exports.SET_JOB_DMS_ID = `mutation SetJobDmsId($id: uuid!, $dms_id: String!, $dms_customer_id: String, $dms_advisor_id: String, $kmin: Int) {
|
||||
update_jobs_by_pk(pk_columns: { id: $id }, _set: { dms_id: $dms_id, dms_customer_id: $dms_customer_id, dms_advisor_id: $dms_advisor_id, kmin: $kmin }) {
|
||||
id
|
||||
dms_id
|
||||
dms_customer_id
|
||||
dms_advisor_id
|
||||
kmin
|
||||
}
|
||||
}`;
|
||||
|
||||
@@ -13,6 +13,9 @@ const { DiscountNotAlreadyCounted } = InstanceManager({
|
||||
// Dinero.globalLocale = "en-CA";
|
||||
Dinero.globalRoundingMode = "HALF_EVEN";
|
||||
|
||||
const isImEX = InstanceManager({ imex: true, rome: false });
|
||||
const isRome = InstanceManager({ imex: false, rome: true });
|
||||
|
||||
async function JobCosting(req, res) {
|
||||
const { jobid } = req.body;
|
||||
|
||||
@@ -266,9 +269,7 @@ function GenerateCostingData(job) {
|
||||
);
|
||||
|
||||
const materialsHours = { mapaHrs: 0, mashHrs: 0 };
|
||||
let mashOpCodes = InstanceManager({
|
||||
rome: ParseCalopCode(job.materials["MASH"]?.cal_opcode)
|
||||
});
|
||||
let mashOpCodes = isRome && ParseCalopCode(job.materials["MASH"]?.cal_opcode);
|
||||
let hasMapaLine = false;
|
||||
let hasMashLine = false;
|
||||
|
||||
@@ -343,7 +344,7 @@ function GenerateCostingData(job) {
|
||||
if (!acc.labor[laborProfitCenter]) acc.labor[laborProfitCenter] = Dinero();
|
||||
acc.labor[laborProfitCenter] = acc.labor[laborProfitCenter].add(laborAmount);
|
||||
|
||||
if (val.act_price > 0 && val.lbr_op === "OP14") {
|
||||
if (val.act_price > 0 && val.lbr_op === "OP14" && !val.part_type) {
|
||||
//Scenario where SGI may pay out hours using a part price.
|
||||
acc.labor[laborProfitCenter] = acc.labor[laborProfitCenter].add(
|
||||
Dinero({
|
||||
@@ -355,7 +356,7 @@ function GenerateCostingData(job) {
|
||||
if (val.mod_lbr_ty === "LAR") {
|
||||
materialsHours.mapaHrs += val.mod_lb_hrs || 0;
|
||||
}
|
||||
if (InstanceManager({ imex: true, rome: false })) {
|
||||
if (isImEX) {
|
||||
if (val.mod_lbr_ty !== "LAR") {
|
||||
materialsHours.mashHrs += val.mod_lb_hrs || 0;
|
||||
}
|
||||
@@ -363,6 +364,9 @@ function GenerateCostingData(job) {
|
||||
if (val.mod_lbr_ty !== "LAR" && mashOpCodes.includes(val.lbr_op)) {
|
||||
materialsHours.mashHrs += val.mod_lb_hrs || 0;
|
||||
}
|
||||
if (val.manual_line === true && !mashOpCodes.includes(val.lbr_op) && val.mod_lbr_ty !== "LAR") {
|
||||
materialsHours.mashHrs += val.mod_lb_hrs || 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -499,7 +503,7 @@ function GenerateCostingData(job) {
|
||||
let disc = Dinero(),
|
||||
markup = Dinero();
|
||||
const convertedKey = Object.keys(defaultProfits).find((k) => defaultProfits[k] === key);
|
||||
if (job.parts_tax_rates && job.parts_tax_rates[convertedKey.toUpperCase()]) {
|
||||
if (convertedKey && job.parts_tax_rates && job.parts_tax_rates[convertedKey.toUpperCase()]) {
|
||||
if (
|
||||
job.parts_tax_rates[convertedKey.toUpperCase()].prt_discp !== undefined &&
|
||||
job.parts_tax_rates[convertedKey.toUpperCase()].prt_discp >= 0
|
||||
@@ -522,15 +526,18 @@ function GenerateCostingData(job) {
|
||||
}
|
||||
}
|
||||
|
||||
if (InstanceManager({ rome: true })) {
|
||||
const correspondingCiecaStlTotalLine = job.cieca_stl?.data.find(
|
||||
(c) => c.ttl_typecd === convertedKey.toUpperCase()
|
||||
);
|
||||
if (
|
||||
correspondingCiecaStlTotalLine &&
|
||||
Math.abs(jobLineTotalsByProfitCenter.parts[key].getAmount() - correspondingCiecaStlTotalLine.ttl_amt * 100) > 1
|
||||
) {
|
||||
jobLineTotalsByProfitCenter.parts[key] = jobLineTotalsByProfitCenter.parts[key].add(disc).add(markup);
|
||||
if (isRome) {
|
||||
if (convertedKey) {
|
||||
const correspondingCiecaStlTotalLine = job.cieca_stl?.data.find(
|
||||
(c) => c.ttl_typecd === convertedKey.toUpperCase()
|
||||
);
|
||||
if (
|
||||
correspondingCiecaStlTotalLine &&
|
||||
Math.abs(jobLineTotalsByProfitCenter.parts[key].getAmount() - correspondingCiecaStlTotalLine.ttl_amt * 100) >
|
||||
1
|
||||
) {
|
||||
jobLineTotalsByProfitCenter.parts[key] = jobLineTotalsByProfitCenter.parts[key].add(disc).add(markup);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -540,7 +547,7 @@ function GenerateCostingData(job) {
|
||||
if (
|
||||
job.materials["MAPA"] &&
|
||||
job.materials["MAPA"].cal_maxdlr !== undefined &&
|
||||
job.materials["MAPA"].cal_maxdlr >= 0
|
||||
(isRome ? job.materials["MAPA"].cal_maxdlr >= 0 : job.materials["MAPA"].cal_maxdlr > 0)
|
||||
) {
|
||||
//It has an upper threshhold.
|
||||
threshold = Dinero({
|
||||
@@ -590,7 +597,7 @@ function GenerateCostingData(job) {
|
||||
if (
|
||||
job.materials["MASH"] &&
|
||||
job.materials["MASH"].cal_maxdlr !== undefined &&
|
||||
job.materials["MASH"].cal_maxdlr >= 0
|
||||
(isRome ? job.materials["MASH"].cal_maxdlr >= 0 : job.materials["MASH"].cal_maxdlr > 0)
|
||||
) {
|
||||
//It has an upper threshhold.
|
||||
threshold = Dinero({
|
||||
@@ -636,7 +643,7 @@ function GenerateCostingData(job) {
|
||||
}
|
||||
}
|
||||
|
||||
if (InstanceManager({ imex: false, rome: true })) {
|
||||
if (isRome) {
|
||||
const stlTowing = job.cieca_stl?.data.find((c) => c.ttl_type === "OTTW");
|
||||
const stlStorage = job.cieca_stl?.data.find((c) => c.ttl_type === "OTST");
|
||||
|
||||
|
||||
@@ -47,14 +47,14 @@ exports.totalsSsu = async function (req, res) {
|
||||
throw new Error("Failed to update job totals");
|
||||
}
|
||||
|
||||
res.status(200).send();
|
||||
res.status(200).json({ success: true });
|
||||
} catch (error) {
|
||||
logger.log("job-totals-ssu-USA-error", "error", req?.user?.email, id, {
|
||||
jobid: id,
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
res.status(503).send();
|
||||
res.status(503).json({ error: "Failed to calculate totals" });
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -47,14 +47,14 @@ exports.totalsSsu = async function (req, res) {
|
||||
throw new Error("Failed to update job totals");
|
||||
}
|
||||
|
||||
res.status(200).send();
|
||||
res.status(200).json({ success: true });
|
||||
} catch (error) {
|
||||
logger.log("job-totals-ssu-error", "error", req.user.email, id, {
|
||||
jobid: id,
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
res.status(503).send();
|
||||
res.status(503).json({ error: "Failed to calculate totals" });
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
12
server/routes/chatterRoutes.js
Normal file
12
server/routes/chatterRoutes.js
Normal file
@@ -0,0 +1,12 @@
|
||||
const express = require("express");
|
||||
const createLocation = require("../chatter/createLocation");
|
||||
const router = express.Router();
|
||||
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
|
||||
const validateAdminMiddleware = require("../middleware/validateAdminMiddleware");
|
||||
|
||||
router.use(validateFirebaseIdTokenMiddleware);
|
||||
router.use(validateAdminMiddleware);
|
||||
|
||||
router.post("/create-location", createLocation);
|
||||
|
||||
module.exports = router;
|
||||
@@ -1,10 +1,21 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
const { autohouse, claimscorp, chatter, kaizen, usageReport, podium, carfax, carfaxRps } = require("../data/data");
|
||||
const {
|
||||
autohouse,
|
||||
claimscorp,
|
||||
chatter,
|
||||
kaizen,
|
||||
usageReport,
|
||||
podium,
|
||||
carfax,
|
||||
carfaxRps,
|
||||
chatterApi
|
||||
} = require("../data/data");
|
||||
|
||||
router.post("/ah", autohouse);
|
||||
router.post("/cc", claimscorp);
|
||||
router.post("/chatter", chatter);
|
||||
router.post("/chatter-api", chatterApi);
|
||||
router.post("/kaizen", kaizen);
|
||||
router.post("/usagereport", usageReport);
|
||||
router.post("/podium", podium);
|
||||
|
||||
@@ -86,8 +86,9 @@ const buildMessageJSONString = ({ error, classification, result, fallback }) =>
|
||||
/**
|
||||
* Success: mark job exported + (optionally) insert a success log.
|
||||
* Uses queries.MARK_JOB_EXPORTED (same shape as Fortellis/PBS).
|
||||
* @param {boolean} isEarlyRo - If true, only logs success but does NOT change job status (for early RO creation)
|
||||
*/
|
||||
const markRRExportSuccess = async ({ socket, jobId, job, bodyshop, result, metaExtra = {} }) => {
|
||||
const markRRExportSuccess = async ({ socket, jobId, job, bodyshop, result, metaExtra = {}, isEarlyRo = false }) => {
|
||||
const endpoint = process.env.GRAPHQL_ENDPOINT;
|
||||
if (!endpoint) throw new Error("GRAPHQL_ENDPOINT not configured");
|
||||
const token = getAuthToken(socket);
|
||||
@@ -96,11 +97,40 @@ const markRRExportSuccess = async ({ socket, jobId, job, bodyshop, result, metaE
|
||||
const client = new GraphQLClient(endpoint, {});
|
||||
client.setHeaders({ Authorization: `Bearer ${token}` });
|
||||
|
||||
const meta = buildRRExportMeta({ result, extra: metaExtra });
|
||||
|
||||
// For early RO, we only insert a log but do NOT change job status or mark as exported
|
||||
if (isEarlyRo) {
|
||||
try {
|
||||
await client.request(queries.INSERT_EXPORT_LOG, {
|
||||
logs: [
|
||||
{
|
||||
bodyshopid: bodyshop?.id || job?.bodyshop?.id,
|
||||
jobid: jobId,
|
||||
successful: true,
|
||||
useremail: socket?.user?.email || null,
|
||||
metadata: meta,
|
||||
message: buildMessageJSONString({ result, fallback: "RR early RO created" })
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "RR early RO: success log inserted (job status unchanged)", {
|
||||
jobId
|
||||
});
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "ERROR", "RR early RO: failed to insert success log", {
|
||||
jobId,
|
||||
error: e?.message
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Full export: mark job as exported and insert success log
|
||||
const exportedStatus =
|
||||
job?.bodyshop?.md_ro_statuses?.default_exported || bodyshop?.md_ro_statuses?.default_exported || "Exported*";
|
||||
|
||||
const meta = buildRRExportMeta({ result, extra: metaExtra });
|
||||
|
||||
try {
|
||||
await client.request(queries.MARK_JOB_EXPORTED, {
|
||||
jobId,
|
||||
|
||||
@@ -56,7 +56,324 @@ const deriveRRStatus = (rrRes = {}) => {
|
||||
};
|
||||
|
||||
/**
 * Early RO Creation: create a minimal RR Repair Order with basic info
 * (customer, advisor, mileage, story). Used when creating an RO from the
 * convert button or admin page before the full job export.
 *
 * @param {Object} args
 * @param {Object} args.bodyshop - Shop record used to build the RR client/options (required).
 * @param {Object} args.job - Job record providing VIN / RO number / mileage (required).
 * @param {string|number} args.advisorNo - RR advisor number (required, non-blank).
 * @param {Object} args.selectedCustomer - Must carry `customerNo` or `custNo`.
 * @param {Object} [args.txEnvelope] - Optional overrides: `story`, `makeOverride`, `kmin`.
 * @param {Object} [args.socket] - Socket used for RR log events.
 * @param {*} [args.svId] - Passed through to the result for later correlation.
 * @returns {Promise<{success: boolean, data: *, roStatus: Object, statusBlocks: Object, customerNo: string, svId: *, roNo: *, xml: *}>}
 * @throws {Error} When required args are missing.
 */
const createMinimalRRRepairOrder = async (args) => {
  const { bodyshop, job, advisorNo, selectedCustomer, txEnvelope, socket, svId } = args || {};

  if (!bodyshop) throw new Error("createMinimalRRRepairOrder: bodyshop is required");
  if (!job) throw new Error("createMinimalRRRepairOrder: job is required");
  if (advisorNo == null || String(advisorNo).trim() === "") {
    throw new Error("createMinimalRRRepairOrder: advisorNo is required for RR");
  }

  // Resolve customer number (accept multiple shapes)
  const selected = selectedCustomer?.customerNo || selectedCustomer?.custNo;
  if (!selected) throw new Error("createMinimalRRRepairOrder: selectedCustomer.custNo/customerNo is required");

  const { client, opts } = buildClientAndOpts(bodyshop);

  // For early RO creation we always "Insert" (create minimal RO)
  const finalOpts = {
    ...opts,
    envelope: {
      ...(opts?.envelope || {}),
      sender: {
        ...(opts?.envelope?.sender || {}),
        task: "BSMRO",
        referenceId: "Insert"
      }
    }
  };

  const story = txEnvelope?.story ? String(txEnvelope.story).trim() : null;
  const makeOverride = txEnvelope?.makeOverride ? String(txEnvelope.makeOverride).trim() : null;

  // Build minimal RO payload - just header, no allocations/parts/labor.
  // VIN is normalized to 17 alphanumeric uppercase chars, or omitted entirely.
  const cleanVin =
    (job?.v_vin || "")
      .toString()
      .replace(/[^A-Za-z0-9]/g, "")
      .toUpperCase()
      .slice(0, 17) || undefined;

  // Resolve mileage - must parse to a non-negative integer, else omitted.
  let mileageIn = txEnvelope?.kmin ?? job?.kmin ?? null;
  if (mileageIn != null) {
    mileageIn = Number.parseInt(mileageIn, 10);
    // Number.isNaN avoids the coercing global isNaN.
    if (Number.isNaN(mileageIn) || mileageIn < 0) {
      mileageIn = null;
    }
  }

  CreateRRLogEvent(socket, "DEBUG", "Resolved mileage for early RO", {
    txEnvelopeKmin: txEnvelope?.kmin,
    jobKmin: job?.kmin,
    resolvedMileageIn: mileageIn
  });

  const payload = {
    customerNo: String(selected),
    advisorNo: String(advisorNo),
    vin: cleanVin,
    departmentType: "B",
    outsdRoNo: job?.ro_number || job?.id || undefined,
    estimate: {
      parts: "0",
      labor: "0",
      total: "0.00"
    }
  };

  // Only add mileageIn if we have a valid value (normalization above already
  // guarantees it is null or a non-negative integer).
  if (mileageIn != null) {
    payload.mileageIn = mileageIn;
  }

  // Add optional fields if present
  if (story) {
    payload.roComment = story;
  }
  if (makeOverride) {
    payload.makeOverride = makeOverride;
  }

  CreateRRLogEvent(socket, "INFO", "Creating minimal RR Repair Order (early creation)", {
    payload
  });

  const response = await client.createRepairOrder(payload, finalOpts);

  CreateRRLogEvent(socket, "INFO", "RR minimal Repair Order created", {
    payload,
    response
  });

  const data = response?.data || null;
  const statusBlocks = response?.statusBlocks || {};
  const roStatus = deriveRRStatus(response);

  const statusUpper = roStatus?.status ? String(roStatus.status).toUpperCase() : null;

  let success = false;

  if (statusUpper) {
    // Treat explicit FAILURE / ERROR as hard failures
    success = !["FAILURE", "ERROR"].includes(statusUpper);
  } else if (typeof response?.success === "boolean") {
    // Fallback to library boolean if no explicit status.
    // (A previous third branch re-checking roStatus.status was unreachable:
    // statusUpper is null exactly when roStatus?.status is falsy.)
    success = response.success;
  }

  // Extract canonical roNo for later updates
  const roNo = data?.dmsRoNo ?? data?.outsdRoNo ?? roStatus?.dmsRoNo ?? null;

  return {
    success,
    data,
    roStatus,
    statusBlocks,
    customerNo: String(selected),
    svId,
    roNo,
    xml: response?.xml // expose XML for logging/diagnostics
  };
};
|
||||
|
||||
/**
|
||||
* Full Data Update: Update an existing RR Repair Order with complete job data (allocations, parts, labor).
|
||||
* Used during DMS post form when an early RO was already created.
|
||||
* @param args
|
||||
* @returns {Promise<{success: boolean, data: *, roStatus: {status: *, statusCode: *|undefined, message}, statusBlocks: *|{}, customerNo: string, svId: *, roNo: *, xml: *}>}
|
||||
*/
|
||||
const updateRRRepairOrderWithFullData = async (args) => {
|
||||
const { bodyshop, job, advisorNo, selectedCustomer, txEnvelope, socket, svId, roNo } = args || {};
|
||||
|
||||
if (!bodyshop) throw new Error("updateRRRepairOrderWithFullData: bodyshop is required");
|
||||
if (!job) throw new Error("updateRRRepairOrderWithFullData: job is required");
|
||||
if (advisorNo == null || String(advisorNo).trim() === "") {
|
||||
throw new Error("updateRRRepairOrderWithFullData: advisorNo is required for RR");
|
||||
}
|
||||
if (!roNo) throw new Error("updateRRRepairOrderWithFullData: roNo is required for update");
|
||||
|
||||
// Resolve customer number (accept multiple shapes)
|
||||
const selected = selectedCustomer?.customerNo || selectedCustomer?.custNo;
|
||||
if (!selected) throw new Error("updateRRRepairOrderWithFullData: selectedCustomer.custNo/customerNo is required");
|
||||
|
||||
const { client, opts } = buildClientAndOpts(bodyshop);
|
||||
|
||||
// For full data update after early RO, we still use "Insert" referenceId
|
||||
// because we're inserting the job operations for the first time
|
||||
const finalOpts = {
|
||||
...opts,
|
||||
envelope: {
|
||||
...(opts?.envelope || {}),
|
||||
sender: {
|
||||
...(opts?.envelope?.sender || {}),
|
||||
task: "BSMRO",
|
||||
referenceId: "Insert"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const story = txEnvelope?.story ? String(txEnvelope.story).trim() : null;
|
||||
const makeOverride = txEnvelope?.makeOverride ? String(txEnvelope.makeOverride).trim() : null;
|
||||
|
||||
// Optional RR OpCode segments coming from the FE (RRPostForm)
|
||||
const opPrefix = txEnvelope?.opPrefix ?? txEnvelope?.op_prefix ?? null;
|
||||
const opBase = txEnvelope?.opBase ?? txEnvelope?.op_base ?? null;
|
||||
const opSuffix = txEnvelope?.opSuffix ?? txEnvelope?.op_suffix ?? null;
|
||||
|
||||
// RR-only extras
|
||||
let rrCentersConfig = null;
|
||||
let allocations = null;
|
||||
let opCode = null;
|
||||
|
||||
// 1) Responsibility center config (for visibility / debugging)
|
||||
try {
|
||||
rrCentersConfig = extractRrResponsibilityCenters(bodyshop);
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", "RR responsibility centers resolved", {
|
||||
hasCenters: !!bodyshop.md_responsibility_centers,
|
||||
profitCenters: Object.keys(rrCentersConfig?.profitsByName || {}),
|
||||
costCenters: Object.keys(rrCentersConfig?.costsByName || {}),
|
||||
dmsCostDefaults: rrCentersConfig?.dmsCostDefaults || {},
|
||||
dmsProfitDefaults: rrCentersConfig?.dmsProfitDefaults || {}
|
||||
});
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "ERROR", "Failed to resolve RR responsibility centers", {
|
||||
message: e?.message,
|
||||
stack: e?.stack
|
||||
});
|
||||
}
|
||||
|
||||
// 2) Allocations (sales + cost by center, with rr_* metadata already attached)
|
||||
try {
|
||||
const allocResult = await CdkCalculateAllocations(socket, job.id);
|
||||
|
||||
// We only need the per-center job allocations for RO.GOG / ROLABOR.
|
||||
allocations = Array.isArray(allocResult?.jobAllocations) ? allocResult.jobAllocations : [];
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "RR allocations resolved for update", {
|
||||
hasAllocations: allocations.length > 0,
|
||||
count: allocations.length,
|
||||
allocationsPreview: allocations.slice(0, 2).map(a => ({
|
||||
type: a?.type,
|
||||
code: a?.code,
|
||||
laborSale: a?.laborSale,
|
||||
laborCost: a?.laborCost,
|
||||
partsSale: a?.partsSale,
|
||||
partsCost: a?.partsCost
|
||||
})),
|
||||
taxAllocCount: Array.isArray(allocResult?.taxAllocArray) ? allocResult.taxAllocArray.length : 0,
|
||||
ttlAdjCount: Array.isArray(allocResult?.ttlAdjArray) ? allocResult.ttlAdjArray.length : 0,
|
||||
ttlTaxAdjCount: Array.isArray(allocResult?.ttlTaxAdjArray) ? allocResult.ttlTaxAdjArray.length : 0
|
||||
});
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "ERROR", "Failed to calculate RR allocations", {
|
||||
message: e?.message,
|
||||
stack: e?.stack
|
||||
});
|
||||
// Proceed with a header-only update if allocations fail.
|
||||
allocations = [];
|
||||
}
|
||||
|
||||
const resolvedBaseOpCode = resolveRROpCodeFromBodyshop(bodyshop);
|
||||
|
||||
let opCodeOverride = txEnvelope?.opCode || txEnvelope?.opcode || txEnvelope?.op_code || null;
|
||||
|
||||
// If the FE only sends segments, combine them here.
|
||||
if (!opCodeOverride && (opPrefix || opBase || opSuffix)) {
|
||||
const combined = `${opPrefix || ""}${opBase || ""}${opSuffix || ""}`.trim();
|
||||
if (combined) {
|
||||
opCodeOverride = combined;
|
||||
}
|
||||
}
|
||||
|
||||
if (opCodeOverride || resolvedBaseOpCode) {
|
||||
opCode = String(opCodeOverride || resolvedBaseOpCode).trim() || null;
|
||||
}
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", "RR OP config resolved", {
|
||||
opCode,
|
||||
baseFromConfig: resolvedBaseOpCode,
|
||||
opPrefix,
|
||||
opBase,
|
||||
opSuffix
|
||||
});
|
||||
|
||||
// Build full RO payload for update with allocations
|
||||
const payload = buildRRRepairOrderPayload({
|
||||
bodyshop,
|
||||
job,
|
||||
selectedCustomer: { customerNo: String(selected), custNo: String(selected) },
|
||||
advisorNo: String(advisorNo),
|
||||
story,
|
||||
makeOverride,
|
||||
allocations,
|
||||
opCode
|
||||
});
|
||||
|
||||
// Add roNo for linking to existing RO
|
||||
payload.roNo = String(roNo);
|
||||
payload.outsdRoNo = job?.ro_number || job?.id || undefined;
|
||||
|
||||
// Keep rolabor - it's needed to register the job/OpCode accounts in Reynolds
|
||||
// Without this, Reynolds won't recognize the OpCode when we send rogg operations
|
||||
// The rolabor section tells Reynolds "these jobs exist" even with minimal data
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "Sending full data for early RO (using create with roNo)", {
|
||||
roNo: String(roNo),
|
||||
hasRolabor: !!payload.rolabor,
|
||||
hasRogg: !!payload.rogg,
|
||||
payload
|
||||
});
|
||||
|
||||
// Use createRepairOrder (not update) with the roNo to link to the existing early RO
|
||||
// Reynolds will merge this with the existing RO header
|
||||
const response = await client.createRepairOrder(payload, finalOpts);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "RR Repair Order full data sent", {
|
||||
payload,
|
||||
response
|
||||
});
|
||||
|
||||
const data = response?.data || null;
|
||||
const statusBlocks = response?.statusBlocks || {};
|
||||
const roStatus = deriveRRStatus(response);
|
||||
|
||||
const statusUpper = roStatus?.status ? String(roStatus.status).toUpperCase() : null;
|
||||
|
||||
let success = false;
|
||||
|
||||
if (statusUpper) {
|
||||
success = !["FAILURE", "ERROR"].includes(statusUpper);
|
||||
} else if (typeof response?.success === "boolean") {
|
||||
success = response.success;
|
||||
} else if (roStatus?.status) {
|
||||
success = String(roStatus.status).toUpperCase() === "SUCCESS";
|
||||
}
|
||||
|
||||
return {
|
||||
success,
|
||||
data,
|
||||
roStatus,
|
||||
statusBlocks,
|
||||
customerNo: String(selected),
|
||||
svId,
|
||||
roNo: String(roNo),
|
||||
xml: response?.xml
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* LEGACY: Step 1: Export a job to RR as a new Repair Order with full data.
|
||||
* This is the original function - kept for backward compatibility if shops don't use early RO creation.
|
||||
* @param args
|
||||
* @returns {Promise<{success: boolean, data: *, roStatus: {status: *, statusCode: *|undefined, message}, statusBlocks: *|{}, customerNo: string, svId: *, roNo: *, xml: *}>}
|
||||
*/
|
||||
@@ -315,4 +632,10 @@ const finalizeRRRepairOrder = async (args) => {
|
||||
};
|
||||
};
|
||||
|
||||
module.exports = { exportJobToRR, finalizeRRRepairOrder, deriveRRStatus };
|
||||
module.exports = {
|
||||
exportJobToRR,
|
||||
createMinimalRRRepairOrder,
|
||||
updateRRRepairOrderWithFullData,
|
||||
finalizeRRRepairOrder,
|
||||
deriveRRStatus
|
||||
};
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
const CreateRRLogEvent = require("./rr-logger-event");
|
||||
const { rrCombinedSearch, rrGetAdvisors, buildClientAndOpts } = require("./rr-lookup");
|
||||
const { QueryJobData, buildRogogFromAllocations, buildRolaborFromRogog } = require("./rr-job-helpers");
|
||||
const { exportJobToRR, finalizeRRRepairOrder } = require("./rr-job-export");
|
||||
const {
|
||||
exportJobToRR,
|
||||
createMinimalRRRepairOrder,
|
||||
updateRRRepairOrderWithFullData,
|
||||
finalizeRRRepairOrder
|
||||
} = require("./rr-job-export");
|
||||
const RRCalculateAllocations = require("./rr-calculate-allocations").default;
|
||||
const { createRRCustomer } = require("./rr-customers");
|
||||
const { ensureRRServiceVehicle } = require("./rr-service-vehicles");
|
||||
@@ -124,13 +129,15 @@ const getBodyshopForSocket = async ({ bodyshopId, socket }) => {
|
||||
};
|
||||
|
||||
/**
|
||||
* GraphQL mutation to set job.dms_id
|
||||
* GraphQL mutation to set job.dms_id, dms_customer_id, and dms_advisor_id
|
||||
* @param socket
|
||||
* @param jobId
|
||||
* @param dmsId
|
||||
* @param dmsCustomerId
|
||||
* @param dmsAdvisorId
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const setJobDmsIdForSocket = async ({ socket, jobId, dmsId }) => {
|
||||
const setJobDmsIdForSocket = async ({ socket, jobId, dmsId, dmsCustomerId, dmsAdvisorId, mileageIn }) => {
|
||||
if (!jobId || !dmsId) {
|
||||
CreateRRLogEvent(socket, "WARN", "setJobDmsIdForSocket called without jobId or dmsId", {
|
||||
jobId,
|
||||
@@ -149,16 +156,28 @@ const setJobDmsIdForSocket = async ({ socket, jobId, dmsId }) => {
|
||||
const client = new GraphQLClient(endpoint, {});
|
||||
await client
|
||||
.setHeaders({ Authorization: `Bearer ${token}` })
|
||||
.request(queries.SET_JOB_DMS_ID, { id: jobId, dms_id: String(dmsId) });
|
||||
.request(queries.SET_JOB_DMS_ID, {
|
||||
id: jobId,
|
||||
dms_id: String(dmsId),
|
||||
dms_customer_id: dmsCustomerId ? String(dmsCustomerId) : null,
|
||||
dms_advisor_id: dmsAdvisorId ? String(dmsAdvisorId) : null,
|
||||
kmin: mileageIn != null && mileageIn > 0 ? parseInt(mileageIn, 10) : null
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", "Linked job.dms_id to RR RO", {
|
||||
jobId,
|
||||
dmsId: String(dmsId)
|
||||
dmsId: String(dmsId),
|
||||
dmsCustomerId,
|
||||
dmsAdvisorId,
|
||||
mileageIn
|
||||
});
|
||||
} catch (err) {
|
||||
CreateRRLogEvent(socket, "ERROR", "Failed to set job.dms_id after RR create/update", {
|
||||
jobId,
|
||||
dmsId,
|
||||
dmsCustomerId,
|
||||
dmsAdvisorId,
|
||||
mileageIn,
|
||||
message: err?.message || String(err),
|
||||
stack: err?.stack
|
||||
});
|
||||
@@ -373,7 +392,504 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("rr-export-job", async ({ jobid, jobId, txEnvelope } = {}) => {
|
||||
/**
|
||||
* NEW: Early RO Creation Event
|
||||
* Creates a minimal RO from convert button or admin page with customer selection,
|
||||
* advisor, mileage, and optional story/overrides.
|
||||
*/
|
||||
socket.on("rr-create-early-ro", async ({ jobid, jobId, txEnvelope } = {}) => {
|
||||
const rid = resolveJobId(jobid || jobId, { jobId, jobid }, null);
|
||||
|
||||
try {
|
||||
if (!rid) throw new Error("RR early create: jobid required");
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-1} Received RR early RO creation request`, { jobid: rid });
|
||||
|
||||
// Cache txEnvelope (contains advisor, mileage, story, overrides)
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.txEnvelope,
|
||||
txEnvelope || {},
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-1.1} Cached txEnvelope`, { hasTxEnvelope: !!txEnvelope });
|
||||
|
||||
const job = await QueryJobData({ redisHelpers }, rid);
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.JobData,
|
||||
job,
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-1.2} Cached JobData`, { vin: job?.v_vin, ro: job?.ro_number });
|
||||
|
||||
const adv = readAdvisorNo(
|
||||
{ txEnvelope },
|
||||
await redisHelpers.getSessionTransactionData(socket.id, getTransactionType(rid), RRCacheEnums.AdvisorNo)
|
||||
);
|
||||
|
||||
if (adv) {
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.AdvisorNo,
|
||||
String(adv),
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-1.3} Cached advisorNo`, { advisorNo: String(adv) });
|
||||
}
|
||||
|
||||
const { bodyshopId } = await getSessionOrSocket(redisHelpers, socket);
|
||||
const bodyshop = await getBodyshopForSocket({ bodyshopId, socket });
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-2} Running multi-search (Full Name + VIN)`);
|
||||
|
||||
const candidates = await rrMultiCustomerSearch({ bodyshop, job, socket, redisHelpers });
|
||||
const decorated = candidates.map((c) => (c.vinOwner != null ? c : { ...c, vinOwner: !!c.isVehicleOwner }));
|
||||
|
||||
socket.emit("rr-select-customer", decorated);
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-2.1} Emitted rr-select-customer for early RO`, {
|
||||
count: decorated.length,
|
||||
anyOwner: decorated.some((c) => c.vinOwner || c.isVehicleOwner)
|
||||
});
|
||||
} catch (error) {
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR early RO creation (prepare)`, {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", { vendor: "rr", jobId: rid, error: error.message });
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* NEW: Early RO Customer Selected Event
|
||||
* Handles customer selection for early RO creation and creates minimal RO.
|
||||
*/
|
||||
socket.on("rr-early-customer-selected", async ({ jobid, jobId, selectedCustomerId, custNo, create } = {}, ack) => {
|
||||
const rid = resolveJobId(jobid || jobId, { jobid, jobId }, null);
|
||||
let bodyshop = null;
|
||||
let job = null;
|
||||
let createdCustomer = false;
|
||||
|
||||
try {
|
||||
if (!rid) throw new Error("jobid required");
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3} rr-early-customer-selected`, {
|
||||
jobid: rid,
|
||||
custNo,
|
||||
selectedCustomerId,
|
||||
create: !!create
|
||||
});
|
||||
|
||||
const ns = getTransactionType(rid);
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.0a} Raw parameters received`, {
|
||||
custNo: custNo,
|
||||
custNoType: typeof custNo,
|
||||
selectedCustomerId: selectedCustomerId,
|
||||
create: create
|
||||
});
|
||||
|
||||
let selectedCustNo =
|
||||
(custNo && String(custNo)) ||
|
||||
(selectedCustomerId && String(selectedCustomerId)) ||
|
||||
(await redisHelpers.getSessionTransactionData(socket.id, ns, RRCacheEnums.SelectedCustomer));
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.0b} After initial resolution`, {
|
||||
selectedCustNo,
|
||||
selectedCustNoType: typeof selectedCustNo
|
||||
});
|
||||
|
||||
// Filter out invalid values
|
||||
if (selectedCustNo === "undefined" || selectedCustNo === "null" || (selectedCustNo && selectedCustNo.trim() === "")) {
|
||||
selectedCustNo = null;
|
||||
}
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.0} Resolved customer selection`, {
|
||||
selectedCustNo,
|
||||
willCreateNew: create === true || !selectedCustNo
|
||||
});
|
||||
|
||||
job = await redisHelpers.getSessionTransactionData(socket.id, ns, RRCacheEnums.JobData);
|
||||
|
||||
const txEnvelope = (await redisHelpers.getSessionTransactionData(socket.id, ns, RRCacheEnums.txEnvelope)) || {};
|
||||
|
||||
if (!job) throw new Error("Staged JobData not found (run rr-create-early-ro first).");
|
||||
|
||||
const { bodyshopId } = await getSessionOrSocket(redisHelpers, socket);
|
||||
|
||||
bodyshop = await getBodyshopForSocket({ bodyshopId, socket });
|
||||
|
||||
// Create customer (if requested or none chosen)
|
||||
if (create === true || !selectedCustNo) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.1} Creating RR customer`);
|
||||
|
||||
const created = await createRRCustomer({ bodyshop, job, socket });
|
||||
selectedCustNo = String(created?.customerNo || "");
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.2} Created customer`, {
|
||||
custNo: selectedCustNo,
|
||||
createdCustomerNo: created?.customerNo
|
||||
});
|
||||
|
||||
if (!selectedCustNo || selectedCustNo === "undefined" || selectedCustNo.trim() === "") {
|
||||
throw new Error("RR create customer returned no valid custNo");
|
||||
}
|
||||
|
||||
createdCustomer = true;
|
||||
}
|
||||
|
||||
// VIN owner pre-check
|
||||
try {
|
||||
const vehQ = makeVehicleSearchPayloadFromJob(job);
|
||||
if (vehQ && vehQ.kind === "vin" && job?.v_vin) {
|
||||
const vinResponse = await rrCombinedSearch(bodyshop, vehQ);
|
||||
|
||||
CreateRRLogEvent(socket, "SILLY", `VIN owner pre-check response (early RO)`, { response: vinResponse });
|
||||
|
||||
const vinBlocks = Array.isArray(vinResponse?.data) ? vinResponse.data : [];
|
||||
|
||||
try {
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
ns,
|
||||
RRCacheEnums.VINCandidates,
|
||||
vinBlocks,
|
||||
defaultRRTTL
|
||||
);
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
|
||||
const ownersSet = ownersFromVinBlocks(vinBlocks, job.v_vin);
|
||||
|
||||
if (ownersSet?.size) {
|
||||
const sel = String(selectedCustNo);
|
||||
|
||||
if (!ownersSet.has(sel)) {
|
||||
const [existingOwner] = Array.from(ownersSet).map(String);
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.2a} VIN exists; switching to VIN owner`, {
|
||||
vin: job.v_vin,
|
||||
selected: sel,
|
||||
existingOwner
|
||||
});
|
||||
selectedCustNo = existingOwner;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
CreateRRLogEvent(socket, "WARN", `VIN owner pre-check failed; continuing with selected customer (early RO)`, {
|
||||
error: e?.message
|
||||
});
|
||||
}
|
||||
|
||||
// Cache final/effective customer selection
|
||||
const effectiveCustNo = String(selectedCustNo);
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
ns,
|
||||
RRCacheEnums.SelectedCustomer,
|
||||
effectiveCustNo,
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-3.3} Cached selected customer`, { custNo: effectiveCustNo });
|
||||
|
||||
// Build client & routing
|
||||
const { client, opts } = await buildClientAndOpts(bodyshop);
|
||||
const routing = opts?.routing || client?.opts?.routing || null;
|
||||
if (!routing?.dealerNumber) throw new Error("ensureRRServiceVehicle: routing.dealerNumber required");
|
||||
|
||||
// Reconstruct a lightweight tx object
|
||||
const tx = {
|
||||
jobData: {
|
||||
...job,
|
||||
vin: job?.v_vin
|
||||
},
|
||||
txEnvelope
|
||||
};
|
||||
|
||||
const vin = resolveVin({ tx, job });
|
||||
|
||||
if (!vin) {
|
||||
CreateRRLogEvent(socket, "ERROR", "{EARLY-3.x} No VIN found for ensureRRServiceVehicle", { jobid: rid });
|
||||
throw new Error("ensureRRServiceVehicle: vin required");
|
||||
}
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", "{EARLY-3.4} ensureRRServiceVehicle: starting", {
|
||||
jobid: rid,
|
||||
selectedCustomerNo: effectiveCustNo,
|
||||
vin,
|
||||
dealerNumber: routing.dealerNumber,
|
||||
storeNumber: routing.storeNumber,
|
||||
areaNumber: routing.areaNumber
|
||||
});
|
||||
|
||||
const ensured = await ensureRRServiceVehicle({
|
||||
client,
|
||||
routing,
|
||||
bodyshop,
|
||||
selectedCustomerNo: effectiveCustNo,
|
||||
custNo: effectiveCustNo,
|
||||
customerNo: effectiveCustNo,
|
||||
vin,
|
||||
job,
|
||||
socket,
|
||||
redisHelpers
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", "{EARLY-3.5} ensureRRServiceVehicle: done", ensured);
|
||||
|
||||
const cachedAdvisor = await redisHelpers.getSessionTransactionData(socket.id, ns, RRCacheEnums.AdvisorNo);
|
||||
const advisorNo = readAdvisorNo({ txEnvelope }, cachedAdvisor);
|
||||
|
||||
if (!advisorNo) {
|
||||
CreateRRLogEvent(socket, "ERROR", `Advisor is required (advisorNo) for early RO`);
|
||||
await insertRRFailedExportLog({
|
||||
socket,
|
||||
jobId: rid,
|
||||
job,
|
||||
bodyshop,
|
||||
error: new Error("Advisor is required (advisorNo)."),
|
||||
classification: { errorCode: "RR_MISSING_ADVISOR", friendlyMessage: "Advisor is required." }
|
||||
});
|
||||
socket.emit("export-failed", { vendor: "rr", jobId: rid, error: "Advisor is required (advisorNo)." });
|
||||
return ack?.({ ok: false, error: "Advisor is required (advisorNo)." });
|
||||
}
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
ns,
|
||||
RRCacheEnums.AdvisorNo,
|
||||
String(advisorNo),
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
// CREATE MINIMAL RO (early creation)
|
||||
CreateRRLogEvent(socket, "DEBUG", `{EARLY-4} Creating minimal RR RO`);
|
||||
const result = await createMinimalRRRepairOrder({
|
||||
bodyshop,
|
||||
job,
|
||||
selectedCustomer: { customerNo: effectiveCustNo, custNo: effectiveCustNo },
|
||||
advisorNo: String(advisorNo),
|
||||
txEnvelope,
|
||||
socket,
|
||||
svId: ensured?.svId || null
|
||||
});
|
||||
|
||||
// Cache raw export result + pending RO number
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
ns,
|
||||
RRCacheEnums.ExportResult,
|
||||
result || {},
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
if (result?.success) {
|
||||
const data = result?.data || {};
|
||||
|
||||
// Prefer explicit return from export function; then fall back to fields
|
||||
const dmsRoNo = result?.roNo ?? data?.dmsRoNo ?? null;
|
||||
|
||||
const outsdRoNo = data?.outsdRoNo ?? job?.ro_number ?? job?.id ?? null;
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", "Early RO created - checking dmsRoNo", {
|
||||
dmsRoNo,
|
||||
resultRoNo: result?.roNo,
|
||||
dataRoNo: data?.dmsRoNo,
|
||||
jobId: rid
|
||||
});
|
||||
|
||||
// ✅ Persist DMS RO number, customer ID, advisor ID, and mileage on the job
|
||||
if (dmsRoNo) {
|
||||
const mileageIn = txEnvelope?.kmin ?? null;
|
||||
CreateRRLogEvent(socket, "DEBUG", "Calling setJobDmsIdForSocket", {
|
||||
jobId: rid,
|
||||
dmsId: dmsRoNo,
|
||||
customerId: effectiveCustNo,
|
||||
advisorId: String(advisorNo),
|
||||
mileageIn
|
||||
});
|
||||
await setJobDmsIdForSocket({
|
||||
socket,
|
||||
jobId: rid,
|
||||
dmsId: dmsRoNo,
|
||||
dmsCustomerId: effectiveCustNo,
|
||||
dmsAdvisorId: String(advisorNo),
|
||||
mileageIn
|
||||
});
|
||||
} else {
|
||||
CreateRRLogEvent(socket, "WARN", "RR early RO creation succeeded but no DMS RO number was returned", {
|
||||
jobId: rid,
|
||||
resultPreview: {
|
||||
roNo: result?.roNo,
|
||||
data: {
|
||||
dmsRoNo: data?.dmsRoNo,
|
||||
outsdRoNo: data?.outsdRoNo
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
ns,
|
||||
RRCacheEnums.PendingRO,
|
||||
{
|
||||
outsdRoNo,
|
||||
dmsRoNo,
|
||||
customerNo: String(effectiveCustNo),
|
||||
advisorNo: String(advisorNo),
|
||||
vin: job?.v_vin || null,
|
||||
earlyRoCreated: true // Flag to indicate this was an early RO
|
||||
},
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", `{EARLY-5} Minimal RO created successfully`, {
|
||||
dmsRoNo: dmsRoNo || null,
|
||||
outsdRoNo: outsdRoNo || null
|
||||
});
|
||||
|
||||
// Mark success in export logs
|
||||
await markRRExportSuccess({
|
||||
socket,
|
||||
jobId: rid,
|
||||
job,
|
||||
bodyshop,
|
||||
result,
|
||||
isEarlyRo: true
|
||||
});
|
||||
|
||||
// Tell FE that early RO was created
|
||||
socket.emit("rr-early-ro-created", { jobId: rid, dmsRoNo, outsdRoNo });
|
||||
|
||||
// Emit result
|
||||
socket.emit("rr-create-early-ro:result", { jobId: rid, bodyshopId: bodyshop?.id, result });
|
||||
|
||||
// ACK with RO details
|
||||
ack?.({
|
||||
ok: true,
|
||||
dmsRoNo,
|
||||
outsdRoNo,
|
||||
result,
|
||||
custNo: String(effectiveCustNo),
|
||||
createdCustomer,
|
||||
earlyRoCreated: true
|
||||
});
|
||||
} else {
|
||||
// classify & fail
|
||||
const tx = result?.statusBlocks?.transaction;
|
||||
|
||||
const vendorStatusCode = Number(
|
||||
result?.roStatus?.statusCode ?? result?.roStatus?.StatusCode ?? tx?.statusCode ?? tx?.StatusCode
|
||||
);
|
||||
|
||||
const vendorMessage =
|
||||
result?.roStatus?.message ??
|
||||
result?.roStatus?.Message ??
|
||||
tx?.message ??
|
||||
tx?.Message ??
|
||||
result?.error ??
|
||||
"RR early RO creation failed";
|
||||
|
||||
const cls = classifyRRVendorError({
|
||||
code: vendorStatusCode,
|
||||
message: vendorMessage
|
||||
});
|
||||
|
||||
CreateRRLogEvent(socket, "ERROR", `Early RO creation failed`, {
|
||||
roStatus: result?.roStatus,
|
||||
statusBlocks: result?.statusBlocks,
|
||||
classification: cls
|
||||
});
|
||||
|
||||
await insertRRFailedExportLog({
|
||||
socket,
|
||||
jobId: rid,
|
||||
job,
|
||||
bodyshop,
|
||||
error: new Error(cls.friendlyMessage || result?.error || "RR early RO creation failed"),
|
||||
classification: cls,
|
||||
result
|
||||
});
|
||||
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: cls?.friendlyMessage || result?.error || "RR early RO creation failed",
|
||||
...cls
|
||||
});
|
||||
|
||||
ack?.({
|
||||
ok: false,
|
||||
error: cls.friendlyMessage || result?.error || "RR early RO creation failed",
|
||||
result,
|
||||
classification: cls
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
const cls = classifyRRVendorError(error);
|
||||
|
||||
CreateRRLogEvent(socket, "ERROR", `Error during RR early RO creation (customer-selected)`, {
|
||||
error: error.message,
|
||||
vendorStatusCode: cls.vendorStatusCode,
|
||||
code: cls.errorCode,
|
||||
friendly: cls.friendlyMessage,
|
||||
stack: error.stack,
|
||||
jobid: rid
|
||||
});
|
||||
|
||||
try {
|
||||
if (!bodyshop || !job) {
|
||||
const { bodyshopId } = await getSessionOrSocket(redisHelpers, socket);
|
||||
bodyshop = bodyshop || (await getBodyshopForSocket({ bodyshopId, socket }));
|
||||
job =
|
||||
job ||
|
||||
(await redisHelpers.getSessionTransactionData(socket.id, getTransactionType(rid), RRCacheEnums.JobData));
|
||||
}
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
|
||||
await insertRRFailedExportLog({
|
||||
socket,
|
||||
jobId: rid,
|
||||
job,
|
||||
bodyshop,
|
||||
error,
|
||||
classification: cls
|
||||
});
|
||||
|
||||
try {
|
||||
socket.emit("export-failed", {
|
||||
vendor: "rr",
|
||||
jobId: rid,
|
||||
error: error.message,
|
||||
...cls
|
||||
});
|
||||
socket.emit("rr-user-notice", { jobId: rid, ...cls });
|
||||
} catch {
|
||||
//
|
||||
}
|
||||
|
||||
ack?.({ ok: false, error: cls.friendlyMessage || error.message, classification: cls });
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("rr-export-job", async ({ jobid, jobId, txEnvelope } = {}, ack) => {
|
||||
const rid = resolveJobId(jobid || jobId, { jobId, jobid }, null);
|
||||
|
||||
try {
|
||||
@@ -422,6 +938,139 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
const { bodyshopId } = await getSessionOrSocket(redisHelpers, socket);
|
||||
const bodyshop = await getBodyshopForSocket({ bodyshopId, socket });
|
||||
|
||||
// Check if this job already has an early RO - if so, use stored IDs and skip customer search
|
||||
const hasEarlyRO = !!job?.dms_id;
|
||||
|
||||
if (hasEarlyRO) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `{2} Early RO exists - using stored customer/advisor`, {
|
||||
dms_id: job.dms_id,
|
||||
dms_customer_id: job.dms_customer_id,
|
||||
dms_advisor_id: job.dms_advisor_id
|
||||
});
|
||||
|
||||
// Cache the stored customer/advisor IDs for the next step
|
||||
if (job.dms_customer_id) {
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.SelectedCustomer,
|
||||
String(job.dms_customer_id),
|
||||
defaultRRTTL
|
||||
);
|
||||
}
|
||||
if (job.dms_advisor_id) {
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.AdvisorNo,
|
||||
String(job.dms_advisor_id),
|
||||
defaultRRTTL
|
||||
);
|
||||
}
|
||||
|
||||
// Emit empty customer list to frontend (won't show modal)
|
||||
socket.emit("rr-select-customer", []);
|
||||
|
||||
// Continue directly with the export by calling the selected customer handler logic inline
|
||||
// This is essentially the same as if user selected the stored customer
|
||||
const selectedCustNo = job.dms_customer_id;
|
||||
|
||||
if (!selectedCustNo) {
|
||||
throw new Error("Early RO exists but no customer ID stored");
|
||||
}
|
||||
|
||||
// Continue with ensureRRServiceVehicle and export (same as rr-selected-customer handler)
|
||||
const { client, opts } = await buildClientAndOpts(bodyshop);
|
||||
const routing = opts?.routing || client?.opts?.routing || null;
|
||||
if (!routing?.dealerNumber) throw new Error("ensureRRServiceVehicle: routing.dealerNumber required");
|
||||
|
||||
const tx = {
|
||||
jobData: {
|
||||
...job,
|
||||
vin: job?.v_vin
|
||||
},
|
||||
txEnvelope
|
||||
};
|
||||
|
||||
const vin = resolveVin({ tx, job });
|
||||
if (!vin) {
|
||||
CreateRRLogEvent(socket, "ERROR", "{3.x} No VIN found for ensureRRServiceVehicle", { jobid: rid });
|
||||
throw new Error("ensureRRServiceVehicle: vin required");
|
||||
}
|
||||
|
||||
const ensured = await ensureRRServiceVehicle({
|
||||
client,
|
||||
routing,
|
||||
bodyshop,
|
||||
selectedCustomerNo: String(selectedCustNo),
|
||||
custNo: String(selectedCustNo),
|
||||
customerNo: String(selectedCustNo),
|
||||
vin,
|
||||
job,
|
||||
socket,
|
||||
redisHelpers
|
||||
});
|
||||
|
||||
const advisorNo = job.dms_advisor_id || readAdvisorNo({ txEnvelope }, await redisHelpers.getSessionTransactionData(socket.id, getTransactionType(rid), RRCacheEnums.AdvisorNo));
|
||||
|
||||
if (!advisorNo) {
|
||||
throw new Error("Advisor is required (advisorNo).");
|
||||
}
|
||||
|
||||
// UPDATE existing RO with full data
|
||||
CreateRRLogEvent(socket, "DEBUG", `{4} Updating existing RR RO with full data`, { dmsRoNo: job.dms_id });
|
||||
const result = await updateRRRepairOrderWithFullData({
|
||||
bodyshop,
|
||||
job,
|
||||
selectedCustomer: { customerNo: String(selectedCustNo), custNo: String(selectedCustNo) },
|
||||
advisorNo: String(advisorNo),
|
||||
txEnvelope,
|
||||
socket,
|
||||
svId: ensured?.svId || null,
|
||||
roNo: job.dms_id
|
||||
});
|
||||
|
||||
if (!result?.success) {
|
||||
throw new Error(result?.roStatus?.message || "Failed to update RR Repair Order");
|
||||
}
|
||||
|
||||
const dmsRoNo = result?.roNo ?? result?.data?.dmsRoNo ?? job.dms_id;
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.ExportResult,
|
||||
result || {},
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
socket.id,
|
||||
getTransactionType(rid),
|
||||
RRCacheEnums.PendingRO,
|
||||
{
|
||||
outsdRoNo: result?.data?.outsdRoNo ?? job?.ro_number ?? job?.id ?? null,
|
||||
dmsRoNo,
|
||||
customerNo: String(selectedCustNo),
|
||||
advisorNo: String(advisorNo),
|
||||
vin: job?.v_vin || null,
|
||||
isUpdate: true
|
||||
},
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
CreateRRLogEvent(socket, "INFO", `RR Repair Order updated successfully`, {
|
||||
dmsRoNo,
|
||||
jobId: rid
|
||||
});
|
||||
|
||||
// For early RO flow, only emit validation-required (not export-job:result)
|
||||
// since the export is not complete yet - we're just waiting for validation
|
||||
socket.emit("rr-validation-required", { dmsRoNo, jobId: rid });
|
||||
|
||||
return ack?.({ ok: true, skipCustomerSelection: true, dmsRoNo });
|
||||
}
|
||||
|
||||
CreateRRLogEvent(socket, "DEBUG", `{2} Running multi-search (Full Name + VIN)`);
|
||||
|
||||
const candidates = await rrMultiCustomerSearch({ bodyshop, job, socket, redisHelpers });
|
||||
@@ -620,17 +1269,59 @@ const registerRREvents = ({ socket, redisHelpers }) => {
|
||||
defaultRRTTL
|
||||
);
|
||||
|
||||
// CREATE/UPDATE (first step only)
|
||||
CreateRRLogEvent(socket, "DEBUG", `{4} Performing RR create/update (step 1)`);
|
||||
const result = await exportJobToRR({
|
||||
bodyshop,
|
||||
job,
|
||||
selectedCustomer: { customerNo: effectiveCustNo, custNo: effectiveCustNo },
|
||||
advisorNo: String(advisorNo),
|
||||
txEnvelope,
|
||||
socket,
|
||||
svId: ensured?.svId || null
|
||||
});
|
||||
// Check if this job already has an early RO created (check job.dms_id)
|
||||
// If so, we'll use stored customer/advisor IDs and do a full data UPDATE instead of CREATE
|
||||
const existingDmsId = job?.dms_id || null;
|
||||
const shouldUpdate = !!existingDmsId;
|
||||
|
||||
// When updating an early RO, use stored customer/advisor IDs
|
||||
let finalEffectiveCustNo = effectiveCustNo;
|
||||
let finalAdvisorNo = advisorNo;
|
||||
|
||||
if (shouldUpdate && job?.dms_customer_id) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `Using stored customer ID from early RO`, {
|
||||
storedCustomerId: job.dms_customer_id,
|
||||
originalCustomerId: effectiveCustNo
|
||||
});
|
||||
finalEffectiveCustNo = String(job.dms_customer_id);
|
||||
}
|
||||
|
||||
if (shouldUpdate && job?.dms_advisor_id) {
|
||||
CreateRRLogEvent(socket, "DEBUG", `Using stored advisor ID from early RO`, {
|
||||
storedAdvisorId: job.dms_advisor_id,
|
||||
originalAdvisorId: advisorNo
|
||||
});
|
||||
finalAdvisorNo = String(job.dms_advisor_id);
|
||||
}
|
||||
|
||||
let result;
|
||||
|
||||
if (shouldUpdate) {
|
||||
// UPDATE existing RO with full data
|
||||
CreateRRLogEvent(socket, "DEBUG", `{4} Updating existing RR RO with full data`, { dmsRoNo: existingDmsId });
|
||||
result = await updateRRRepairOrderWithFullData({
|
||||
bodyshop,
|
||||
job,
|
||||
selectedCustomer: { customerNo: finalEffectiveCustNo, custNo: finalEffectiveCustNo },
|
||||
advisorNo: String(finalAdvisorNo),
|
||||
txEnvelope,
|
||||
socket,
|
||||
svId: ensured?.svId || null,
|
||||
roNo: existingDmsId
|
||||
});
|
||||
} else {
|
||||
// CREATE new RO (legacy flow - full data on first create)
|
||||
CreateRRLogEvent(socket, "DEBUG", `{4} Performing RR create (step 1 - full data)`);
|
||||
result = await exportJobToRR({
|
||||
bodyshop,
|
||||
job,
|
||||
selectedCustomer: { customerNo: finalEffectiveCustNo, custNo: finalEffectiveCustNo },
|
||||
advisorNo: String(finalAdvisorNo),
|
||||
txEnvelope,
|
||||
socket,
|
||||
svId: ensured?.svId || null
|
||||
});
|
||||
}
|
||||
|
||||
// Cache raw export result + pending RO number for finalize
|
||||
await redisHelpers.setSessionTransactionData(
|
||||
|
||||
@@ -8,6 +8,12 @@ const client = require("../graphql-client/graphql-client").client;
|
||||
*/
|
||||
// TTL (seconds) for cached bodyshop records — 1 hour.
const BODYSHOP_CACHE_TTL = 3600; // 1 hour

/**
 * Chatter API token cache TTL in seconds
 * @type {number}
 */
const CHATTER_TOKEN_CACHE_TTL = 3600; // 1 hour
|
||||
|
||||
/**
 * Build the Redis key under which a bodyshop record is cached.
 * @param bodyshopId
 * @returns {`bodyshop-cache:${string}`}
 */
const getBodyshopCacheKey = (bodyshopId) => {
  return `bodyshop-cache:${bodyshopId}`;
};
|
||||
|
||||
/**
 * Build the Redis key under which a company's Chatter API token is cached.
 * @param companyId
 * @returns {`chatter-token:${string}`}
 */
const getChatterTokenCacheKey = (companyId) => {
  return `chatter-token:${companyId}`;
};
|
||||
|
||||
/**
 * Read a field from the session's provider cache namespace.
 * Thin wrapper that delegates to getSessionData with the `:provider` suffix.
 * @param ns session namespace
 * @param field field to read from the provider cache
 */
const getProviderCache = (ns, field) => {
  return getSessionData(`${ns}:provider`, field);
};
|
||||
|
||||
/**
 * Get Chatter API token from Redis cache.
 * Returns null (and logs) on any Redis failure rather than throwing,
 * so callers can fall back to fetching a fresh token.
 * @param companyId
 * @returns {Promise<string|null>}
 */
const getChatterToken = async (companyId) => {
  const cacheKey = getChatterTokenCacheKey(companyId);
  try {
    // Redis returns null when the key is absent or expired.
    return await pubClient.get(cacheKey);
  } catch (err) {
    logger.log("get-chatter-token-from-redis", "ERROR", "redis", null, {
      companyId,
      error: err.message
    });
    return null;
  }
};
|
||||
|
||||
/**
 * Set Chatter API token in Redis cache with a fixed TTL.
 * Logs and rethrows on Redis failure so callers can react.
 * @param companyId
 * @param token
 * @returns {Promise<void>}
 */
const setChatterToken = async (companyId, token) => {
  const cacheKey = getChatterTokenCacheKey(companyId);
  try {
    // NOTE(review): SET followed by EXPIRE is two round-trips and not atomic —
    // if the process dies in between, the token is cached without a TTL.
    // A single SET with an EX option would be preferable; confirm which Redis
    // client API version pubClient exposes before changing.
    await pubClient.set(cacheKey, token);
    await pubClient.expire(cacheKey, CHATTER_TOKEN_CACHE_TTL);
    devDebugLogger("chatter-token-cache-set", {
      companyId,
      action: "Token cached"
    });
  } catch (err) {
    logger.log("set-chatter-token-in-redis", "ERROR", "redis", null, {
      companyId,
      error: err.message
    });
    throw err;
  }
};
|
||||
|
||||
const api = {
|
||||
getUserSocketMappingKey,
|
||||
getBodyshopCacheKey,
|
||||
getChatterTokenCacheKey,
|
||||
setSessionData,
|
||||
getSessionData,
|
||||
clearSessionData,
|
||||
@@ -390,7 +447,9 @@ const applyRedisHelpers = ({ pubClient, app, logger }) => {
|
||||
getSessionTransactionData,
|
||||
clearSessionTransactionData,
|
||||
setProviderCache,
|
||||
getProviderCache
|
||||
getProviderCache,
|
||||
getChatterToken,
|
||||
setChatterToken
|
||||
};
|
||||
|
||||
Object.assign(module.exports, api);
|
||||
|
||||
Reference in New Issue
Block a user