feature/IO-3096-GlobalNotifications - Cleanup and Package bumps

This commit is contained in:
Dave Richer
2025-02-19 12:50:01 -05:00
parent 366f7b9c4a
commit 1384616d66
11 changed files with 415 additions and 198 deletions

View File

@@ -1,65 +1,87 @@
/**
* Parses an event by comparing old and new data to determine which fields have changed.
*
* This function analyzes the differences between previous (`oldData`) and current (`newData`)
* data states to identify changed fields. It determines if the event is a new entry or an update
* and optionally extracts a `jobId` based on a specified field. The result includes details
* about changed fields, the event type, and associated metadata.
*
* @param {Object} options - Configuration options for parsing the event.
* @param {Object} [options.oldData] - The previous state of the data (undefined for new entries).
* @param {Object} options.newData - The current state of the data.
* @param {string} options.trigger - The type of event trigger (e.g., 'INSERT', 'UPDATE').
* @param {string} options.table - The name of the table associated with the event.
* @param {string} [options.jobIdField] - The field name used to extract the jobId (optional).
* @returns {Object} An object containing the parsed event details:
* - {Array<string>} changedFieldNames - List of field names that have changed.
* - {Object} changedFields - Map of changed fields with their old and new values.
* - {boolean} isNew - True if the event is a new entry (no oldData provided).
* - {Object} data - The current data state (`newData`).
* - {string} trigger - The event trigger type.
* - {string} table - The table name.
* - {string|null} jobId - The extracted jobId or null if not applicable.
*/
const eventParser = async ({ oldData, newData, trigger, table, jobIdField }) => {
  // No previous state means this event represents a brand-new entry.
  const isNew = !oldData;
  let changedFields = {};
  let changedFieldNames = [];
  if (isNew) {
    // For new entries, every field in newData is "changed" (from undefined to its value).
    changedFields = Object.fromEntries(
      Object.entries(newData).map(([key, value]) => [key, { old: undefined, new: value }])
    );
    changedFieldNames = Object.keys(newData);
  } else {
    // Compare oldData with newData to detect added or modified fields.
    for (const key in newData) {
      if (Object.prototype.hasOwnProperty.call(newData, key)) {
        if (
          !Object.prototype.hasOwnProperty.call(oldData, key) || // Field didn't exist before
          JSON.stringify(oldData[key]) !== JSON.stringify(newData[key]) // Values differ (deep comparison via JSON)
        ) {
          changedFields[key] = {
            old: oldData[key], // Undefined if the field wasn't in oldData
            new: newData[key]
          };
          changedFieldNames.push(key);
        }
      }
    }
    // Identify fields removed in newData (present in oldData but absent in newData).
    for (const key in oldData) {
      if (Object.prototype.hasOwnProperty.call(oldData, key) && !Object.prototype.hasOwnProperty.call(newData, key)) {
        changedFields[key] = {
          old: oldData[key],
          new: null // Mark as removed
        };
        changedFieldNames.push(key);
      }
    }
  }
  // Extract jobId if a jobIdField was provided.
  let jobId = null;
  if (jobIdField) {
    let keyName = jobIdField;
    const prefix = "req.body.event.new.";
    // Strip the request-path prefix, if present, to isolate the actual field name.
    if (keyName.startsWith(prefix)) {
      keyName = keyName.slice(prefix.length);
    }
    // Prefer newData, fall back to oldData. Nullish coalescing (??) is used instead
    // of || so a legitimately falsy jobId (e.g. 0) is preserved rather than dropped.
    jobId = newData[keyName] ?? oldData?.[keyName] ?? null;
  }
  return {
    changedFieldNames, // Array of field names that changed
    changedFields, // Map of changed fields to their { old, new } values
    isNew, // True when the event is a new entry (no oldData)
    data: newData, // Current data state
    trigger, // Event trigger (e.g., 'INSERT', 'UPDATE')
    table, // Associated table name
    jobId // Extracted jobId or null
  };
};

View File

@@ -1,23 +0,0 @@
const path = require("path");
// Load environment-specific variables (e.g. .env.development) before anything else.
require("dotenv").config({
  path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const Queue = require("better-queue");
const logger = require("../utils/logger");

/**
 * Factory for the notification-email batch queue.
 * Batches up to 50 tasks, waiting 5s between batches; the batchDelayTimeout
 * caps how long a partial batch waits — lowering it raises the risk of
 * hitting the email rate limit.
 */
const notificationsEmailQueue = () => {
  const processBatch = (taskIds, cb) => {
    logger.log("Processing Notification Emails: ", "silly", null, null);
    cb(null);
  };
  return new Queue(processBatch, {
    batchSize: 50,
    batchDelay: 5000,
    // The lower this is, the more likely we are to hit the rate limit.
    batchDelayTimeout: 1000
  });
};

module.exports = { notificationsEmailQueue };

View File

@@ -3,22 +3,36 @@ const { Queue, Worker } = require("bullmq");
let addQueue;
let consolidateQueue;
/**
* Initializes the notification queues and workers for adding and consolidating notifications.
*
* @param {Object} options - Configuration options for queue initialization.
* @param {Object} options.pubClient - Redis client instance for queue communication.
* @param {Object} options.logger - Logger instance for logging events and debugging.
* @param {Object} options.redisHelpers - Utility functions for Redis operations.
* @param {Object} options.ioRedis - Socket.io Redis adapter for real-time event emission.
* @returns {Queue} The initialized `addQueue` instance for dispatching notifications.
*/
const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
// Only initialize if queues don't already exist
if (!addQueue || !consolidateQueue) {
logger.logger.info("Initializing Notifications Queues");
// Create queue for adding notifications
addQueue = new Queue("notificationsAdd", {
connection: pubClient,
prefix: "{BULLMQ}",
defaultJobOptions: { removeOnComplete: true, removeOnFail: true }
prefix: "{BULLMQ}", // Namespace prefix for BullMQ in Redis
defaultJobOptions: { removeOnComplete: true, removeOnFail: true } // Cleanup jobs after success/failure
});
// Create queue for consolidating notifications
consolidateQueue = new Queue("notificationsConsolidate", {
connection: pubClient,
prefix: "{BULLMQ}",
defaultJobOptions: { removeOnComplete: true, removeOnFail: true }
});
// Worker to process jobs from the addQueue
const addWorker = new Worker(
"notificationsAdd",
async (job) => {
@@ -28,27 +42,32 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
const redisKeyPrefix = `app:notifications:${jobId}`;
const notification = { key, variables, timestamp: Date.now() };
// Store notification for each recipient in Redis
for (const recipient of recipients) {
const { user } = recipient;
const userKey = `${redisKeyPrefix}:${user}`;
const existingNotifications = await pubClient.get(userKey);
const notifications = existingNotifications ? JSON.parse(existingNotifications) : [];
notifications.push(notification);
// Set with 40-second expiration to avoid stale data
await pubClient.set(userKey, JSON.stringify(notifications), "EX", 40);
logger.logger.debug(`Stored notification for ${user} under ${userKey}: ${JSON.stringify(notifications)}`);
}
const consolidateKey = `app:consolidate:${jobId}`;
// setnx ensures only one consolidation job is scheduled (atomic operation)
const flagSet = await pubClient.setnx(consolidateKey, "pending");
logger.logger.debug(`Consolidation flag set for jobId ${jobId}: ${flagSet}`);
if (flagSet) {
// Schedule consolidation job to run after a 5-second delay
await consolidateQueue.add(
"consolidate-notifications",
{ jobId, recipients },
{ jobId: `consolidate:${jobId}`, delay: 5000 }
);
logger.logger.info(`Scheduled consolidation for jobId ${jobId}`);
// Set expiration on flag to clean up after 5 minutes
await pubClient.expire(consolidateKey, 300);
} else {
logger.logger.debug(`Consolidation already scheduled for jobId ${jobId}`);
@@ -57,10 +76,11 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
{
connection: pubClient,
prefix: "{BULLMQ}",
concurrency: 5
concurrency: 5 // Process up to 5 jobs concurrently
}
);
// Worker to process jobs from the consolidateQueue
const consolidateWorker = new Worker(
"notificationsConsolidate",
async (job) => {
@@ -69,15 +89,18 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
const redisKeyPrefix = `app:notifications:${jobId}`;
const lockKey = `lock:consolidate:${jobId}`;
// Acquire a lock to prevent concurrent consolidation (NX = set if not exists)
const lockAcquired = await pubClient.set(lockKey, "locked", "NX", "EX", 10);
logger.logger.debug(`Lock acquisition for jobId ${jobId}: ${lockAcquired}`);
if (lockAcquired) {
try {
const allNotifications = {};
// Get unique user IDs to avoid duplicate processing
const uniqueUsers = [...new Set(recipients.map((r) => r.user))];
logger.logger.debug(`Unique users for jobId ${jobId}: ${uniqueUsers}`);
// Retrieve and structure notifications by user and bodyShopId
for (const user of uniqueUsers) {
const userKey = `${redisKeyPrefix}:${user}`;
const notifications = await pubClient.get(userKey);
@@ -90,7 +113,7 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
allNotifications[user] = allNotifications[user] || {};
allNotifications[user][bodyShopId] = parsedNotifications;
}
await pubClient.del(userKey);
await pubClient.del(userKey); // Clean up after retrieval
logger.logger.debug(`Deleted Redis key ${userKey}`);
} else {
logger.logger.warn(`No notifications found for ${user} under ${userKey}`);
@@ -99,6 +122,7 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
logger.logger.debug(`Consolidated notifications: ${JSON.stringify(allNotifications)}`);
// Emit notifications to users via Socket.io
for (const [user, bodyShopData] of Object.entries(allNotifications)) {
const userMapping = await redisHelpers.getUserSocketMapping(user);
logger.logger.debug(`User socket mapping for ${user}: ${JSON.stringify(userMapping)}`);
@@ -107,7 +131,11 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
if (userMapping && userMapping[bodyShopId]?.socketIds) {
userMapping[bodyShopId].socketIds.forEach((socketId) => {
logger.logger.debug(
`Emitting to socket ${socketId}: ${JSON.stringify({ jobId, bodyShopId, notifications })}`
`Emitting to socket ${socketId}: ${JSON.stringify({
jobId,
bodyShopId,
notifications
})}`
);
ioRedis.to(socketId).emit("notification", {
jobId,
@@ -124,12 +152,13 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
}
}
// Clean up consolidation flag after processing
await pubClient.del(`app:consolidate:${jobId}`);
} catch (err) {
logger.logger.error(`Consolidation error for jobId ${jobId}: ${err.message}`, { error: err });
throw err; // Re-throw to trigger failed event
throw err; // Re-throw to trigger BullMQ's failed event
} finally {
await pubClient.del(lockKey);
await pubClient.del(lockKey); // Release lock regardless of success/failure
}
} else {
logger.logger.info(`Skipped consolidation for jobId ${jobId} - lock held by another worker`);
@@ -138,42 +167,63 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
{
connection: pubClient,
prefix: "{BULLMQ}",
concurrency: 1,
limiter: { max: 1, duration: 5000 }
concurrency: 1, // Single concurrency to avoid race conditions
limiter: { max: 1, duration: 5000 } // Rate limit: 1 job every 5 seconds
}
);
// Log worker completion events
addWorker.on("completed", (job) => logger.logger.info(`Add job ${job.id} completed`));
consolidateWorker.on("completed", (job) => logger.logger.info(`Consolidate job ${job.id} completed`));
// Log worker failure events with error details
addWorker.on("failed", (job, err) =>
logger.logger.error(`Add job ${job.id} failed: ${err.message}`, { error: err })
);
consolidateWorker.on("failed", (job, err) =>
logger.logger.error(`Consolidate job ${job.id} failed: ${err.message}`, { error: err })
);
// Graceful shutdown handler for workers
const shutdown = async () => {
logger.logger.info("Closing app queue workers...");
await Promise.all([addWorker.close(), consolidateWorker.close()]);
logger.logger.info("App queue workers closed");
};
process.on("SIGTERM", shutdown);
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown); // Handle termination signal
process.on("SIGINT", shutdown); // Handle interrupt signal (e.g., Ctrl+C)
}
return addQueue; // Return the add queue for dispatching
return addQueue; // Return queue for external use
};
/**
* Retrieves the initialized `addQueue` instance.
*
* @returns {Queue} The `addQueue` instance for adding notifications.
* @throws {Error} If `addQueue` is not initialized (i.e., `loadAppQueue` wasn't called).
*/
/**
 * Retrieves the initialized `addQueue` instance.
 *
 * @returns {Queue} The `addQueue` instance for adding notifications.
 * @throws {Error} If the queue has not been initialized via `loadAppQueue`.
 */
const getQueue = () => {
  // Guard: loadAppQueue must have run during bootstrap before dispatching.
  if (!addQueue) {
    throw new Error("Add queue not initialized. Ensure loadAppQueue is called during bootstrap.");
  }
  return addQueue;
};
/**
* Dispatches notifications to the `addQueue` for processing.
*
* @param {Object} options - Options for dispatching notifications.
* @param {Array} options.appsToDispatch - Array of notification objects to dispatch.
* @param {Object} options.logger - Logger instance for logging dispatch events.
* @returns {Promise<void>} Resolves when all notifications are added to the queue.
*/
const dispatchAppsToQueue = async ({ appsToDispatch, logger }) => {
const appQueue = getQueue();
for (const app of appsToDispatch) {
const { jobId, bodyShopId, key, variables, recipients } = app;
// Unique jobId with timestamp to avoid duplicates
await appQueue.add(
"add-notification",
{ jobId, bodyShopId, key, variables, recipients },

View File

@@ -4,30 +4,44 @@ const { sendTaskEmail } = require("../../email/sendemail");
let emailQueue;
let worker;
const loadEmailQueue = async ({ pubClient, logger, redisHelpers }) => {
// Consolidate the same way the App Queue Does.
/**
* Initializes the email queue and worker for sending notifications via email.
*
* @param {Object} options - Configuration options for queue initialization.
* @param {Object} options.pubClient - Redis client instance for queue communication.
* @param {Object} options.logger - Logger instance for logging events and debugging.
* @returns {Queue} The initialized `emailQueue` instance for dispatching emails.
*/
const loadEmailQueue = async ({ pubClient, logger }) => {
// Only initialize if queue doesn't already exist
if (!emailQueue) {
logger.logger.info("Initializing Notifications Email Queue");
// Create queue for email notifications
emailQueue = new Queue("notificationsEmails", {
connection: pubClient,
prefix: "{BULLMQ}",
prefix: "{BULLMQ}", // Namespace prefix for BullMQ in Redis
defaultJobOptions: {
attempts: 3,
attempts: 3, // Retry failed jobs up to 3 times
backoff: {
type: "exponential",
delay: 1000
type: "exponential", // Exponential backoff strategy
delay: 1000 // Initial delay of 1 second
}
}
});
// Initialize the worker during queue setup
// Worker to process jobs from the emailQueue
worker = new Worker(
"notificationsEmails",
async (job) => {
const { subject, body, recipients } = job.data;
logger.logger.debug(`Processing email job ${job.id} for ${recipients.length} recipients`);
const { subject, body, recipient } = job.data;
logger.logger.debug(`Processing email job ${job.id} for recipient ${recipient}`);
// Send email to a single recipient
await sendTaskEmail({
to: recipients.map((r) => r.user),
to: recipient, // Single email address
subject,
type: "text",
text: body
@@ -38,9 +52,9 @@ const loadEmailQueue = async ({ pubClient, logger, redisHelpers }) => {
{
connection: pubClient,
prefix: "{BULLMQ}",
concurrency: 2, // Reduced for multi-node setup; adjust based on load
concurrency: 2, // Process up to 2 jobs concurrently
limiter: {
max: 10, // Max 10 jobs per minute per worker
max: 10, // Maximum of 10 jobs per minute
duration: 60 * 1000 // 1 minute
}
}
@@ -59,7 +73,7 @@ const loadEmailQueue = async ({ pubClient, logger, redisHelpers }) => {
logger.logger.error("Worker error:", { error: err });
});
// Graceful shutdown handling
// Graceful shutdown handler for the worker
const shutdown = async () => {
if (worker) {
logger.logger.info("Closing email queue worker...");
@@ -68,13 +82,19 @@ const loadEmailQueue = async ({ pubClient, logger, redisHelpers }) => {
}
};
process.on("SIGTERM", shutdown);
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown); // Handle termination signal
process.on("SIGINT", shutdown); // Handle interrupt signal (e.g., Ctrl+C)
}
return emailQueue;
return emailQueue; // Return queue for external use
};
/**
* Retrieves the initialized `emailQueue` instance.
*
* @returns {Queue} The `emailQueue` instance for sending emails.
* @throws {Error} If `emailQueue` is not initialized (i.e., `loadEmailQueue` wasn't called).
*/
/**
 * Retrieves the initialized `emailQueue` instance.
 *
 * @returns {Queue} The `emailQueue` instance for sending emails.
 * @throws {Error} If the queue has not been initialized via `loadEmailQueue`.
 */
const getQueue = () => {
  // Fail fast if bootstrap never called loadEmailQueue.
  if (!emailQueue) {
    throw new Error("Email queue not initialized. Ensure loadEmailQueue is called during bootstrap.");
  }
  return emailQueue;
};
/**
* Dispatches emails to the `emailQueue` for processing, creating one job per recipient.
*
* @param {Object} options - Options for dispatching emails.
* @param {Array} options.emailsToDispatch - Array of email objects to dispatch.
* @param {Object} options.logger - Logger instance for logging dispatch events.
* @returns {Promise<void>} Resolves when all email jobs are added to the queue.
*/
/**
 * Dispatches emails to the `emailQueue` for processing, creating one job per recipient.
 *
 * @param {Object} options - Options for dispatching emails.
 * @param {Array} options.emailsToDispatch - Array of email objects ({ subject, body, recipients }).
 * @param {Object} options.logger - Logger instance for logging dispatch events.
 * @returns {Promise<void>} Resolves when all email jobs are added to the queue.
 * @throws {Error} If the email queue has not been initialized.
 */
const dispatchEmailsToQueue = async ({ emailsToDispatch, logger }) => {
  const emailQueue = getQueue();
  for (const email of emailsToDispatch) {
    const { subject, body, recipients } = email;
    // One job per recipient so retry/backoff and rate limiting apply per email address.
    const jobs = recipients.map((recipient) => ({
      name: "send-email",
      data: {
        subject,
        body,
        recipient: recipient.user // Extract the email address from the recipient object
      }
    }));
    // Enqueue all jobs for this email in a single bulk operation.
    await emailQueue.addBulk(jobs);
    logger.logger.debug(`Added ${jobs.length} email jobs to queue for subject: ${subject}`);
  }
};

View File

@@ -1,14 +1,29 @@
const { getJobAssignmentType } = require("./stringHelpers");
/**
* Populates the recipients for app, email, and FCM notifications based on scenario watchers.
*
* @param {Object} data - The data object containing scenarioWatchers and bodyShopId.
* @param {Object} result - The result object to populate with recipients for app, email, and FCM notifications.
*/
const populateWatchers = (data, result) => {
  // Route each scenario watcher into the channel-specific recipient lists,
  // honoring the per-channel opt-in flags on the watcher record.
  for (const watcher of data.scenarioWatchers) {
    const { user, app, fcm, email } = watcher;
    if (app === true) {
      // App notifications also need the body shop context.
      result.app.recipients.push({ user, bodyShopId: data.bodyShopId });
    }
    if (fcm === true) {
      result.fcm.recipients.push(user);
    }
    if (email === true) {
      result.email.recipients.push({ user });
    }
  }
};
/**
* Builds notification data for changes to alternate transport.
*
* @param {Object} data - The data object containing job details and alternate transport changes.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const alternateTransportChangedBuilder = (data) => {
const result = {
app: {
@@ -33,6 +48,12 @@ const alternateTransportChangedBuilder = (data) => {
return result;
};
/**
* Builds notification data for bill posted events.
*
* @param {Object} data - The data object containing job and billing details.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const billPostedHandler = (data) => {
const result = {
app: {
@@ -56,6 +77,12 @@ const billPostedHandler = (data) => {
return result;
};
/**
* Builds notification data for changes to critical parts status.
*
* @param {Object} data - The data object containing job details and critical parts status changes.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const criticalPartsStatusChangedBuilder = (data) => {
const result = {
app: {
@@ -80,7 +107,14 @@ const criticalPartsStatusChangedBuilder = (data) => {
return result;
};
/**
* Builds notification data for completed intake or delivery checklists.
*
* @param {Object} data - The data object containing job details and checklist changes.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const intakeDeliveryChecklistCompletedBuilder = (data) => {
// Determine checklist type based on which field was changed
const checklistType = data.changedFields.intakechecklist ? "intake" : "delivery";
const result = {
app: {
@@ -105,6 +139,12 @@ const intakeDeliveryChecklistCompletedBuilder = (data) => {
return result;
};
/**
* Builds notification data for job assignment events.
*
* @param {Object} data - The data object containing job details and scenario fields.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const jobAssignedToMeBuilder = (data) => {
const result = {
app: {
@@ -128,6 +168,12 @@ const jobAssignedToMeBuilder = (data) => {
return result;
};
/**
* Builds notification data for jobs added to production.
*
* @param {Object} data - The data object containing job details.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const jobsAddedToProductionBuilder = (data) => {
const result = {
app: {
@@ -149,7 +195,12 @@ const jobsAddedToProductionBuilder = (data) => {
return result;
};
// Verified
/**
* Builds notification data for job status changes.
*
* @param {Object} data - The data object containing job details and status changes.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const jobStatusChangeBuilder = (data) => {
const result = {
app: {
@@ -164,7 +215,7 @@ const jobStatusChangeBuilder = (data) => {
},
email: {
subject: `The status of ${data?.jobRoNumber} (${data.bodyShopName}) has changed from ${data.changedFields.status.old} to ${data.data.status}`,
body: `...`,
body: `...`, // Placeholder indicating email body may need further customization
recipients: []
},
fcm: { recipients: [] }
@@ -174,6 +225,12 @@ const jobStatusChangeBuilder = (data) => {
return result;
};
/**
* Builds notification data for new media added or reassigned events.
*
* @param {Object} data - The data object containing job details.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const newMediaAddedReassignedBuilder = (data) => {
const result = {
app: {
@@ -195,7 +252,12 @@ const newMediaAddedReassignedBuilder = (data) => {
return result;
};
// Verified
/**
* Builds notification data for new notes added to a job.
*
* @param {Object} data - The data object containing job details and note text.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const newNoteAddedBuilder = (data) => {
const result = {
app: {
@@ -219,6 +281,12 @@ const newNoteAddedBuilder = (data) => {
return result;
};
/**
* Builds notification data for new time tickets posted.
*
* @param {Object} data - The data object containing job details.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const newTimeTicketPostedBuilder = (data) => {
const result = {
app: {
@@ -240,6 +308,12 @@ const newTimeTicketPostedBuilder = (data) => {
return result;
};
/**
* Builds notification data for parts marked as back-ordered.
*
* @param {Object} data - The data object containing job details and parts status changes.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const partMarkedBackOrderedBuilder = (data) => {
const result = {
app: {
@@ -264,6 +338,12 @@ const partMarkedBackOrderedBuilder = (data) => {
return result;
};
/**
* Builds notification data for payment collection events.
*
* @param {Object} data - The data object containing job and payment details.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const paymentCollectedCompletedBuilder = (data) => {
const result = {
app: {
@@ -287,6 +367,12 @@ const paymentCollectedCompletedBuilder = (data) => {
return result;
};
/**
* Builds notification data for changes to scheduled dates.
*
* @param {Object} data - The data object containing job details and scheduling changes.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const scheduledDatesChangedBuilder = (data) => {
const result = {
app: {
@@ -315,6 +401,12 @@ const scheduledDatesChangedBuilder = (data) => {
return result;
};
/**
* Builds notification data for supplement imported events.
*
* @param {Object} data - The data object containing job and supplement details.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const supplementImportedBuilder = (data) => {
const result = {
app: {
@@ -338,6 +430,12 @@ const supplementImportedBuilder = (data) => {
return result;
};
/**
* Builds notification data for tasks updated or created.
*
* @param {Object} data - The data object containing job details and task event type.
* @returns {Object} Notification data structured for app, email, and FCM channels.
*/
const tasksUpdatedCreatedBuilder = (data) => {
const result = {
app: {

View File

@@ -2,6 +2,9 @@
* @module scenarioParser
* @description
* This module exports a function that parses an event and triggers notification scenarios based on the event data.
* It integrates with event parsing utilities, GraphQL queries, and notification queues to manage the dispatching
* of notifications via email and app channels. The function processes event data, identifies relevant scenarios,
* queries user notification preferences, and dispatches notifications accordingly.
*/
const eventParser = require("./eventParser");
@@ -9,23 +12,28 @@ const { client: gqlClient } = require("../graphql-client/graphql-client");
const queries = require("../graphql-client/queries");
const { isEmpty, isFunction } = require("lodash");
const { getMatchingScenarios } = require("./scenarioMapperr");
const consoleDir = require("../utils/consoleDir");
const { dispatchEmailsToQueue } = require("./queues/emailQueue");
const { dispatchAppsToQueue } = require("./queues/appQueue");
/**
* Parses an event and determines matching scenarios for notifications.
* Queries job watchers and notification settings before triggering scenario builders.
*
* @param {Object} req - The request object containing event data, trigger, table, and logger.
* @param {string} jobIdField - The field name used to extract the job ID from the event data.
* @returns {Promise<void>} Resolves when the parsing and notification dispatching process is complete.
* @throws {Error} If required request fields (event data, trigger, or table) or body shop data are missing.
*/
const scenarioParser = async (req, jobIdField) => {
const { event, trigger, table } = req.body;
const { logger } = req;
// Validate that required fields are present in the request body
if (!event?.data || !trigger || !table) {
throw new Error("Missing required request fields: event data, trigger, or table.");
}
// Step 1: Parse event data to extract necessary details.
// Step 1: Parse the event data to extract details like job ID and changed fields
const eventData = await eventParser({
newData: event.data.new,
oldData: event.data.old,
@@ -34,11 +42,12 @@ const scenarioParser = async (req, jobIdField) => {
jobIdField
});
// Step 2: Query job watchers for the given job ID.
// Step 2: Query job watchers associated with the job ID using GraphQL
const watcherData = await gqlClient.request(queries.GET_JOB_WATCHERS, {
jobid: eventData.jobId
});
// Transform watcher data into a simplified format with email and employee details
const jobWatchers = watcherData?.job_watchers_aggregate?.nodes?.map((watcher) => ({
email: watcher.user_email,
firstName: watcher?.user?.employee?.first_name,
@@ -46,21 +55,23 @@ const scenarioParser = async (req, jobIdField) => {
employeeId: watcher?.user?.employee?.id
}));
// Exit early if no job watchers are found for this job
if (isEmpty(jobWatchers)) {
return;
}
// Step 3: Retrieve body shop information from the job.
// Step 3: Extract body shop information from the job data
const bodyShopId = watcherData?.job?.bodyshop?.id;
const bodyShopName = watcherData?.job?.bodyshop?.shopname;
const jobRoNumber = watcherData?.job?.ro_number;
const jobClaimNumber = watcherData?.job?.clm_no;
// Validate that body shop data exists, as it's required for notifications
if (!bodyShopId || !bodyShopName) {
throw new Error("No bodyshop data found for this job.");
}
// Step 4: Determine matching scenarios based on event data.
// Step 4: Identify scenarios that match the event data and job context
const matchingScenarios = getMatchingScenarios({
...eventData,
jobWatchers,
@@ -68,10 +79,12 @@ const scenarioParser = async (req, jobIdField) => {
bodyShopName
});
// Exit early if no matching scenarios are identified
if (isEmpty(matchingScenarios)) {
return;
}
// Combine event data with additional context for scenario processing
const finalScenarioData = {
...eventData,
jobWatchers,
@@ -80,22 +93,24 @@ const scenarioParser = async (req, jobIdField) => {
matchingScenarios
};
// Step 5: Query notification settings for job watchers.
// Step 5: Query notification settings for the job watchers
const associationsData = await gqlClient.request(queries.GET_NOTIFICATION_ASSOCIATIONS, {
emails: jobWatchers.map((x) => x.email),
shopid: bodyShopId
});
// Exit early if no notification associations are found
if (isEmpty(associationsData?.associations)) {
return;
}
// Step 6: Filter scenario watchers based on enabled notification methods.
// Step 6: Filter scenario watchers based on their enabled notification methods
finalScenarioData.matchingScenarios = finalScenarioData.matchingScenarios.map((scenario) => ({
...scenario,
scenarioWatchers: associationsData.associations
.filter((assoc) => {
const settings = assoc.notification_settings && assoc.notification_settings[scenario.key];
// Include only watchers with at least one enabled notification method (app, email, or FCM)
return settings && (settings.app || settings.email || settings.fcm);
})
.map((assoc) => {
@@ -103,6 +118,7 @@ const scenarioParser = async (req, jobIdField) => {
const watcherEmail = assoc.user || assoc.useremail;
const matchingWatcher = jobWatchers.find((watcher) => watcher.email === watcherEmail);
// Build watcher object with notification preferences and personal details
return {
user: watcherEmail,
email: settings.email,
@@ -115,21 +131,23 @@ const scenarioParser = async (req, jobIdField) => {
})
}));
// Exit early if no scenarios have eligible watchers after filtering
if (isEmpty(finalScenarioData?.matchingScenarios)) {
return;
}
// Step 7: Trigger scenario builders for matching scenarios with eligible watchers.
// Step 7: Build and collect scenarios to dispatch notifications for
const scenariosToDispatch = [];
for (const scenario of finalScenarioData.matchingScenarios) {
// Skip if no watchers or no builder function is defined for the scenario
if (isEmpty(scenario.scenarioWatchers) || !isFunction(scenario.builder)) {
continue;
}
let eligibleWatchers = scenario.scenarioWatchers;
// Ensure watchers are only notified if they are assigned to the changed field.
// Filter watchers to only those assigned to changed fields, if specified
if (scenario.matchToUserFields && scenario.matchToUserFields.length > 0) {
eligibleWatchers = scenario.scenarioWatchers.filter((watcher) =>
scenario.matchToUserFields.some(
@@ -138,14 +156,16 @@ const scenarioParser = async (req, jobIdField) => {
);
}
// Skip if no watchers remain after filtering
if (isEmpty(eligibleWatchers)) {
continue;
}
// Step 8: Filter scenario fields to only include changed fields.
// Step 8: Filter scenario fields to include only those that changed
const filteredScenarioFields =
scenario.fields?.filter((field) => eventData.changedFieldNames.includes(field)) || [];
// Use the scenario's builder to construct the notification data
scenariosToDispatch.push(
scenario.builder({
trigger: finalScenarioData.trigger.name,
@@ -167,30 +187,33 @@ const scenarioParser = async (req, jobIdField) => {
);
}
// Exit early if no scenarios are ready to dispatch
if (isEmpty(scenariosToDispatch)) {
return;
}
// Step 9: Dispatch Email Notifications to the Email Notification Queue
// Step 9: Dispatch email notifications to the email queue
const emailsToDispatch = scenariosToDispatch.map((scenario) => scenario?.email);
if (!isEmpty(emailsToDispatch)) {
dispatchEmailsToQueue({
emailsToDispatch,
logger
}).catch((e) =>
// Log any errors encountered during email dispatching
logger.log("Something went wrong dispatching emails to the Email Notification Queue", "error", "queue", null, {
message: e?.message
})
);
}
// Step 10: Dispatch App Notifications to the App Notification Queue
// Step 10: Dispatch app notifications to the app queue
const appsToDispatch = scenariosToDispatch.map((scenario) => scenario?.app);
if (!isEmpty(appsToDispatch)) {
dispatchAppsToQueue({
appsToDispatch,
logger
}).catch((e) =>
// Log any errors encountered during app notification dispatching
logger.log("Something went wrong dispatching apps to the App Notification Queue", "error", "queue", null, {
message: e?.message
})

View File

@@ -1,3 +1,16 @@
/**
* @module jobAssignmentHelper
* @description
* This module provides utility functions for handling job assignment types.
* Currently, it includes a function to map lowercase job assignment codes to their corresponding human-readable job types.
*/
/**
* Maps a lowercase job assignment code to its corresponding human-readable job type.
*
* @param {string} data - The lowercase job assignment code (e.g., "employee_pre").
* @returns {string} The human-readable job type (e.g., "Prep"). Returns an empty string if the code is unknown or if the input is null/undefined.
*/
const getJobAssignmentType = (data) => {
switch (data) {
case "employee_pre":