feature/IO-3096-GlobalNotifications - Checkpoint, merge master, ready DB
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
const { Queue, Worker } = require("bullmq");
|
||||
const graphQLClient = require("../../graphql-client/graphql-client").client;
|
||||
|
||||
// Base time-related constant in minutes, sourced from environment variable or defaulting to 1
|
||||
const APP_CONSOLIDATION_DELAY_IN_MINS = (() => {
|
||||
@@ -10,15 +11,70 @@ const APP_CONSOLIDATION_DELAY_IN_MINS = (() => {
|
||||
// Base time-related constant (in milliseconds) / DO NOT TOUCH
const APP_CONSOLIDATION_DELAY = APP_CONSOLIDATION_DELAY_IN_MINS * 60000; // 1 minute (base timeout)

// Derived time-related constants / DO NOT TOUCH, these are pegged to APP_CONSOLIDATION_DELAY.
// (Deduplicated: the previous revision declared each of these constants twice,
// which is a SyntaxError for `const` bindings in the same scope.)
const NOTIFICATION_STORAGE_EXPIRATION = APP_CONSOLIDATION_DELAY * 1.5; // 1.5 minutes (90s, for notification storage)
const CONSOLIDATION_FLAG_EXPIRATION = APP_CONSOLIDATION_DELAY * 1.5; // 1.5 minutes (90s, buffer for consolidation flag)
const LOCK_EXPIRATION = APP_CONSOLIDATION_DELAY * 0.25; // 15 seconds (quarter of base, for lock duration)
const RATE_LIMITER_DURATION = APP_CONSOLIDATION_DELAY * 0.1; // 6 seconds (tenth of base, for rate limiting)
|
||||
|
||||
// Module-level singleton queues, lazily created by loadAppQueue and reused
// on subsequent calls (see the `if (!addQueue || !consolidateQueue)` guard).
let addQueue;
let consolidateQueue;
|
||||
|
||||
// GraphQL mutation that persists a batch of notifications in one request.
// `returning` echoes the inserted rows so the worker can map each generated
// `id` back onto its in-memory user/bodyShop bucket before emitting sockets.
const INSERT_NOTIFICATIONS_MUTATION = `
mutation INSERT_NOTIFICATIONS($objects: [notifications_insert_input!]!) {
insert_notifications(objects: $objects) {
affected_rows
returning {
id
jobid
associationid
ui_translation_string
ui_translation_meta
html_body
}
}
}
`;
|
||||
|
||||
/**
 * Renders a batch of notifications as an HTML unordered list.
 *
 * Bodies are interpolated as-is (no escaping), in array order.
 *
 * @param {Array<Object>} notifications - Notification objects, each carrying a 'body' field.
 * @returns {string} A '<ul>' element whose '<li>' children wrap each body.
 */
const buildHtmlBody = (notifications) => {
  const listItems = [];
  for (const { body } of notifications) {
    listItems.push(`<li>${body}</li>`);
  }
  return `<ul>${listItems.join("")}</ul>`;
};
|
||||
|
||||
/**
 * Resolves the translation key and variables payload for a batch of notifications.
 *
 * A single notification keeps its own key/variables. Any other batch size is
 * collapsed under the generic "multipleChanges" key, with each entry's
 * variables wrapped in a list and tagged with its original key.
 *
 * @param {Array<Object>} notifications - Notification objects with 'key' and 'variables'.
 * @returns {Object} An object with 'key' and 'variables' properties.
 */
const determineKeyAndVariables = (notifications) => {
  if (notifications.length !== 1) {
    // Batch case: tag each entry with its source key so the client can still
    // render every change individually under the generic wrapper key. Note the
    // spread comes after `key`, so an explicit `key` inside n.variables wins.
    const wrapped = notifications.map((entry) => ({
      key: entry.key,
      ...entry.variables
    }));
    return {
      key: "notifications.job.multipleChanges",
      variables: { variables: wrapped }
    };
  }
  // Single notification: pass its key and variables through untouched.
  const [only] = notifications;
  return { key: only.key, variables: only.variables };
};
|
||||
|
||||
/**
|
||||
* Initializes the notification queues and workers for adding and consolidating notifications.
|
||||
*
|
||||
@@ -30,43 +86,37 @@ let consolidateQueue;
|
||||
* @returns {Queue} The initialized `addQueue` instance for dispatching notifications.
|
||||
*/
|
||||
const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
// Only initialize if queues don't already exist
|
||||
if (!addQueue || !consolidateQueue) {
|
||||
logger.logger.info("Initializing Notifications Queues");
|
||||
|
||||
// Create queue for adding notifications
|
||||
addQueue = new Queue("notificationsAdd", {
|
||||
connection: pubClient,
|
||||
prefix: "{BULLMQ}",
|
||||
defaultJobOptions: { removeOnComplete: true, removeOnFail: true }
|
||||
});
|
||||
|
||||
// Create queue for consolidating notifications
|
||||
consolidateQueue = new Queue("notificationsConsolidate", {
|
||||
connection: pubClient,
|
||||
prefix: "{BULLMQ}",
|
||||
defaultJobOptions: { removeOnComplete: true, removeOnFail: true }
|
||||
});
|
||||
|
||||
// Worker to process jobs from the addQueue
|
||||
const addWorker = new Worker(
|
||||
"notificationsAdd",
|
||||
async (job) => {
|
||||
const { jobId, key, variables, recipients } = job.data;
|
||||
const { jobId, key, variables, recipients, body } = job.data;
|
||||
logger.logger.info(`Adding notifications for jobId ${jobId}`);
|
||||
|
||||
const redisKeyPrefix = `app:notifications:${jobId}`;
|
||||
const notification = { key, variables, timestamp: Date.now() };
|
||||
const notification = { key, variables, body, timestamp: Date.now() };
|
||||
|
||||
// Store notification for each recipient in Redis
|
||||
for (const recipient of recipients) {
|
||||
const { user } = recipient;
|
||||
const userKey = `${redisKeyPrefix}:${user}`;
|
||||
const existingNotifications = await pubClient.get(userKey);
|
||||
const notifications = existingNotifications ? JSON.parse(existingNotifications) : [];
|
||||
notifications.push(notification);
|
||||
// Set with expiration to avoid stale data
|
||||
await pubClient.set(userKey, JSON.stringify(notifications), "EX", NOTIFICATION_STORAGE_EXPIRATION / 1000); // Convert to seconds
|
||||
await pubClient.set(userKey, JSON.stringify(notifications), "EX", NOTIFICATION_STORAGE_EXPIRATION / 1000);
|
||||
logger.logger.debug(`Stored notification for ${user} under ${userKey}: ${JSON.stringify(notifications)}`);
|
||||
}
|
||||
|
||||
@@ -75,20 +125,18 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
logger.logger.debug(`Consolidation flag set for jobId ${jobId}: ${flagSet}`);
|
||||
|
||||
if (flagSet) {
|
||||
// Schedule consolidation job with delay and retries
|
||||
await consolidateQueue.add(
|
||||
"consolidate-notifications",
|
||||
{ jobId, recipients },
|
||||
{
|
||||
jobId: `consolidate:${jobId}`,
|
||||
delay: APP_CONSOLIDATION_DELAY,
|
||||
attempts: 3, // Retry up to 3 times
|
||||
backoff: LOCK_EXPIRATION // Retry delay matches lock expiration (15s)
|
||||
attempts: 3,
|
||||
backoff: LOCK_EXPIRATION
|
||||
}
|
||||
);
|
||||
logger.logger.info(`Scheduled consolidation for jobId ${jobId}`);
|
||||
// Set expiration on flag
|
||||
await pubClient.expire(consolidateKey, CONSOLIDATION_FLAG_EXPIRATION / 1000); // Convert to seconds
|
||||
await pubClient.expire(consolidateKey, CONSOLIDATION_FLAG_EXPIRATION / 1000);
|
||||
} else {
|
||||
logger.logger.debug(`Consolidation already scheduled for jobId ${jobId}`);
|
||||
}
|
||||
@@ -100,7 +148,6 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
}
|
||||
);
|
||||
|
||||
// Worker to process jobs from the consolidateQueue
|
||||
const consolidateWorker = new Worker(
|
||||
"notificationsConsolidate",
|
||||
async (job) => {
|
||||
@@ -109,8 +156,7 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
|
||||
const redisKeyPrefix = `app:notifications:${jobId}`;
|
||||
const lockKey = `lock:consolidate:${jobId}`;
|
||||
// Acquire a lock to prevent concurrent consolidation
|
||||
const lockAcquired = await pubClient.set(lockKey, "locked", "NX", "EX", LOCK_EXPIRATION / 1000); // Convert to seconds
|
||||
const lockAcquired = await pubClient.set(lockKey, "locked", "NX", "EX", LOCK_EXPIRATION / 1000);
|
||||
logger.logger.debug(`Lock acquisition for jobId ${jobId}: ${lockAcquired}`);
|
||||
|
||||
if (lockAcquired) {
|
||||
@@ -119,7 +165,6 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
const uniqueUsers = [...new Set(recipients.map((r) => r.user))];
|
||||
logger.logger.debug(`Unique users for jobId ${jobId}: ${uniqueUsers}`);
|
||||
|
||||
// Retrieve and structure notifications by user and bodyShopId
|
||||
for (const user of uniqueUsers) {
|
||||
const userKey = `${redisKeyPrefix}:${user}`;
|
||||
const notifications = await pubClient.get(userKey);
|
||||
@@ -141,29 +186,71 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
|
||||
logger.logger.debug(`Consolidated notifications: ${JSON.stringify(allNotifications)}`);
|
||||
|
||||
// Emit notifications to users via Socket.io
|
||||
// Insert notifications into the database and collect IDs
|
||||
const notificationInserts = [];
|
||||
const notificationIdMap = new Map();
|
||||
|
||||
for (const [user, bodyShopData] of Object.entries(allNotifications)) {
|
||||
const userRecipients = recipients.filter((r) => r.user === user);
|
||||
const employeeId = userRecipients[0]?.employeeId;
|
||||
|
||||
for (const [bodyShopId, notifications] of Object.entries(bodyShopData)) {
|
||||
const { key, variables } = determineKeyAndVariables(notifications);
|
||||
const htmlBody = buildHtmlBody(notifications);
|
||||
notificationInserts.push({
|
||||
jobid: jobId,
|
||||
associationid: employeeId || null,
|
||||
ui_translation_string: key,
|
||||
ui_translation_meta: JSON.stringify(variables),
|
||||
html_body: htmlBody
|
||||
});
|
||||
notificationIdMap.set(`${user}:${bodyShopId}`, null);
|
||||
}
|
||||
}
|
||||
|
||||
if (notificationInserts.length > 0) {
|
||||
const insertResponse = await graphQLClient.request(INSERT_NOTIFICATIONS_MUTATION, {
|
||||
objects: notificationInserts
|
||||
});
|
||||
logger.logger.info(
|
||||
`Inserted ${insertResponse.insert_notifications.affected_rows} notifications for jobId ${jobId}`
|
||||
);
|
||||
|
||||
insertResponse.insert_notifications.returning.forEach((row, index) => {
|
||||
const user = uniqueUsers[Math.floor(index / Object.keys(allNotifications[uniqueUsers[0]]).length)];
|
||||
const bodyShopId = Object.keys(allNotifications[user])[
|
||||
index % Object.keys(allNotifications[user]).length
|
||||
];
|
||||
notificationIdMap.set(`${user}:${bodyShopId}`, row.id);
|
||||
});
|
||||
}
|
||||
|
||||
// Emit notifications to users via Socket.io with notification ID
|
||||
for (const [user, bodyShopData] of Object.entries(allNotifications)) {
|
||||
const userMapping = await redisHelpers.getUserSocketMapping(user);
|
||||
logger.logger.debug(`User socket mapping for ${user}: ${JSON.stringify(userMapping)}`);
|
||||
|
||||
for (const [bodyShopId, notifications] of Object.entries(bodyShopData)) {
|
||||
const notificationId = notificationIdMap.get(`${user}:${bodyShopId}`);
|
||||
if (userMapping && userMapping[bodyShopId]?.socketIds) {
|
||||
userMapping[bodyShopId].socketIds.forEach((socketId) => {
|
||||
logger.logger.debug(
|
||||
`Emitting to socket ${socketId}: ${JSON.stringify({
|
||||
jobId,
|
||||
bodyShopId,
|
||||
notifications
|
||||
notifications,
|
||||
notificationId
|
||||
})}`
|
||||
);
|
||||
ioRedis.to(socketId).emit("notification", {
|
||||
jobId,
|
||||
bodyShopId,
|
||||
notifications
|
||||
notifications,
|
||||
notificationId
|
||||
});
|
||||
});
|
||||
logger.logger.info(
|
||||
`Sent ${notifications.length} consolidated notifications to ${user} for jobId ${jobId}`
|
||||
`Sent ${notifications.length} consolidated notifications to ${user} for jobId ${jobId} with notificationId ${notificationId}`
|
||||
);
|
||||
} else {
|
||||
logger.logger.warn(`No socket IDs found for ${user} in bodyShopId ${bodyShopId}`);
|
||||
@@ -174,7 +261,7 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
await pubClient.del(`app:consolidate:${jobId}`);
|
||||
} catch (err) {
|
||||
logger.logger.error(`Consolidation error for jobId ${jobId}: ${err.message}`, { error: err });
|
||||
throw err; // Trigger retry if attempts remain
|
||||
throw err;
|
||||
} finally {
|
||||
await pubClient.del(lockKey);
|
||||
}
|
||||
@@ -190,20 +277,15 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
}
|
||||
);
|
||||
|
||||
// Log worker completion events
|
||||
addWorker.on("completed", (job) => logger.logger.info(`Add job ${job.id} completed`));
|
||||
consolidateWorker.on("completed", (job) => logger.logger.info(`Consolidate job ${job.id} completed`));
|
||||
|
||||
// Log worker failure events with error details
|
||||
addWorker.on("failed", (job, err) =>
|
||||
logger.logger.error(`Add job ${job.id} failed: ${err.message}`, { error: err })
|
||||
);
|
||||
|
||||
consolidateWorker.on("failed", (job, err) =>
|
||||
logger.logger.error(`Consolidate job ${job.id} failed: ${err.message}`, { error: err })
|
||||
);
|
||||
|
||||
// Graceful shutdown handler for workers
|
||||
const shutdown = async () => {
|
||||
logger.logger.info("Closing app queue workers...");
|
||||
await Promise.all([addWorker.close(), consolidateWorker.close()]);
|
||||
@@ -240,10 +322,10 @@ const dispatchAppsToQueue = async ({ appsToDispatch, logger }) => {
|
||||
const appQueue = getQueue();
|
||||
|
||||
for (const app of appsToDispatch) {
|
||||
const { jobId, bodyShopId, key, variables, recipients } = app;
|
||||
const { jobId, bodyShopId, key, variables, recipients, body } = app;
|
||||
await appQueue.add(
|
||||
"add-notification",
|
||||
{ jobId, bodyShopId, key, variables, recipients },
|
||||
{ jobId, bodyShopId, key, variables, recipients, body },
|
||||
{ jobId: `${jobId}:${Date.now()}` }
|
||||
);
|
||||
logger.logger.info(`Added notification to queue for jobId ${jobId} with ${recipients.length} recipients`);
|
||||
|
||||
Reference in New Issue
Block a user