feature/IO-3492-FCM-Queue-For-Notifications: Implement FCM queue and worker for notifications
This commit is contained in:
@@ -42,6 +42,13 @@ const buildNotificationContent = (notifications) => {
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Convert milliseconds to whole seconds for Redis TTLs.
 * Rounds up, and never returns less than 1 second, because Redis
 * EX/EXPIRE arguments must be positive integers.
 * @param ms
 * @returns {number}
 */
const seconds = (ms) => {
  const wholeSeconds = Math.ceil(ms / 1000);
  return wholeSeconds < 1 ? 1 : wholeSeconds;
};
|
||||
|
||||
/**
|
||||
* Initializes the notification queues and workers for adding and consolidating notifications.
|
||||
*/
|
||||
@@ -52,6 +59,13 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
|
||||
devDebugLogger(`Initializing Notifications Queues with prefix: ${prefix}`);
|
||||
|
||||
// Redis key helpers (per jobId)
|
||||
// Redis SET of `${user}:${bodyShopId}` entries recorded for a job.
const recipientsSetKey = (jobId) => ["app", devKey, "recipients", jobId].join(":");
|
||||
// Redis HASH mapping `${user}:${bodyShopId}` => associationId for a job.
const recipientAssocHashKey = (jobId) => ["app", devKey, "recipientAssoc", jobId].join(":");
|
||||
// One-shot flag key used to schedule consolidation at most once per jobId.
const consolidateFlagKey = (jobId) => ["app", devKey, "consolidate", jobId].join(":");
|
||||
// Lock key guarding the consolidation work for a single jobId.
const lockKeyForJob = (jobId) => ["lock", devKey, "consolidate", jobId].join(":");
|
||||
// List of serialized notification payloads for one (job, user, body shop) tuple.
const listKey = ({ jobId, user, bodyShopId }) =>
  ["app", devKey, "notifications", jobId, user, bodyShopId].join(":");
|
||||
|
||||
addQueue = new Queue("notificationsAdd", {
|
||||
prefix,
|
||||
connection: pubClient,
|
||||
@@ -70,27 +84,39 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
const { jobId, key, variables, recipients, body, jobRoNumber } = job.data;
|
||||
devDebugLogger(`Adding notifications for jobId ${jobId}`);
|
||||
|
||||
const redisKeyPrefix = `app:${devKey}:notifications:${jobId}`;
|
||||
const notification = { key, variables, body, jobRoNumber, timestamp: Date.now() };
|
||||
|
||||
for (const recipient of recipients) {
|
||||
const { user } = recipient;
|
||||
const userKey = `${redisKeyPrefix}:${user}`;
|
||||
const existingNotifications = await pubClient.get(userKey);
|
||||
const notifications = existingNotifications ? JSON.parse(existingNotifications) : [];
|
||||
notifications.push(notification);
|
||||
await pubClient.set(userKey, JSON.stringify(notifications), "EX", NOTIFICATION_STORAGE_EXPIRATION / 1000);
|
||||
devDebugLogger(`Stored notification for ${user} under ${userKey}: ${JSON.stringify(notifications)}`);
|
||||
// Store notifications atomically (RPUSH) and store recipients in a Redis set
|
||||
for (const recipient of recipients || []) {
|
||||
const { user, bodyShopId, associationId } = recipient;
|
||||
if (!user || !bodyShopId) continue;
|
||||
|
||||
const rk = `${user}:${bodyShopId}`;
|
||||
|
||||
// (1) Store notification payload in a list (atomic append)
|
||||
const lk = listKey({ jobId, user, bodyShopId });
|
||||
await pubClient.rpush(lk, JSON.stringify(notification));
|
||||
await pubClient.expire(lk, seconds(NOTIFICATION_STORAGE_EXPIRATION));
|
||||
|
||||
// (2) Track recipients in a set, and associationId in a hash
|
||||
await pubClient.sadd(recipientsSetKey(jobId), rk);
|
||||
await pubClient.expire(recipientsSetKey(jobId), seconds(NOTIFICATION_STORAGE_EXPIRATION));
|
||||
|
||||
if (associationId) {
|
||||
await pubClient.hset(recipientAssocHashKey(jobId), rk, String(associationId));
|
||||
}
|
||||
await pubClient.expire(recipientAssocHashKey(jobId), seconds(NOTIFICATION_STORAGE_EXPIRATION));
|
||||
}
|
||||
|
||||
const consolidateKey = `app:${devKey}:consolidate:${jobId}`;
|
||||
const flagSet = await pubClient.setnx(consolidateKey, "pending");
|
||||
// Schedule consolidation once per jobId
|
||||
const flagKey = consolidateFlagKey(jobId);
|
||||
const flagSet = await pubClient.setnx(flagKey, "pending");
|
||||
devDebugLogger(`Consolidation flag set for jobId ${jobId}: ${flagSet}`);
|
||||
|
||||
if (flagSet) {
|
||||
await consolidateQueue.add(
|
||||
"consolidate-notifications",
|
||||
{ jobId, recipients },
|
||||
{ jobId },
|
||||
{
|
||||
jobId: `consolidate-${jobId}`,
|
||||
delay: APP_CONSOLIDATION_DELAY,
|
||||
@@ -98,8 +124,9 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
backoff: LOCK_EXPIRATION
|
||||
}
|
||||
);
|
||||
|
||||
await pubClient.expire(flagKey, seconds(CONSOLIDATION_FLAG_EXPIRATION));
|
||||
devDebugLogger(`Scheduled consolidation for jobId ${jobId}`);
|
||||
await pubClient.expire(consolidateKey, CONSOLIDATION_FLAG_EXPIRATION / 1000);
|
||||
} else {
|
||||
devDebugLogger(`Consolidation already scheduled for jobId ${jobId}`);
|
||||
}
|
||||
@@ -114,122 +141,163 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
const consolidateWorker = new Worker(
|
||||
"notificationsConsolidate",
|
||||
async (job) => {
|
||||
const { jobId, recipients } = job.data;
|
||||
const { jobId } = job.data;
|
||||
devDebugLogger(`Consolidating notifications for jobId ${jobId}`);
|
||||
|
||||
const redisKeyPrefix = `app:${devKey}:notifications:${jobId}`;
|
||||
const lockKey = `lock:${devKey}:consolidate:${jobId}`;
|
||||
|
||||
const lockAcquired = await pubClient.set(lockKey, "locked", "NX", "EX", LOCK_EXPIRATION / 1000);
|
||||
const lockKey = lockKeyForJob(jobId);
|
||||
const lockAcquired = await pubClient.set(lockKey, "locked", "NX", "EX", seconds(LOCK_EXPIRATION));
|
||||
devDebugLogger(`Lock acquisition for jobId ${jobId}: ${lockAcquired}`);
|
||||
|
||||
if (lockAcquired) {
|
||||
try {
|
||||
const allNotifications = {};
|
||||
const uniqueUsers = [...new Set(recipients.map((r) => r.user))];
|
||||
devDebugLogger(`Unique users for jobId ${jobId}: ${uniqueUsers}`);
|
||||
|
||||
for (const user of uniqueUsers) {
|
||||
const userKey = `${redisKeyPrefix}:${user}`;
|
||||
const notifications = await pubClient.get(userKey);
|
||||
devDebugLogger(`Retrieved notifications for ${user}: ${notifications}`);
|
||||
|
||||
if (notifications) {
|
||||
const parsedNotifications = JSON.parse(notifications);
|
||||
const userRecipients = recipients.filter((r) => r.user === user);
|
||||
for (const { bodyShopId } of userRecipients) {
|
||||
allNotifications[user] = allNotifications[user] || {};
|
||||
allNotifications[user][bodyShopId] = parsedNotifications;
|
||||
}
|
||||
await pubClient.del(userKey);
|
||||
devDebugLogger(`Deleted Redis key ${userKey}`);
|
||||
} else {
|
||||
devDebugLogger(`No notifications found for ${user} under ${userKey}`);
|
||||
}
|
||||
}
|
||||
|
||||
devDebugLogger(`Consolidated notifications: ${JSON.stringify(allNotifications)}`);
|
||||
|
||||
// Insert notifications into the database and collect IDs
|
||||
const notificationInserts = [];
|
||||
const notificationIdMap = new Map();
|
||||
|
||||
for (const [user, bodyShopData] of Object.entries(allNotifications)) {
|
||||
const userRecipients = recipients.filter((r) => r.user === user);
|
||||
const associationId = userRecipients[0]?.associationId;
|
||||
|
||||
for (const [bodyShopId, notifications] of Object.entries(bodyShopData)) {
|
||||
const { scenario_text, fcm_text, scenario_meta } = buildNotificationContent(notifications);
|
||||
notificationInserts.push({
|
||||
jobid: jobId,
|
||||
associationid: associationId,
|
||||
scenario_text: JSON.stringify(scenario_text),
|
||||
fcm_text: fcm_text,
|
||||
scenario_meta: JSON.stringify(scenario_meta)
|
||||
});
|
||||
notificationIdMap.set(`${user}:${bodyShopId}`, null);
|
||||
}
|
||||
}
|
||||
|
||||
if (notificationInserts.length > 0) {
|
||||
const insertResponse = await graphQLClient.request(INSERT_NOTIFICATIONS_MUTATION, {
|
||||
objects: notificationInserts
|
||||
});
|
||||
devDebugLogger(
|
||||
`Inserted ${insertResponse.insert_notifications.affected_rows} notifications for jobId ${jobId}`
|
||||
);
|
||||
|
||||
insertResponse.insert_notifications.returning.forEach((row, index) => {
|
||||
const user = uniqueUsers[Math.floor(index / Object.keys(allNotifications[uniqueUsers[0]]).length)];
|
||||
const bodyShopId = Object.keys(allNotifications[user])[
|
||||
index % Object.keys(allNotifications[user]).length
|
||||
];
|
||||
notificationIdMap.set(`${user}:${bodyShopId}`, row.id);
|
||||
});
|
||||
}
|
||||
|
||||
// Emit notifications to users via Socket.io with notification ID
|
||||
for (const [user, bodyShopData] of Object.entries(allNotifications)) {
|
||||
const userMapping = await redisHelpers.getUserSocketMapping(user);
|
||||
const userRecipients = recipients.filter((r) => r.user === user);
|
||||
const associationId = userRecipients[0]?.associationId;
|
||||
|
||||
for (const [bodyShopId, notifications] of Object.entries(bodyShopData)) {
|
||||
const notificationId = notificationIdMap.get(`${user}:${bodyShopId}`);
|
||||
const jobRoNumber = notifications[0]?.jobRoNumber;
|
||||
|
||||
if (userMapping && userMapping[bodyShopId]?.socketIds) {
|
||||
userMapping[bodyShopId].socketIds.forEach((socketId) => {
|
||||
ioRedis.to(socketId).emit("notification", {
|
||||
jobId,
|
||||
jobRoNumber,
|
||||
bodyShopId,
|
||||
notifications,
|
||||
notificationId,
|
||||
associationId
|
||||
});
|
||||
});
|
||||
devDebugLogger(
|
||||
`Sent ${notifications.length} consolidated notifications to ${user} for jobId ${jobId} with notificationId ${notificationId}`
|
||||
);
|
||||
} else {
|
||||
devDebugLogger(`No socket IDs found for ${user} in bodyShopId ${bodyShopId}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await pubClient.del(`app:${devKey}:consolidate:${jobId}`);
|
||||
} catch (err) {
|
||||
logger.log(`app-queue-consolidation-error`, "ERROR", "notifications", "api", {
|
||||
message: err?.message,
|
||||
stack: err?.stack
|
||||
});
|
||||
throw err;
|
||||
} finally {
|
||||
await pubClient.del(lockKey);
|
||||
}
|
||||
} else {
|
||||
if (!lockAcquired) {
|
||||
devDebugLogger(`Skipped consolidation for jobId ${jobId} - lock held by another worker`);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const rkSet = recipientsSetKey(jobId);
|
||||
const assocHash = recipientAssocHashKey(jobId);
|
||||
|
||||
const recipientKeys = await pubClient.smembers(rkSet);
|
||||
if (!recipientKeys?.length) {
|
||||
devDebugLogger(`No recipients found for jobId ${jobId}, nothing to consolidate.`);
|
||||
await pubClient.del(consolidateFlagKey(jobId));
|
||||
return;
|
||||
}
|
||||
|
||||
const assocMap = await pubClient.hgetall(assocHash);
|
||||
|
||||
// Collect notifications by recipientKey
|
||||
const notificationsByRecipient = new Map(); // rk => parsed notifications array
|
||||
|
||||
for (const rk of recipientKeys) {
|
||||
const [user, bodyShopId] = rk.split(":");
|
||||
const lk = listKey({ jobId, user, bodyShopId });
|
||||
|
||||
const items = await pubClient.lrange(lk, 0, -1);
|
||||
if (!items?.length) continue;
|
||||
|
||||
const parsed = items
|
||||
.map((x) => {
|
||||
try {
|
||||
return JSON.parse(x);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
if (parsed.length) {
|
||||
notificationsByRecipient.set(rk, parsed);
|
||||
}
|
||||
|
||||
// Cleanup list key after reading
|
||||
await pubClient.del(lk);
|
||||
}
|
||||
|
||||
if (!notificationsByRecipient.size) {
|
||||
devDebugLogger(`No notifications found in lists for jobId ${jobId}, nothing to insert/emit.`);
|
||||
await pubClient.del(rkSet);
|
||||
await pubClient.del(assocHash);
|
||||
await pubClient.del(consolidateFlagKey(jobId));
|
||||
return;
|
||||
}
|
||||
|
||||
// Build DB inserts
|
||||
const inserts = [];
|
||||
const insertMeta = []; // keep rk + associationId to emit after insert
|
||||
|
||||
for (const [rk, notifications] of notificationsByRecipient.entries()) {
|
||||
const associationId = assocMap?.[rk];
|
||||
|
||||
// If your DB requires associationid NOT NULL, skip if missing
|
||||
if (!associationId) {
|
||||
devDebugLogger(`Skipping insert for ${rk} (missing associationId).`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const { scenario_text, fcm_text, scenario_meta } = buildNotificationContent(notifications);
|
||||
|
||||
inserts.push({
|
||||
jobid: jobId,
|
||||
associationid: associationId,
|
||||
// NOTE: if these are jsonb columns, remove JSON.stringify and pass arrays directly.
|
||||
scenario_text: JSON.stringify(scenario_text),
|
||||
fcm_text,
|
||||
scenario_meta: JSON.stringify(scenario_meta)
|
||||
});
|
||||
|
||||
insertMeta.push({ rk, associationId });
|
||||
}
|
||||
|
||||
// Map notificationId by associationId from Hasura returning rows
|
||||
const idByAssociationId = new Map();
|
||||
|
||||
if (inserts.length > 0) {
|
||||
const insertResponse = await graphQLClient.request(INSERT_NOTIFICATIONS_MUTATION, { objects: inserts });
|
||||
|
||||
const returning = insertResponse?.insert_notifications?.returning || [];
|
||||
returning.forEach((row) => {
|
||||
// Expecting your mutation to return associationid as well as id.
|
||||
// If your mutation currently doesn’t return associationid, update it.
|
||||
if (row?.associationid) idByAssociationId.set(String(row.associationid), row.id);
|
||||
});
|
||||
|
||||
devDebugLogger(
|
||||
`Inserted ${insertResponse.insert_notifications.affected_rows} notifications for jobId ${jobId}`
|
||||
);
|
||||
}
|
||||
|
||||
// Emit via Socket.io
|
||||
// Group by user to reduce mapping lookups
|
||||
const uniqueUsers = [...new Set(insertMeta.map(({ rk }) => rk.split(":")[0]))];
|
||||
|
||||
for (const user of uniqueUsers) {
|
||||
const userMapping = await redisHelpers.getUserSocketMapping(user);
|
||||
const entriesForUser = insertMeta
|
||||
.map((m) => ({ ...m, user: m.rk.split(":")[0], bodyShopId: m.rk.split(":")[1] }))
|
||||
.filter((m) => m.user === user);
|
||||
|
||||
for (const entry of entriesForUser) {
|
||||
const { rk, bodyShopId, associationId } = entry;
|
||||
const notifications = notificationsByRecipient.get(rk) || [];
|
||||
if (!notifications.length) continue;
|
||||
|
||||
const jobRoNumber = notifications[0]?.jobRoNumber;
|
||||
const notificationId = idByAssociationId.get(String(associationId)) || null;
|
||||
|
||||
if (userMapping && userMapping[bodyShopId]?.socketIds) {
|
||||
userMapping[bodyShopId].socketIds.forEach((socketId) => {
|
||||
ioRedis.to(socketId).emit("notification", {
|
||||
jobId,
|
||||
jobRoNumber,
|
||||
bodyShopId,
|
||||
notifications,
|
||||
notificationId,
|
||||
associationId
|
||||
});
|
||||
});
|
||||
|
||||
devDebugLogger(
|
||||
`Sent ${notifications.length} consolidated notifications to ${user} for jobId ${jobId} (notificationId ${notificationId})`
|
||||
);
|
||||
} else {
|
||||
devDebugLogger(`No socket IDs found for ${user} in bodyShopId ${bodyShopId}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup recipient tracking keys + consolidation flag
|
||||
await pubClient.del(rkSet);
|
||||
await pubClient.del(assocHash);
|
||||
await pubClient.del(consolidateFlagKey(jobId));
|
||||
} catch (err) {
|
||||
logger.log("app-queue-consolidation-error", "ERROR", "notifications", "api", {
|
||||
message: err?.message,
|
||||
stack: err?.stack
|
||||
});
|
||||
throw err;
|
||||
} finally {
|
||||
await pubClient.del(lockKey);
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -244,13 +312,14 @@ const loadAppQueue = async ({ pubClient, logger, redisHelpers, ioRedis }) => {
|
||||
consolidateWorker.on("completed", (job) => devDebugLogger(`Consolidate job ${job.id} completed`));
|
||||
|
||||
addWorker.on("failed", (job, err) =>
|
||||
logger.log(`app-queue-notification-error`, "ERROR", "notifications", "api", {
|
||||
logger.log("app-queue-notification-error", "ERROR", "notifications", "api", {
|
||||
message: err?.message,
|
||||
stack: err?.stack
|
||||
})
|
||||
);
|
||||
|
||||
consolidateWorker.on("failed", (job, err) =>
|
||||
logger.log(`app-queue-consolidation-failed:`, "ERROR", "notifications", "api", {
|
||||
logger.log("app-queue-consolidation-failed", "ERROR", "notifications", "api", {
|
||||
message: err?.message,
|
||||
stack: err?.stack
|
||||
})
|
||||
@@ -285,11 +354,13 @@ const dispatchAppsToQueue = async ({ appsToDispatch }) => {
|
||||
|
||||
for (const app of appsToDispatch) {
|
||||
const { jobId, bodyShopId, key, variables, recipients, body, jobRoNumber } = app;
|
||||
|
||||
await appQueue.add(
|
||||
"add-notification",
|
||||
{ jobId, bodyShopId, key, variables, recipients, body, jobRoNumber },
|
||||
{ jobId: `${jobId}-${Date.now()}` }
|
||||
);
|
||||
|
||||
devDebugLogger(`Added notification to queue for jobId ${jobId} with ${recipients.length} recipients`);
|
||||
}
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user