feature/IO-3492-FCM-Queue-For-Notifications: Finalize

Dave
2026-01-05 18:42:40 -05:00
parent 4cdc15f70b
commit 00bf5977ae
5 changed files with 170 additions and 35 deletions

@@ -7,7 +7,7 @@ const getBullMQPrefix = require("../../utils/getBullMQPrefix");
const devDebugLogger = require("../../utils/devDebugLogger");
const { client: gqlClient } = require("../../graphql-client/graphql-client");
const { GET_USERS_FCM_TOKENS_BY_EMAILS } = require("../../graphql-client/queries");
const { GET_USERS_FCM_TOKENS_BY_EMAILS, UPDATE_USER_FCM_TOKENS_BY_EMAIL } = require("../../graphql-client/queries");
const FCM_CONSOLIDATION_DELAY_IN_MINS = (() => {
const envValue = process.env?.FCM_CONSOLIDATION_DELAY_IN_MINS;
@@ -19,7 +19,13 @@ const FCM_CONSOLIDATION_DELAY = FCM_CONSOLIDATION_DELAY_IN_MINS * 60000;
// pegged constants (pattern matches the other queues)
const CONSOLIDATION_KEY_EXPIRATION = FCM_CONSOLIDATION_DELAY * 1.5;
const LOCK_EXPIRATION = FCM_CONSOLIDATION_DELAY * 0.25;
// IMPORTANT: lock must outlive a full consolidation run to avoid duplicate sends.
const LOCK_EXPIRATION = FCM_CONSOLIDATION_DELAY * 1.5;
// Keep Bull backoff separate from lock TTL to avoid unexpected long retries.
const BACKOFF_DELAY = Math.max(1000, Math.floor(FCM_CONSOLIDATION_DELAY * 0.25));
const RATE_LIMITER_DURATION = FCM_CONSOLIDATION_DELAY * 0.1;
const NOTIFICATION_EXPIRATION = FCM_CONSOLIDATION_DELAY * 1.5;
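// Worked example (illustrative; assumes FCM_CONSOLIDATION_DELAY_IN_MINS resolves to 10, i.e. FCM_CONSOLIDATION_DELAY = 600000 ms):
//   CONSOLIDATION_KEY_EXPIRATION = 900000 ms (15 min)
//   LOCK_EXPIRATION              = 900000 ms (15 min)
//   BACKOFF_DELAY                = 150000 ms (2.5 min)
//   RATE_LIMITER_DURATION        =  60000 ms (1 min)
//   NOTIFICATION_EXPIRATION      = 900000 ms (15 min)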
@@ -137,6 +143,47 @@ const normalizeTokens = (fcmtokens) => {
return [];
};
/**
* Remove the specified tokens from the stored fcmtokens jsonb while preserving the original shape.
* @param fcmtokens - stored jsonb value: a single token string, an array of tokens, or an object whose keys or values hold tokens
* @param tokensToRemove - Expo push tokens to strip
* @returns {*} the same shape as the input, minus the removed tokens
*/
const removeTokensFromFcmtokens = (fcmtokens, tokensToRemove) => {
const remove = new Set((tokensToRemove || []).map((t) => String(t).trim()).filter(Boolean));
if (!remove.size) return fcmtokens;
if (!fcmtokens) return fcmtokens;
if (typeof fcmtokens === "string") {
const s = fcmtokens.trim();
return remove.has(s) ? null : fcmtokens;
}
if (Array.isArray(fcmtokens)) {
const next = fcmtokens.filter((t) => !remove.has(String(t).trim()));
return next.length ? next : [];
}
if (typeof fcmtokens === "object") {
const next = {};
for (const [k, v] of Object.entries(fcmtokens)) {
const keyIsToken = isExpoPushToken(k) && remove.has(k);
let valueToken = null;
if (typeof v === "string") valueToken = v;
else if (v && typeof v === "object") valueToken = v.pushTokenString || v.token || v.expoPushToken || null;
const valueIsToken = valueToken && remove.has(String(valueToken).trim());
if (keyIsToken || valueIsToken) continue;
next[k] = v;
}
return Object.keys(next).length ? next : {};
}
return fcmtokens;
};
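// Illustrative calls covering the three stored shapes (tokens are made-up values):
//   removeTokensFromFcmtokens("ExponentPushToken[aaa]", ["ExponentPushToken[aaa]"]) -> null
//   removeTokensFromFcmtokens(["ExponentPushToken[aaa]", "ExponentPushToken[bbb]"], ["ExponentPushToken[aaa]"]) -> ["ExponentPushToken[bbb]"]
//   removeTokensFromFcmtokens({ phone: { token: "ExponentPushToken[aaa]" } }, ["ExponentPushToken[aaa]"]) -> {}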
/**
* Safely parse JSON response.
* @param res - fetch Response
@@ -151,12 +198,18 @@ const safeJson = async (res) => {
};
/**
* Send Expo push notifications
* Send Expo push notifications.
* Returns invalid tokens that should be removed (e.g., DeviceNotRegistered).
*
* @param {Array<Object>} messages Expo messages array
* @param {Object} logger
* @returns {Promise<{invalidTokens: string[], ticketIds: string[]}>}
*/
const sendExpoPush = async ({ messages, logger }) => {
if (!messages?.length) return;
if (!messages?.length) return { invalidTokens: [], ticketIds: [] };
const invalidTokens = new Set();
const ticketIds = [];
for (const batch of chunk(messages, EXPO_MAX_MESSAGES_PER_REQUEST)) {
const res = await fetch(EXPO_PUSH_ENDPOINT, {
@@ -179,15 +232,43 @@ const sendExpoPush = async ({ messages, logger }) => {
throw new Error(`Expo push HTTP error: ${res.status} ${res.statusText}`);
}
const data = payload?.data;
if (Array.isArray(data)) {
const errors = data.filter((t) => t?.status === "error");
if (errors.length) {
logger?.log?.("expo-push-ticket-errors", "ERROR", "notifications", "api", { errors });
const tickets = Array.isArray(payload?.data) ? payload.data : payload?.data ? [payload.data] : [];
if (!tickets.length) {
logger?.log?.("expo-push-bad-response", "ERROR", "notifications", "api", { payload });
continue;
}
// Expo returns tickets in the same order as messages in the request batch
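// Illustrative ticket payload (shape per Expo's push API; ids/tokens are made up):
//   { "data": [
//       { "status": "ok", "id": "0c9f76f7-...." },
//       { "status": "error",
//         "message": "\"ExponentPushToken[zzz]\" is not a registered push notification recipient",
//         "details": { "error": "DeviceNotRegistered" } }
//   ] }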
for (let i = 0; i < tickets.length; i++) {
const t = tickets[i];
const msg = batch[i];
const token = typeof msg?.to === "string" ? msg.to : null;
if (t?.status === "ok" && t?.id) ticketIds.push(String(t.id));
if (t?.status === "error") {
const errCode = t?.details?.error;
const msgText = String(t?.message || "");
const shouldDelete =
errCode === "DeviceNotRegistered" || /not a registered push notification recipient/i.test(msgText);
if (shouldDelete && token && isExpoPushToken(token)) {
invalidTokens.add(token);
}
logger?.log?.("expo-push-ticket-error", "ERROR", "notifications", "api", {
token,
ticket: t
});
}
}
}
return { invalidTokens: [...invalidTokens], ticketIds };
};
/**
* Build a summary string for push notification body.
* @param count - number of consolidated updates
@@ -239,10 +320,10 @@ const loadFcmQueue = async ({ pubClient, logger }) => {
const metaKey = `fcm:${devKey}:meta:${jobId}`;
const redisKeyPrefix = `fcm:${devKey}:notifications:${jobId}`; // per-user list keys
// store job-level metadata once
await pubClient.hsetnx(metaKey, "jobRoNumber", jobRoNumber || "");
await pubClient.hsetnx(metaKey, "bodyShopId", bodyShopId || "");
await pubClient.hsetnx(metaKey, "bodyShopName", bodyShopName || "");
// Store job-level metadata (always keep latest values)
await pubClient.hset(metaKey, "jobRoNumber", jobRoNumber || "");
await pubClient.hset(metaKey, "bodyShopId", bodyShopId || "");
await pubClient.hset(metaKey, "bodyShopName", bodyShopName || "");
await pubClient.expire(metaKey, seconds(NOTIFICATION_EXPIRATION));
for (const r of recipients || []) {
@@ -279,7 +360,7 @@ const loadFcmQueue = async ({ pubClient, logger }) => {
jobId: `consolidate-${jobId}`,
delay: FCM_CONSOLIDATION_DELAY,
attempts: 3,
backoff: LOCK_EXPIRATION
backoff: BACKOFF_DELAY
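// NOTE (assumption): a plain number here is treated as a fixed retry delay in ms; BullMQ also accepts { type, delay }.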
}
);
@@ -325,8 +406,13 @@ const loadFcmQueue = async ({ pubClient, logger }) => {
// Fetch tokens for all recipients (1 DB round-trip)
const usersResp = await gqlClient.request(GET_USERS_FCM_TOKENS_BY_EMAILS, { emails: userEmails });
// Map: email -> { raw, tokens }
const tokenMap = new Map(
(usersResp?.users || []).map((u) => [String(u.email), normalizeTokens(u.fcmtokens)])
(usersResp?.users || []).map((u) => [
String(u.email),
{ raw: u.fcmtokens, tokens: normalizeTokens(u.fcmtokens) }
])
);
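// Resulting entries (illustrative): "user@example.com" -> { raw: <original fcmtokens jsonb>, tokens: ["ExponentPushToken[aaa]"] }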
for (const userEmail of userEmails) {
@@ -352,12 +438,12 @@ const loadFcmQueue = async ({ pubClient, logger }) => {
const notificationBody = buildPushSummary({ count, jobRoNumber, bodyShopName });
// associationId should be stable for a user in a job's body shop; take the first non-null
const firstWithAssociation = parsed.find((p) => p?.associationId != null);
const associationId =
parsed.find((p) => p?.associationId)?.associationId != null
? String(parsed.find((p) => p?.associationId)?.associationId)
: "";
firstWithAssociation?.associationId != null ? String(firstWithAssociation.associationId) : "";
const tokens = tokenMap.get(String(userEmail)) || [];
const tokenInfo = tokenMap.get(String(userEmail)) || { raw: null, tokens: [] };
const tokens = tokenInfo.tokens || [];
if (!tokens.length) {
devDebugLogger(`No Expo push tokens for ${userEmail}; skipping push for jobId ${jobId}`);
@@ -383,7 +469,28 @@ const loadFcmQueue = async ({ pubClient, logger }) => {
}
}));
await sendExpoPush({ messages, logger });
const { invalidTokens } = await sendExpoPush({ messages, logger });
// Opportunistic cleanup: remove invalid tokens from users.fcmtokens
if (invalidTokens?.length) {
try {
const nextFcmtokens = removeTokensFromFcmtokens(tokenInfo.raw, invalidTokens);
await gqlClient.request(UPDATE_USER_FCM_TOKENS_BY_EMAIL, {
email: String(userEmail),
fcmtokens: nextFcmtokens
});
devDebugLogger(`Cleaned ${invalidTokens.length} invalid Expo tokens for ${userEmail}`);
} catch (e) {
logger?.log?.("expo-push-token-cleanup-failed", "ERROR", "notifications", "api", {
userEmail: String(userEmail),
message: e?.message,
stack: e?.stack
});
// Do not throw: cleanup failure should not retry the whole consolidation and risk duplicate pushes.
}
}
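// Caveat: the cleanup writes back the whole fcmtokens value, so it can race with a concurrent
// token registration; a best-effort trade-off consistent with the non-throwing error handling above.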
devDebugLogger(`Sent Expo push to ${userEmail} for jobId ${jobId} (${count} updates)`);