diff --git a/docker-compose.yml b/docker-compose.yml index 503d874b3..50dc56932 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -167,6 +167,27 @@ services: # volumes: # - redis-insight-data:/db +# ##Optional Container for SFTP/SSH Server for testing +# ssh-sftp-server: +# image: atmoz/sftp:alpine # Using an image with SFTP support +# container_name: ssh-sftp-server +# hostname: ssh-sftp-server +# networks: +# - redis-cluster-net +# ports: +# - "2222:22" # Expose port 22 for SSH/SFTP (mapped to 2222 on the host) +# volumes: +# - ./certs/id_rsa.pub:/home/user/.ssh/keys/id_rsa.pub:ro # Mount the SSH public key +# - ./upload:/home/user/upload # Mount a local directory for SFTP uploads +# environment: +# - SFTP_USERS=user:password:1001:100:upload +# command: > +# /bin/sh -c " +# echo 'Match User user' >> /etc/ssh/sshd_config && +# sed -i -e 's#ForceCommand internal-sftp#ForceCommand internal-sftp -d /upload#' /etc/ssh/sshd_config && +# /usr/sbin/sshd -D +# " + networks: redis-cluster-net: driver: bridge diff --git a/server/data/chatter.js b/server/data/chatter.js index 5c9ce9ddc..b879bff59 100644 --- a/server/data/chatter.js +++ b/server/data/chatter.js @@ -22,135 +22,128 @@ const ftpSetup = { serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"] } }; + +const allcsvsToUpload = []; +const allErrors = []; + exports.default = async (req, res) => { // Only process if in production environment. if (process.env.NODE_ENV !== "production") { res.sendStatus(403); return; } - + // Only process if the appropriate token is provided. if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) { res.sendStatus(401); return; } - //Query for the List of Bodyshop Clients. 
- logger.log("chatter-start", "DEBUG", "api", null, null); - const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS); - const specificShopIds = req.body.bodyshopIds; // ['uuid] - const { start, end, skipUpload } = req.body; //YYYY-MM-DD - - const allcsvsToUpload = []; - const allErrors = []; try { - for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) { - logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, { - shopname: bodyshop.shopname - }); - try { - const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, { - bodyshopid: bodyshop.id, - start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"), - ...(end && { end: moment(end).endOf("day") }) - }); + //Query for the List of Bodyshop Clients. + logger.log("chatter-start", "DEBUG", "api", null, null); + const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS); + const specificShopIds = req.body.bodyshopIds; // ['uuid']; - const chatterObject = jobs.map((j) => { - return { - poc_trigger_code: bodyshops_by_pk.chatterid, - firstname: j.ownr_co_nm ? null : j.ownr_fn, - lastname: j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln, - transaction_id: j.ro_number, - email: j.ownr_ea, - phone_number: j.ownr_ph1 - }; - }); + const { start, end, skipUpload } = req.body; //YYYY-MM-DD - const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" }); + const batchSize = 10; - allcsvsToUpload.push({ - count: chatterObject.length, - csv: ret, - filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv` - }); + const shopsToProcess = + specificShopIds?.length > 0 ? 
bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops; + logger.log("chatter-shopsToProcess-generated", "DEBUG", "api", null, null); - logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, { - shopname: bodyshop.shopname - }); - } catch (error) { - //Error at the shop level. - logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, { - ...error - }); - - allErrors.push({ - bodyshopid: bodyshop.id, - imexshopid: bodyshop.imexshopid, - shopname: bodyshop.shopname, - fatal: true, - errors: [error.toString()] - }); - } finally { - allErrors.push({ - bodyshopid: bodyshop.id, - imexshopid: bodyshop.imexshopid, - shopname: bodyshop.shopname - }); - } - } - - if (skipUpload) { - for (const csvObj of allcsvsToUpload) { - fs.writeFile(`./logs/${csvObj.filename}`, csvObj.csv); - } - - sendServerEmail({ - subject: `Chatter Report ${moment().format("MM-DD-YY")}`, - text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))} - Uploaded: ${JSON.stringify( - allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })), - null, - 2 - )} - ` - }); - res.json(allcsvsToUpload); + if (shopsToProcess.length === 0) { + logger.log("chatter-shopsToProcess-empty", "DEBUG", "api", null, null); + res.sendStatus(200); return; } - const sftp = new Client(); - sftp.on("error", (errors) => logger.log("chatter-sftp-error", "ERROR", "api", null, { ...errors })); - try { - //Get the private key from AWS Secrets Manager. - ftpSetup.privateKey = await getPrivateKey(); + for (let i = 0; i < shopsToProcess.length; i += batchSize) { + const batch = shopsToProcess.slice(i, i + batchSize); + await processBatch(batch, start, end); - //Connect to the FTP and upload all. 
- await sftp.connect(ftpSetup); - - for (const csvObj of allcsvsToUpload) { - logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename }); - - const uploadResult = await sftp.put(Buffer.from(csvObj.xml), `/${csvObj.filename}`); - logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, { uploadResult }); + if (skipUpload) { + for (const csvObj of allcsvsToUpload) { + fs.writeFile(`./logs/${csvObj.filename}`, csvObj.csv); + } + } else { + await uploadViaSFTP(allcsvsToUpload); } - } catch (error) { - logger.log("chatter-sftp-error", "ERROR", "api", null, { ...error }); - } finally { - sftp.end(); + sendServerEmail({ subject: `Chatter Report ${moment().format("MM-DD-YY")}`, text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))} Uploaded: ${JSON.stringify( - allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })), + allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })), null, 2 )}` }); + + logger.log("chatter-end", "DEBUG", "api", null, null); res.sendStatus(200); } } catch (error) { - res.status(200).json(error); + logger.log("chatter-shopsToProcess-error", "ERROR", "api", null, { error: error.message, stack: error.stack }); + res.status(500).json({ error: error.message, stack: error.stack }); } }; +async function processBatch(batch, start, end) { + for (const bodyshop of batch) { + try { + logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, { + shopname: bodyshop.shopname + }); + + const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, { + bodyshopid: bodyshop.id, + start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"), + ...(end && { end: moment(end).endOf("day") }) + }); + + const chatterObject = jobs.map((j) => { + return { + poc_trigger_code: bodyshops_by_pk.chatterid, + firstname: j.ownr_co_nm ? null : j.ownr_fn, + lastname: j.ownr_co_nm ? 
j.ownr_co_nm : j.ownr_ln, + transaction_id: j.ro_number, + email: j.ownr_ea, + phone_number: j.ownr_ph1 + }; + }); + + const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" }); + + allcsvsToUpload.push({ + count: chatterObject.length, + csv: ret, + filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv` + }); + + logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, { + shopname: bodyshop.shopname + }); + } catch (error) { + //Error at the shop level. + logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack }); + + allErrors.push({ + bodyshopid: bodyshop.id, + imexshopid: bodyshop.imexshopid, + shopname: bodyshop.shopname, + fatal: true, + errors: [error.toString()] + }); + } finally { + allErrors.push({ + bodyshopid: bodyshop.id, + imexshopid: bodyshop.imexshopid, + shopname: bodyshop.shopname + }); + } + } +} + async function getPrivateKey() { // Connect to AWS Secrets Manager const client = new SecretsManagerClient({ region: "ca-central-1" }); @@ -160,10 +153,49 @@ async function getPrivateKey() { try { const { SecretString, SecretBinary } = await client.send(command); if (SecretString || SecretBinary) logger.log("chatter-retrieved-private-key", "DEBUG", "api", null, null); - const chatterPrivateKey = SecretString ? SecretString : Buffer.from(SecretBinary, "base64").toString("ascii"); + const chatterPrivateKey = SecretString + ? 
SecretString + : Buffer.from(SecretBinary, "base64").toString("ascii"); return chatterPrivateKey; } catch (error) { - logger.log("chatter-get-private-key", "ERROR", "api", null, error); - throw err; + logger.log("chatter-get-private-key", "ERROR", "api", null, { error: error.message, stack: error.stack }); + throw error; + } +} + +async function uploadViaSFTP(allcsvsToUpload) { + const sftp = new Client(); + sftp.on("error", (errors) => + logger.log("chatter-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack }) + ); + try { + //Get the private key from AWS Secrets Manager. + const privateKey = await getPrivateKey(); + + //Connect to the FTP and upload all. + await sftp.connect({ ...ftpSetup, privateKey }); + + for (const csvObj of allcsvsToUpload) { + try { + logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename }); + csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`); + logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, { + filename: csvObj.filename, + result: csvObj.result + }); + } catch (error) { + logger.log("chatter-sftp-upload-error", "ERROR", "api", null, { + filename: csvObj.filename, + error: error.message, + stack: error.stack + }); + throw error; + } + } + } catch (error) { + logger.log("chatter-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack }); + throw error; + } finally { + sftp.end(); } } diff --git a/upload/.gitignore b/upload/.gitignore new file mode 100644 index 000000000..d6b7ef32c --- /dev/null +++ b/upload/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore