From f55764e859f7f96a78d4ad2a9a479d0f1a3b18a7 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Wed, 9 Apr 2025 14:56:49 -0400 Subject: [PATCH 01/12] feature/IO-2282-VSSTA-Integration: - Boilerplate in new route - Fix issues with imgproxy - Clean up imgproxy --- ...nt-imgproxy-gallery.download.component.jsx | 10 +- ...s-documents-imgproxy-gallery.component.jsx | 2 +- server.js | 1 + server/accounting/pbs/pbs-ap-allocations.js | 2 +- server/accounting/pbs/pbs-job-export.js | 4 +- server/graphql-client/graphql-client.js | 14 ++- server/integrations/VSSTA/vsstaIntegration.js | 36 ++++++ server/integrations/VSSTA/vsstaMiddleware.js | 5 + server/media/imgproxy-media.js | 114 ++++++++++-------- server/media/media.js | 25 ++-- server/media/util/base64UrlEncode.js | 4 + server/media/util/createHmacSha256.js | 7 ++ server/routes/intergrationRoutes.js | 8 ++ server/routes/jobRoutes.js | 1 - 14 files changed, 152 insertions(+), 81 deletions(-) create mode 100644 server/integrations/VSSTA/vsstaIntegration.js create mode 100644 server/integrations/VSSTA/vsstaMiddleware.js create mode 100644 server/media/util/base64UrlEncode.js create mode 100644 server/media/util/createHmacSha256.js create mode 100644 server/routes/intergrationRoutes.js diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx index 6c08936dc..8644115fd 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx +++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx @@ -1,12 +1,10 @@ import { Button, Space } from "antd"; import axios from "axios"; -import React, { useState } from "react"; +import { useState } from "react"; import { useTranslation } from "react-i18next"; import { logImEXEvent } from 
"../../firebase/firebase.utils"; import cleanAxios from "../../utils/CleanAxios"; import formatBytes from "../../utils/formatbytes"; -//import yauzl from "yauzl"; - import { connect } from "react-redux"; import { createStructuredSelector } from "reselect"; import { selectBodyshop } from "../../redux/user/user.selectors"; @@ -28,7 +26,7 @@ const mapDispatchToProps = (dispatch) => ({ export default connect(mapStateToProps, mapDispatchToProps)(JobsDocumentsImgproxyDownloadButton); -export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, identifier }) { +export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, identifier, jobId }) { const { t } = useTranslation(); const [download, setDownload] = useState(null); const [loading, setLoading] = useState(false); @@ -46,6 +44,7 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i }; }); } + function standardMediaDownload(bufferData) { const a = document.createElement("a"); const url = window.URL.createObjectURL(new Blob([bufferData])); @@ -53,13 +52,14 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i a.download = `${identifier || "documents"}.zip`; a.click(); } + const handleDownload = async () => { logImEXEvent("jobs_documents_download"); setLoading(true); const zipUrl = await axios({ url: "/media/imgproxy/download", method: "POST", - data: { documentids: imagesToDownload.map((_) => _.id) } + data: { jobId, documentids: imagesToDownload.map((_) => _.id) } }); const theDownloadedZip = await cleanAxios({ diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx index a07ed0bf1..f99485dc8 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx +++ 
b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx @@ -75,7 +75,7 @@ function JobsDocumentsImgproxyComponent({ - + { app.use("/cdk", require("./server/routes/cdkRoutes")); app.use("/csi", require("./server/routes/csiRoutes")); app.use("/payroll", require("./server/routes/payrollRoutes")); + app.use("/integrations", require("./server/routes/intergrationRoutes")); // Default route for forbidden access app.get("/", (req, res) => { diff --git a/server/accounting/pbs/pbs-ap-allocations.js b/server/accounting/pbs/pbs-ap-allocations.js index 9574b166d..62bd84270 100644 --- a/server/accounting/pbs/pbs-ap-allocations.js +++ b/server/accounting/pbs/pbs-ap-allocations.js @@ -217,7 +217,7 @@ exports.PbsExportAp = async function (socket, { billids, txEnvelope }) { socket.emit("ap-export-success", billid); } else { - CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`); + CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`); socket.emit("ap-export-failure", { billid, error: AccountPostingChange.Message diff --git a/server/accounting/pbs/pbs-job-export.js b/server/accounting/pbs/pbs-job-export.js index c38560293..e3dc20dcf 100644 --- a/server/accounting/pbs/pbs-job-export.js +++ b/server/accounting/pbs/pbs-job-export.js @@ -105,14 +105,14 @@ exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte socket.emit("export-success", socket.JobData.id); } else { - CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`); + CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`); } } catch (error) { CdkBase.createLogEvent(socket, "ERROR", `Error encountered in CdkSelectedCustomer. 
${error}`); await InsertFailedExportLog(socket, error); } }; - +// Was Successful async function CheckForErrors(socket, response) { if (response.WasSuccessful === undefined || response.WasSuccessful === true) { CdkBase.createLogEvent(socket, "DEBUG", `Successful response from DMS. ${response.Message || ""}`); diff --git a/server/graphql-client/graphql-client.js b/server/graphql-client/graphql-client.js index 069386b73..79d86315b 100644 --- a/server/graphql-client/graphql-client.js +++ b/server/graphql-client/graphql-client.js @@ -1,17 +1,19 @@ const GraphQLClient = require("graphql-request").GraphQLClient; -const path = require("path"); -require("dotenv").config({ - path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) -}); + //New bug introduced with Graphql Request. // https://github.com/prisma-labs/graphql-request/issues/206 // const { Headers } = require("cross-fetch"); // global.Headers = global.Headers || Headers; -exports.client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { +const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { headers: { "x-hasura-admin-secret": process.env.HASURA_ADMIN_SECRET } }); -exports.unauthclient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT); +const unauthorizedClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT); + +module.exports = { + client, + unauthorizedClient +}; diff --git a/server/integrations/VSSTA/vsstaIntegration.js b/server/integrations/VSSTA/vsstaIntegration.js new file mode 100644 index 000000000..30f7531a9 --- /dev/null +++ b/server/integrations/VSSTA/vsstaIntegration.js @@ -0,0 +1,36 @@ +const client = require("../../graphql-client/graphql-client").client; + +/** + * VSSTA Integration + * @param req + * @param res + * @returns {Promise} + */ +const vsstaIntegration = async (req, res) => { + const { logger } = req; + + // Examplwe req.body + //{ + // "shop_id":"test", + // "“ro_nbr“":"71475", + // "vin_nbr":"12345678912345678", + // 
"pdf_download_link":"https://portal-staging.vssta.com/invoice_data/1500564", + // "“company_api_key“":"xxxxx", + // "scan_type":"PRE", + // "scan_fee":"119.00", + // "scanner_number":"1234", + // "scan_time":"2022-08-23 17:53:50", + // "technician":"Frank Jones", + // "year":"2021", + // "make":"TOYOTA", + // "model":"Tacoma SR5 grade" + // + // } + // 1 - We would want to get the Job by searching the ro_nbr and shop_id (The assumption) + + // 2 - We want to download the file provided from the pdf_download_link and associate (upload) it + // to S3 bucket for media, and insert a document record in the database, the file is base64 encoded (pdf), we will want to unencode it when storing it as a pdf + // We might not have to un-encode it, ultimately we want to send the base64 and the end is a pdf file the user can view from the documents section. +}; + +module.exports = vsstaIntegration; diff --git a/server/integrations/VSSTA/vsstaMiddleware.js b/server/integrations/VSSTA/vsstaMiddleware.js new file mode 100644 index 000000000..800f9bfa2 --- /dev/null +++ b/server/integrations/VSSTA/vsstaMiddleware.js @@ -0,0 +1,5 @@ +const vsstaMiddleware = (req, res, next) => { + next(); +}; + +module.exports = vsstaMiddleware; diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index fdb313984..d26b572ce 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -1,8 +1,12 @@ const path = require("path"); -require("dotenv").config({ - path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) -}); const logger = require("../utils/logger"); +const { Upload } = require("@aws-sdk/lib-storage"); +const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); +const { InstanceRegion } = require("../utils/instanceMgr"); +const archiver = require("archiver"); +const stream = require("node:stream"); +const base64UrlEncode = require("./util/base64UrlEncode"); +const createHmacSha256 = 
require("./util/createHmacSha256"); const { S3Client, PutObjectCommand, @@ -10,35 +14,31 @@ const { CopyObjectCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3"); -const { Upload } = require("@aws-sdk/lib-storage"); - -const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); -const crypto = require("crypto"); -const { InstanceRegion } = require("../utils/instanceMgr"); const { GET_DOCUMENTS_BY_JOB, QUERY_TEMPORARY_DOCS, GET_DOCUMENTS_BY_IDS, DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries"); -const archiver = require("archiver"); -const stream = require("node:stream"); const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN. -const imgproxyKey = process.env.IMGPROXY_KEY; const imgproxySalt = process.env.IMGPROXY_SALT; const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET; //Generate a signed upload link for the S3 bucket. //All uploads must be going to the same shop and jobid. 
-exports.generateSignedUploadUrls = async (req, res) => { +const generateSignedUploadUrls = async (req, res) => { const { filenames, bodyshopid, jobid } = req.body; try { - logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid }); + logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { + filenames, + bodyshopid, + jobid + }); const signedUrls = []; for (const filename of filenames) { - const key = filename; + const key = filename; const client = new S3Client({ region: InstanceRegion() }); const command = new PutObjectCommand({ Bucket: imgproxyDestinationBucket, @@ -67,7 +67,7 @@ exports.generateSignedUploadUrls = async (req, res) => { } }; -exports.getThumbnailUrls = async (req, res) => { +const getThumbnailUrls = async (req, res) => { const { jobid, billid } = req.body; try { @@ -86,10 +86,11 @@ exports.getThumbnailUrls = async (req, res) => { for (const document of data.documents) { //Format to follow: - /////< base 64 URL encoded to image path> - + /////< base 64 URL encoded to image path> //When working with documents from Cloudinary, the URL does not include the extension. + let key; + if (/\.[^/.]+$/.test(document.key)) { key = document.key; } else { @@ -98,12 +99,12 @@ exports.getThumbnailUrls = async (req, res) => { // Build the S3 path to the object. 
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`; const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path); + //Thumbnail Generation Block const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`; const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`); //Full Size URL block - const fullSizeProxyPath = `${base64UrlEncodedKeyString}`; const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`); @@ -114,8 +115,8 @@ exports.getThumbnailUrls = async (req, res) => { Bucket: imgproxyDestinationBucket, Key: key }); - const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 }); - s3Props.presignedGetUrl = presignedGetUrl; + + s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 }); const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`; const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`); @@ -146,40 +147,46 @@ exports.getThumbnailUrls = async (req, res) => { } }; -exports.getBillFiles = async (req, res) => { - //Givena bill ID, get the documents associated to it. -}; - -exports.downloadFiles = async (req, res) => { +const downloadFiles = async (req, res) => { //Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk - const { jobid, billid, documentids } = req.body; + const { jobId, billid, documentids } = req.body; + try { - logger.log("imgproxy-download", "DEBUG", req.user?.email, jobid, { billid, jobid, documentids }); + logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids }); //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components. 
const client = req.userGraphQLClient; + //Query for the keys of the document IDs const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids }); - //Using the Keys, get all of the S3 links, zip them, and send back to the client. + + //Using the Keys, get all the S3 links, zip them, and send back to the client. const s3client = new S3Client({ region: InstanceRegion() }); const archiveStream = archiver("zip"); + archiveStream.on("error", (error) => { console.error("Archival encountered an error:", error); throw new Error(error); }); + const passthrough = new stream.PassThrough(); archiveStream.pipe(passthrough); + for (const key of data.documents.map((d) => d.key)) { - const response = await s3client.send(new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key })); - // :: `response.Body` is a Buffer - console.log(path.basename(key)); + const response = await s3client.send( + new GetObjectCommand({ + Bucket: imgproxyDestinationBucket, + Key: key + }) + ); + archiveStream.append(response.Body, { name: path.basename(key) }); } - archiveStream.finalize(); + await archiveStream.finalize(); - const archiveKey = `archives/${jobid}/archive-${new Date().toISOString()}.zip`; + const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`; const parallelUploads3 = new Upload({ client: s3client, @@ -192,7 +199,7 @@ exports.downloadFiles = async (req, res) => { console.log(progress); }); - const uploadResult = await parallelUploads3.done(); + await parallelUploads3.done(); //Generate the presigned URL to download it. const presignedUrl = await getSignedUrl( s3client, @@ -203,8 +210,8 @@ exports.downloadFiles = async (req, res) => { res.json({ success: true, url: presignedUrl }); //Iterate over them, build the link based on the media type, and return the array. 
} catch (error) { - logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, { - jobid, + logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, { + jobId, billid, message: error.message, stack: error.stack @@ -213,7 +220,7 @@ exports.downloadFiles = async (req, res) => { } }; -exports.deleteFiles = async (req, res) => { +const deleteFiles = async (req, res) => { //Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future. //Mark as deleted from the documents section of the database. const { ids } = req.body; @@ -232,7 +239,7 @@ exports.deleteFiles = async (req, res) => { (async () => { try { // Delete the original object - const deleteResult = await s3client.send( + await s3client.send( new DeleteObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.key @@ -250,7 +257,7 @@ exports.deleteFiles = async (req, res) => { const result = await Promise.all(deleteTransactions); const errors = result.filter((d) => d.error); - //Delete only the succesful deletes. + //Delete only the successful deletes. 
const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, { ids: result.filter((t) => !t.error).map((d) => d.id) }); @@ -266,7 +273,7 @@ exports.deleteFiles = async (req, res) => { } }; -exports.moveFiles = async (req, res) => { +const moveFiles = async (req, res) => { const { documents, tojobid } = req.body; try { logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid }); @@ -278,7 +285,7 @@ exports.moveFiles = async (req, res) => { (async () => { try { // Copy the object to the new key - const copyresult = await s3client.send( + await s3client.send( new CopyObjectCommand({ Bucket: imgproxyDestinationBucket, CopySource: `${imgproxyDestinationBucket}/${document.from}`, @@ -288,7 +295,7 @@ exports.moveFiles = async (req, res) => { ); // Delete the original object - const deleteResult = await s3client.send( + await s3client.send( new DeleteObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.from @@ -297,7 +304,12 @@ exports.moveFiles = async (req, res) => { return document; } catch (error) { - return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket }; + return { + id: document.id, + from: document.from, + error: error, + bucket: imgproxyDestinationBucket + }; } })() ); @@ -307,6 +319,7 @@ exports.moveFiles = async (req, res) => { const errors = result.filter((d) => d.error); let mutations = ""; + result .filter((d) => !d.error) .forEach((d, idx) => { @@ -327,7 +340,7 @@ exports.moveFiles = async (req, res) => { }`); res.json({ errors, mutationResult }); } else { - res.json({ errors: "No images were succesfully moved on remote server. " }); + res.json({ errors: "No images were successfully moved on remote server. 
" }); } } catch (error) { logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, { @@ -340,9 +353,10 @@ exports.moveFiles = async (req, res) => { } }; -function base64UrlEncode(str) { - return Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, ""); -} -function createHmacSha256(data) { - return crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url"); -} +module.exports = { + generateSignedUploadUrls, + getThumbnailUrls, + downloadFiles, + deleteFiles, + moveFiles +}; diff --git a/server/media/media.js b/server/media/media.js index 06b1c9bb8..af9628c8a 100644 --- a/server/media/media.js +++ b/server/media/media.js @@ -1,14 +1,9 @@ -const path = require("path"); const _ = require("lodash"); const logger = require("../utils/logger"); const client = require("../graphql-client/graphql-client").client; const queries = require("../graphql-client/queries"); -require("dotenv").config({ - path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) -}); - -var cloudinary = require("cloudinary").v2; +const cloudinary = require("cloudinary").v2; cloudinary.config(process.env.CLOUDINARY_URL); const createSignedUploadURL = (req, res) => { @@ -16,8 +11,6 @@ const createSignedUploadURL = (req, res) => { res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET)); }; -exports.createSignedUploadURL = createSignedUploadURL; - const downloadFiles = (req, res) => { const { ids } = req.body; logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null); @@ -28,7 +21,6 @@ const downloadFiles = (req, res) => { }); res.send(url); }; -exports.downloadFiles = downloadFiles; const deleteFiles = async (req, res) => { const { ids } = req.body; @@ -91,8 +83,6 @@ const deleteFiles = async (req, res) => { } }; -exports.deleteFiles = deleteFiles; - const renameKeys = async (req, res) => { const { documents, tojobid } = req.body; logger.log("media-bulk-rename", 
"DEBUG", req.user.email, null, documents); @@ -102,13 +92,12 @@ const renameKeys = async (req, res) => { proms.push( (async () => { try { - const res = { + return { id: d.id, ...(await cloudinary.uploader.rename(d.from, d.to, { resource_type: DetermineFileType(d.type) })) }; - return res; } catch (error) { return { id: d.id, from: d.from, error: error }; } @@ -148,10 +137,9 @@ const renameKeys = async (req, res) => { }`); res.json({ errors, mutationResult }); } else { - res.json({ errors: "No images were succesfully moved on remote server. " }); + res.json({ errors: "No images were successfully moved on remote server. " }); } }; -exports.renameKeys = renameKeys; //Also needs to be updated in upload utility and mobile app. function DetermineFileType(filetype) { @@ -163,3 +151,10 @@ function DetermineFileType(filetype) { return "auto"; } + +module.exports = { + createSignedUploadURL, + downloadFiles, + deleteFiles, + renameKeys +}; diff --git a/server/media/util/base64UrlEncode.js b/server/media/util/base64UrlEncode.js new file mode 100644 index 000000000..4094148b3 --- /dev/null +++ b/server/media/util/base64UrlEncode.js @@ -0,0 +1,4 @@ +const base64UrlEncode = (str) => + Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, ""); + +module.exports = base64UrlEncode; diff --git a/server/media/util/createHmacSha256.js b/server/media/util/createHmacSha256.js new file mode 100644 index 000000000..05b7d52a3 --- /dev/null +++ b/server/media/util/createHmacSha256.js @@ -0,0 +1,7 @@ +const crypto = require("crypto"); + +const imgproxyKey = process.env.IMGPROXY_KEY; + +const createHmacSha256 = (data) => crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url"); + +module.exports = createHmacSha256; diff --git a/server/routes/intergrationRoutes.js b/server/routes/intergrationRoutes.js new file mode 100644 index 000000000..9d3fc20f4 --- /dev/null +++ b/server/routes/intergrationRoutes.js @@ -0,0 +1,8 @@ +const express = 
require("express"); +const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegration"); +const vsstaMiddleware = require("../integrations/VSSTA/vsstaMiddleware"); +const router = express.Router(); + +router.post("/vssta", vsstaMiddleware, vsstaIntegration); + +module.exports = router; diff --git a/server/routes/jobRoutes.js b/server/routes/jobRoutes.js index aab3e8823..e7c747907 100644 --- a/server/routes/jobRoutes.js +++ b/server/routes/jobRoutes.js @@ -1,6 +1,5 @@ const express = require("express"); const router = express.Router(); -const job = require("../job/job"); const ppc = require("../ccc/partspricechange"); const { partsScan } = require("../parts-scan/parts-scan"); const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMIddleware"); From 5adf591670cd99f444bd584e31cad84e8822cb2d Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Thu, 10 Apr 2025 09:27:49 -0400 Subject: [PATCH 02/12] feature/IO-2282-VSSTA-Integration: - Clean up imgproxy-media.js --- server/media/imgproxy-media.js | 43 ++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index d26b572ce..6bee7a6bc 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -50,17 +50,19 @@ const generateSignedUploadUrls = async (req, res) => { } logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls }); - res.json({ + + return res.json({ success: true, signedUrls }); } catch (error) { - res.status(400).json({ - success: false, + logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, { message: error.message, stack: error.stack }); - logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, { + + return res.status(400).json({ + success: false, message: error.message, stack: error.stack }); @@ -134,7 +136,7 @@ const getThumbnailUrls = async (req, res) => { }); } - res.json(proxiedUrls); + return 
res.json(proxiedUrls); //Iterate over them, build the link based on the media type, and return the array. } catch (error) { logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, { @@ -143,7 +145,8 @@ const getThumbnailUrls = async (req, res) => { message: error.message, stack: error.stack }); - res.status(400).json({ message: error.message, stack: error.stack }); + + return res.status(400).json({ message: error.message, stack: error.stack }); } }; @@ -169,9 +172,9 @@ const downloadFiles = async (req, res) => { throw new Error(error); }); - const passthrough = new stream.PassThrough(); + const passThrough = new stream.PassThrough(); - archiveStream.pipe(passthrough); + archiveStream.pipe(passThrough); for (const key of data.documents.map((d) => d.key)) { const response = await s3client.send( @@ -192,7 +195,7 @@ const downloadFiles = async (req, res) => { client: s3client, queueSize: 4, // optional concurrency configuration leavePartsOnError: false, // optional manually handle dropped parts - params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passthrough } + params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough } }); parallelUploads3.on("httpUploadProgress", (progress) => { @@ -200,6 +203,7 @@ const downloadFiles = async (req, res) => { }); await parallelUploads3.done(); + //Generate the presigned URL to download it. const presignedUrl = await getSignedUrl( s3client, @@ -207,7 +211,7 @@ const downloadFiles = async (req, res) => { { expiresIn: 360 } ); - res.json({ success: true, url: presignedUrl }); + return res.json({ success: true, url: presignedUrl }); //Iterate over them, build the link based on the media type, and return the array. 
} catch (error) { logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, { @@ -216,7 +220,8 @@ const downloadFiles = async (req, res) => { message: error.message, stack: error.stack }); - res.status(400).json({ message: error.message, stack: error.stack }); + + return res.status(400).json({ message: error.message, stack: error.stack }); } }; @@ -262,14 +267,15 @@ const deleteFiles = async (req, res) => { ids: result.filter((t) => !t.error).map((d) => d.id) }); - res.json({ errors, deleteMutationResult }); + return res.json({ errors, deleteMutationResult }); } catch (error) { logger.log("imgproxy-delete-files-error", "ERROR", req.user.email, null, { ids, message: error.message, stack: error.stack }); - res.status(400).json({ message: error.message, stack: error.stack }); + + return res.status(400).json({ message: error.message, stack: error.stack }); } }; @@ -334,14 +340,16 @@ const moveFiles = async (req, res) => { }); const client = req.userGraphQLClient; + if (mutations !== "") { const mutationResult = await client.request(`mutation { ${mutations} }`); - res.json({ errors, mutationResult }); - } else { - res.json({ errors: "No images were successfully moved on remote server. " }); + + return res.json({ errors, mutationResult }); } + + return res.json({ errors: "No images were successfully moved on remote server. 
" }); } catch (error) { logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, { documents, @@ -349,7 +357,8 @@ const moveFiles = async (req, res) => { message: error.message, stack: error.stack }); - res.status(400).json({ message: error.message, stack: error.stack }); + + return res.status(400).json({ message: error.message, stack: error.stack }); } }; From e8b9fcbc6e5a99d7b1d2468a3340a0f4d7de0205 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Thu, 10 Apr 2025 09:37:31 -0400 Subject: [PATCH 03/12] feature/IO-2282-VSSTA-Integration: - Clean up imgproxy-media.js --- server/media/imgproxy-media.js | 33 +++++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index 6bee7a6bc..5790ecfb7 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -25,8 +25,13 @@ const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437d const imgproxySalt = process.env.IMGPROXY_SALT; const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET; -//Generate a signed upload link for the S3 bucket. -//All uploads must be going to the same shop and jobid. +/** + * Generate a Signed URL Link for the s3 bucket. 
+ * All Uploads must be going to the same Shop and JobId + * @param req + * @param res + * @returns {Promise<*>} + */ const generateSignedUploadUrls = async (req, res) => { const { filenames, bodyshopid, jobid } = req.body; try { @@ -69,6 +74,12 @@ const generateSignedUploadUrls = async (req, res) => { } }; +/** + * Get Thumbnail URLS + * @param req + * @param res + * @returns {Promise<*>} + */ const getThumbnailUrls = async (req, res) => { const { jobid, billid } = req.body; @@ -150,6 +161,12 @@ const getThumbnailUrls = async (req, res) => { } }; +/** + * Download Files + * @param req + * @param res + * @returns {Promise<*>} + */ const downloadFiles = async (req, res) => { //Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk const { jobId, billid, documentids } = req.body; @@ -225,6 +242,12 @@ const downloadFiles = async (req, res) => { } }; +/** + * Delete Files + * @param req + * @param res + * @returns {Promise<*>} + */ const deleteFiles = async (req, res) => { //Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future. //Mark as deleted from the documents section of the database. 
@@ -279,6 +302,12 @@ const deleteFiles = async (req, res) => { } }; +/** + * Move Files + * @param req + * @param res + * @returns {Promise<*>} + */ const moveFiles = async (req, res) => { const { documents, tojobid } = req.body; try { From d444821cf74fd65792b037f49fd1d1e2a730909d Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 10:46:49 -0400 Subject: [PATCH 04/12] feature/IO-2282-VSSTA-Integration: - checkpoint --- server/graphql-client/queries.js | 28 ++++ server/integrations/VSSTA/vsstaIntegration.js | 36 ----- .../VSSTA/vsstaIntegrationRoute.js | 123 ++++++++++++++++++ server/integrations/VSSTA/vsstaMiddleware.js | 5 - server/media/imgproxy-media.js | 7 +- server/media/media.js | 31 ++--- server/media/util/determineFileType.js | 17 +++ .../middleware/vsstaIntegrationMiddleware.js | 17 +++ server/routes/intergrationRoutes.js | 4 +- 9 files changed, 205 insertions(+), 63 deletions(-) delete mode 100644 server/integrations/VSSTA/vsstaIntegration.js create mode 100644 server/integrations/VSSTA/vsstaIntegrationRoute.js delete mode 100644 server/integrations/VSSTA/vsstaMiddleware.js create mode 100644 server/media/util/determineFileType.js create mode 100644 server/middleware/vsstaIntegrationMiddleware.js diff --git a/server/graphql-client/queries.js b/server/graphql-client/queries.js index 16c955467..fe08897ba 100644 --- a/server/graphql-client/queries.js +++ b/server/graphql-client/queries.js @@ -2853,3 +2853,31 @@ query GET_BODYSHOP_BY_MERCHANTID($merchantID: String!) { email } }`; + +// Define the GraphQL query to get a job by RO number and shop ID +exports.GET_JOB_BY_RO_NUMBER_AND_SHOP_ID = ` + query GET_JOB_BY_RO_NUMBER_AND_SHOP_ID($roNumber: String!, $shopId: String!) 
{ + jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}) { + id + shopid + bodyshopid + bodyshop { + id + email + } + } + } +`; + +// Define the mutation to insert a new document +exports.INSERT_NEW_DOCUMENT = ` + mutation INSERT_NEW_DOCUMENT($docInput: [documents_insert_input!]!) { + insert_documents(objects: $docInput) { + returning { + id + name + key + } + } + } +`; diff --git a/server/integrations/VSSTA/vsstaIntegration.js b/server/integrations/VSSTA/vsstaIntegration.js deleted file mode 100644 index 30f7531a9..000000000 --- a/server/integrations/VSSTA/vsstaIntegration.js +++ /dev/null @@ -1,36 +0,0 @@ -const client = require("../../graphql-client/graphql-client").client; - -/** - * VSSTA Integration - * @param req - * @param res - * @returns {Promise} - */ -const vsstaIntegration = async (req, res) => { - const { logger } = req; - - // Examplwe req.body - //{ - // "shop_id":"test", - // "“ro_nbr“":"71475", - // "vin_nbr":"12345678912345678", - // "pdf_download_link":"https://portal-staging.vssta.com/invoice_data/1500564", - // "“company_api_key“":"xxxxx", - // "scan_type":"PRE", - // "scan_fee":"119.00", - // "scanner_number":"1234", - // "scan_time":"2022-08-23 17:53:50", - // "technician":"Frank Jones", - // "year":"2021", - // "make":"TOYOTA", - // "model":"Tacoma SR5 grade" - // - // } - // 1 - We would want to get the Job by searching the ro_nbr and shop_id (The assumption) - - // 2 - We want to download the file provided from the pdf_download_link and associate (upload) it - // to S3 bucket for media, and insert a document record in the database, the file is base64 encoded (pdf), we will want to unencode it when storing it as a pdf - // We might not have to un-encode it, ultimately we want to send the base64 and the end is a pdf file the user can view from the documents section. 
-}; - -module.exports = vsstaIntegration; diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js new file mode 100644 index 000000000..77f5094b9 --- /dev/null +++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js @@ -0,0 +1,123 @@ +const axios = require("axios"); +const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3"); +const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); +const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../../graphql-client/queries"); +const determineFileType = require("../../media/util/determineFileType"); +const { InstanceRegion } = require("../../utils/instanceMgr"); +const client = require("../../graphql-client/graphql-client").client; + +// Assume these are configured environment variables or constants +const S3_BUCKET = process.env.S3_BUCKET || "your-s3-bucket-name"; + +const vsstaIntegrationRoute = async (req, res) => { + const { logger } = req; + try { + const requiredParams = [ + "shop_id", + "ro_nbr", + "pdf_download_link", + "company_api_key", + "scan_type", + "scan_time", + "technician", + "year", + "make", + "model" + ]; + + const missingParams = requiredParams.filter((param) => !req.body[param]); + + if (missingParams.length > 0) { + logger.error(`Missing required parameters: ${missingParams.join(", ")}`); + return res.status(400).json({ + error: "Missing required parameters", + missingParams + }); + } + + const { shop_id, ro_nbr, pdf_download_link, scan_type, scan_time, technician, year, make, model, company_api_key } = + req.body; + + // 1. 
Get the job record by ro_number and shop_id + const jobResult = await client.request(GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, { + roNumber: ro_nbr, + shopId: shop_id + }); + + if (!jobResult.jobs || jobResult.jobs.length === 0) { + logger.error(`No job found for RO number ${ro_nbr} and shop ID ${shop_id}`); + return res.status(404).json({ error: "Job not found" }); + } + + const job = jobResult.jobs[0]; + logger.info(`Found job with ID ${job.id} for RO number ${ro_nbr}`); + + // 2. Download the PDF from the provided link + logger.info(`Downloading PDF from ${pdf_download_link}`); + const pdfResponse = await axios.get(pdf_download_link, { + responseType: "arraybuffer", + headers: { + "auth:token": company_api_key + } + }); + + // 3. Generate key for S3 + const timestamp = Date.now(); + const fileName = `VSSTA_${scan_type}_Scan_${timestamp}.pdf`; + const s3Key = `${job.bodyshopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}-${timestamp}.pdf`; + + // 4. Generate presigned URL for S3 upload + logger.info(`Generating presigned URL for S3 key ${s3Key}`); + + const s3Client = new S3Client({ region: InstanceRegion() }); + + const putCommand = new PutObjectCommand({ + Bucket: S3_BUCKET, + Key: s3Key, + ContentType: "application/pdf", + StorageClass: "INTELLIGENT_TIERING" + }); + + const presignedUrl = await getSignedUrl(s3Client, putCommand, { expiresIn: 360 }); + + // 5. Upload file to S3 + logger.info(`Uploading PDF to S3 with key ${s3Key}`); + await axios.put(presignedUrl, pdfResponse.data, { + headers: { "Content-Type": "application/pdf" } + }); + + // 6. 
Create document record in database + const documentMeta = { + jobid: job.id, // Matches jobid (uuid, nullable) + uploaded_by: "VSSTA Integration", // Matches uploaded_by (text) + name: fileName, // Matches name (text, nullable) + key: s3Key, // Matches key (text, default: '0'::text) + type: determineFileType("application/pdf"), // Matches type (text, nullable), using determineFileType + extension: "pdf", // Matches extension (text, nullable) + bodyshopid: job.bodyshopid, // Matches bodyshopid (uuid, nullable) + size: pdfResponse.data.length, // Matches size (integer, default: 0) + takenat: scan_time, // Matches takenat (timestamp with time zone, nullable) + description: `VSSTA ${scan_type} scan for ${year} ${make} ${model}, performed by ${technician} at ${scan_time}` // Not in schema, will be ignored by the database + }; + + const documentInsert = await client.request(INSERT_NEW_DOCUMENT, { + docInput: [documentMeta] + }); + + if (documentInsert.insert_documents?.returning?.length > 0) { + logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`); + return res.status(200).json({ + message: "VSSTA integration successful", + documentId: documentInsert.insert_documents.returning[0].id + }); + } else { + logger.error("Failed to create document record"); + return res.status(500).json({ error: "Failed to create document record" }); + } + } catch (error) { + logger.error(`VSSTA integration error: ${error.message}`, error); + return res.status(500).json({ error: error.message }); + } +}; + +module.exports = vsstaIntegrationRoute; diff --git a/server/integrations/VSSTA/vsstaMiddleware.js b/server/integrations/VSSTA/vsstaMiddleware.js deleted file mode 100644 index 800f9bfa2..000000000 --- a/server/integrations/VSSTA/vsstaMiddleware.js +++ /dev/null @@ -1,5 +0,0 @@ -const vsstaMiddleware = (req, res, next) => { - next(); -}; - -module.exports = vsstaMiddleware; diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index 
5790ecfb7..e30aee90e 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -215,9 +215,10 @@ const downloadFiles = async (req, res) => { params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough } }); - parallelUploads3.on("httpUploadProgress", (progress) => { - console.log(progress); - }); + // Disabled progress logging for upload, uncomment if needed + // parallelUploads3.on("httpUploadProgress", (progress) => { + // console.log(progress); + // }); await parallelUploads3.done(); diff --git a/server/media/media.js b/server/media/media.js index af9628c8a..1f207ea14 100644 --- a/server/media/media.js +++ b/server/media/media.js @@ -1,7 +1,8 @@ const _ = require("lodash"); const logger = require("../utils/logger"); const client = require("../graphql-client/graphql-client").client; -const queries = require("../graphql-client/queries"); +const determineFileType = require("./util/determineFileType"); +const { DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries"); const cloudinary = require("cloudinary").v2; cloudinary.config(process.env.CLOUDINARY_URL); @@ -13,22 +14,26 @@ const createSignedUploadURL = (req, res) => { const downloadFiles = (req, res) => { const { ids } = req.body; + logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null); const url = cloudinary.utils.download_zip_url({ public_ids: ids, flatten_folders: true }); + res.send(url); }; const deleteFiles = async (req, res) => { const { ids } = req.body; - const types = _.groupBy(ids, (x) => DetermineFileType(x.type)); + + const types = _.groupBy(ids, (x) => determineFileType(x.type)); logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null); const returns = []; + if (types.image) { //delete images @@ -39,8 +44,8 @@ const deleteFiles = async (req, res) => { ) ); } + if (types.video) { - //delete images returns.push( returns.push( await cloudinary.api.delete_resources( types.video.map((x) => x.key), @@ -48,8 +53,8 @@ const 
deleteFiles = async (req, res) => { ) ); } + if (types.raw) { - //delete images returns.push( returns.push( await cloudinary.api.delete_resources( types.raw.map((x) => `${x.key}.${x.extension}`), @@ -60,6 +65,7 @@ const deleteFiles = async (req, res) => { // Delete it on apollo. const successfulDeletes = []; + returns.forEach((resType) => { Object.keys(resType.deleted).forEach((key) => { if (resType.deleted[key] === "deleted" || resType.deleted[key] === "not_found") { @@ -69,7 +75,7 @@ const deleteFiles = async (req, res) => { }); try { - const result = await client.request(queries.DELETE_MEDIA_DOCUMENTS, { + const result = await client.request(DELETE_MEDIA_DOCUMENTS, { ids: ids.filter((i) => successfulDeletes.includes(i.key)).map((i) => i.id) }); @@ -85,9 +91,11 @@ const deleteFiles = async (req, res) => { const renameKeys = async (req, res) => { const { documents, tojobid } = req.body; + logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents); const proms = []; + documents.forEach((d) => { proms.push( (async () => { @@ -95,7 +103,7 @@ const renameKeys = async (req, res) => { return { id: d.id, ...(await cloudinary.uploader.rename(d.from, d.to, { - resource_type: DetermineFileType(d.type) + resource_type: determineFileType(d.type) })) }; } catch (error) { @@ -141,17 +149,6 @@ const renameKeys = async (req, res) => { } }; -//Also needs to be updated in upload utility and mobile app. 
-function DetermineFileType(filetype) { - if (!filetype) return "auto"; - else if (filetype.startsWith("image")) return "image"; - else if (filetype.startsWith("video")) return "video"; - else if (filetype.startsWith("application/pdf")) return "image"; - else if (filetype.startsWith("application")) return "raw"; - - return "auto"; -} - module.exports = { createSignedUploadURL, downloadFiles, diff --git a/server/media/util/determineFileType.js b/server/media/util/determineFileType.js new file mode 100644 index 000000000..9bd8a4732 --- /dev/null +++ b/server/media/util/determineFileType.js @@ -0,0 +1,17 @@ +/** + * @description Determines the file type based on the filetype string. + * @note Also needs to be updated in the mobile app utility. + * @param filetype + * @returns {string} + */ +const determineFileType = (filetype) => { + if (!filetype) return "auto"; + else if (filetype.startsWith("image")) return "image"; + else if (filetype.startsWith("video")) return "video"; + else if (filetype.startsWith("application/pdf")) return "image"; + else if (filetype.startsWith("application")) return "raw"; + + return "auto"; +}; + +module.exports = determineFileType; diff --git a/server/middleware/vsstaIntegrationMiddleware.js b/server/middleware/vsstaIntegrationMiddleware.js new file mode 100644 index 000000000..7739c4a7a --- /dev/null +++ b/server/middleware/vsstaIntegrationMiddleware.js @@ -0,0 +1,17 @@ +/** + * VSSTA Integration Middleware + * @param req + * @param res + * @param next + * @returns {*} + */ +const vsstaIntegrationMiddleware = (req, res, next) => { + if (req.headers["vssta-integration-secret"] !== process.env.VSSTA_INTEGRATION_SECRET) { + return res.status(401).send("Unauthorized"); + } + + req.isIntegrationAuthorized = true; + next(); +}; + +module.exports = vsstaIntegrationMiddleware; diff --git a/server/routes/intergrationRoutes.js b/server/routes/intergrationRoutes.js index 9d3fc20f4..841805675 100644 --- a/server/routes/intergrationRoutes.js +++ 
b/server/routes/intergrationRoutes.js @@ -1,6 +1,6 @@ const express = require("express"); -const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegration"); -const vsstaMiddleware = require("../integrations/VSSTA/vsstaMiddleware"); +const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegrationRoute"); +const vsstaMiddleware = require("../middleware/vsstaIntegrationMiddleware"); const router = express.Router(); router.post("/vssta", vsstaMiddleware, vsstaIntegration); From 35a7222f5e17d4ad1e40664acf4ef7bcfa612775 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 11:29:44 -0400 Subject: [PATCH 05/12] feature/IO-2282-VSSTA-Integration: - checkpoint --- server/graphql-client/queries.js | 9 +-- .../VSSTA/vsstaIntegrationRoute.js | 55 ++++++++++++------- 2 files changed, 36 insertions(+), 28 deletions(-) diff --git a/server/graphql-client/queries.js b/server/graphql-client/queries.js index fe08897ba..761550f1d 100644 --- a/server/graphql-client/queries.js +++ b/server/graphql-client/queries.js @@ -2856,15 +2856,10 @@ query GET_BODYSHOP_BY_MERCHANTID($merchantID: String!) { // Define the GraphQL query to get a job by RO number and shop ID exports.GET_JOB_BY_RO_NUMBER_AND_SHOP_ID = ` - query GET_JOB_BY_RO_NUMBER_AND_SHOP_ID($roNumber: String!, $shopId: String!) { - jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}) { + query GET_JOB_BY_RO_NUMBER_AND_SHOP_ID($roNumber: String!, $shopId: uuid!) 
{ + jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}, limit: 1) { id shopid - bodyshopid - bodyshop { - id - email - } } } `; diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js index 77f5094b9..c40fce462 100644 --- a/server/integrations/VSSTA/vsstaIntegrationRoute.js +++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js @@ -2,15 +2,14 @@ const axios = require("axios"); const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3"); const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../../graphql-client/queries"); -const determineFileType = require("../../media/util/determineFileType"); const { InstanceRegion } = require("../../utils/instanceMgr"); const client = require("../../graphql-client/graphql-client").client; -// Assume these are configured environment variables or constants -const S3_BUCKET = process.env.S3_BUCKET || "your-s3-bucket-name"; +const S3_BUCKET = process.env.IMGPROXY_DESTINATION_BUCKET; const vsstaIntegrationRoute = async (req, res) => { const { logger } = req; + try { const requiredParams = [ "shop_id", @@ -28,7 +27,10 @@ const vsstaIntegrationRoute = async (req, res) => { const missingParams = requiredParams.filter((param) => !req.body[param]); if (missingParams.length > 0) { - logger.error(`Missing required parameters: ${missingParams.join(", ")}`); + logger.log(`vssta-integration-missing-param`, "error", "api", "vssta", { + params: missingParams + }); + return res.status(400).json({ error: "Missing required parameters", missingParams @@ -45,29 +47,31 @@ const vsstaIntegrationRoute = async (req, res) => { }); if (!jobResult.jobs || jobResult.jobs.length === 0) { - logger.error(`No job found for RO number ${ro_nbr} and shop ID ${shop_id}`); + logger.log(`vssta-integration-missing-ro`, "error", "api", "vssta"); + return res.status(404).json({ error: "Job not found" 
}); } const job = jobResult.jobs[0]; - logger.info(`Found job with ID ${job.id} for RO number ${ro_nbr}`); + + logger.logger.info(`Found job with ID ${job.id} for RO number ${ro_nbr}`); // 2. Download the PDF from the provided link - logger.info(`Downloading PDF from ${pdf_download_link}`); + logger.logger.info(`Downloading PDF from ${pdf_download_link}`); const pdfResponse = await axios.get(pdf_download_link, { - responseType: "arraybuffer", - headers: { - "auth:token": company_api_key - } + responseType: "arraybuffer" + // headers: { + // "auth:token": company_api_key + // } }); // 3. Generate key for S3 const timestamp = Date.now(); const fileName = `VSSTA_${scan_type}_Scan_${timestamp}.pdf`; - const s3Key = `${job.bodyshopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}-${timestamp}.pdf`; + const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}-${timestamp}.pdf`; // 4. Generate presigned URL for S3 upload - logger.info(`Generating presigned URL for S3 key ${s3Key}`); + logger.logger.info(`Generating presigned URL for S3 key ${s3Key}`); const s3Client = new S3Client({ region: InstanceRegion() }); @@ -81,7 +85,8 @@ const vsstaIntegrationRoute = async (req, res) => { const presignedUrl = await getSignedUrl(s3Client, putCommand, { expiresIn: 360 }); // 5. 
Upload file to S3 - logger.info(`Uploading PDF to S3 with key ${s3Key}`); + logger.logger.info(`Uploading PDF to S3 with key ${s3Key}`); + await axios.put(presignedUrl, pdfResponse.data, { headers: { "Content-Type": "application/pdf" } }); @@ -92,12 +97,13 @@ const vsstaIntegrationRoute = async (req, res) => { uploaded_by: "VSSTA Integration", // Matches uploaded_by (text) name: fileName, // Matches name (text, nullable) key: s3Key, // Matches key (text, default: '0'::text) - type: determineFileType("application/pdf"), // Matches type (text, nullable), using determineFileType + // type: determineFileType("application/pdf"), // Matches type (text, nullable), using determineFileType + // Ask Patrick why determineFileType just returns image... + type: "application/pdf", // Matches type (text, nullable), extension: "pdf", // Matches extension (text, nullable) - bodyshopid: job.bodyshopid, // Matches bodyshopid (uuid, nullable) + bodyshopid: job.shopid, // Matches bodyshopid (uuid, nullable) size: pdfResponse.data.length, // Matches size (integer, default: 0) - takenat: scan_time, // Matches takenat (timestamp with time zone, nullable) - description: `VSSTA ${scan_type} scan for ${year} ${make} ${model}, performed by ${technician} at ${scan_time}` // Not in schema, will be ignored by the database + takenat: scan_time // Matches takenat (timestamp with time zone, nullable) }; const documentInsert = await client.request(INSERT_NEW_DOCUMENT, { @@ -105,17 +111,24 @@ const vsstaIntegrationRoute = async (req, res) => { }); if (documentInsert.insert_documents?.returning?.length > 0) { - logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`); + logger.logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`); return res.status(200).json({ message: "VSSTA integration successful", documentId: documentInsert.insert_documents.returning[0].id }); } else { - logger.error("Failed to create document record"); + 
logger.log(`vssta-integration-failed-to-create-document-record`, "error", "api", "vssta", { + params: missingParams + }); + return res.status(500).json({ error: "Failed to create document record" }); } } catch (error) { - logger.error(`VSSTA integration error: ${error.message}`, error); + logger.log(`vssta-integration-general`, "error", "api", "vssta", { + error: error?.message, + stack: error?.stack + }); + return res.status(500).json({ error: error.message }); } }; From f09cb7b247ccf246f373e8ae55aa13155bf6ca49 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 12:40:33 -0400 Subject: [PATCH 06/12] feature/IO-2282-VSSTA-Integration: - Finish Integration --- .../VSSTA/vsstaIntegrationRoute.js | 105 +++++++++--------- 1 file changed, 55 insertions(+), 50 deletions(-) diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js index c40fce462..4fa0b800f 100644 --- a/server/integrations/VSSTA/vsstaIntegrationRoute.js +++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js @@ -5,25 +5,34 @@ const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../.. 
const { InstanceRegion } = require("../../utils/instanceMgr"); const client = require("../../graphql-client/graphql-client").client; -const S3_BUCKET = process.env.IMGPROXY_DESTINATION_BUCKET; +const S3_BUCKET = process.env?.IMGPROXY_DESTINATION_BUCKET; + +/** + * @description VSSTA integration route + * @type {string[]} + */ +const requiredParams = [ + "shop_id", + "ro_nbr", + "pdf_download_link", + "company_api_key", + "scan_type", + "scan_time", + "technician", + "year", + "make", + "model" +]; const vsstaIntegrationRoute = async (req, res) => { const { logger } = req; - try { - const requiredParams = [ - "shop_id", - "ro_nbr", - "pdf_download_link", - "company_api_key", - "scan_type", - "scan_time", - "technician", - "year", - "make", - "model" - ]; + if (!S3_BUCKET) { + logger.log("vssta-integration-missing-bucket", "error", "api", "vssta"); + return res.status(500).json({ error: "Improper configuration" }); + } + try { const missingParams = requiredParams.filter((param) => !req.body[param]); if (missingParams.length > 0) { @@ -54,25 +63,24 @@ const vsstaIntegrationRoute = async (req, res) => { const job = jobResult.jobs[0]; - logger.logger.info(`Found job with ID ${job.id} for RO number ${ro_nbr}`); - - // 2. Download the PDF from the provided link - logger.logger.info(`Downloading PDF from ${pdf_download_link}`); + // 2. Download the base64-encoded PDF string from the provided link const pdfResponse = await axios.get(pdf_download_link, { - responseType: "arraybuffer" - // headers: { - // "auth:token": company_api_key - // } + responseType: "text", // Expect base64 string + headers: { + "auth-token": company_api_key + } }); - // 3. Generate key for S3 + // 3. Decode the base64 string to a PDF buffer + const base64String = pdfResponse.data.replace(/^data:application\/pdf;base64,/, ""); + const pdfBuffer = Buffer.from(base64String, "base64"); + + // 4. 
Generate key for S3 const timestamp = Date.now(); const fileName = `VSSTA_${scan_type}_Scan_${timestamp}.pdf`; const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}-${timestamp}.pdf`; - // 4. Generate presigned URL for S3 upload - logger.logger.info(`Generating presigned URL for S3 key ${s3Key}`); - + // 5. Generate presigned URL for S3 upload const s3Client = new S3Client({ region: InstanceRegion() }); const putCommand = new PutObjectCommand({ @@ -84,45 +92,42 @@ const vsstaIntegrationRoute = async (req, res) => { const presignedUrl = await getSignedUrl(s3Client, putCommand, { expiresIn: 360 }); - // 5. Upload file to S3 - logger.logger.info(`Uploading PDF to S3 with key ${s3Key}`); - - await axios.put(presignedUrl, pdfResponse.data, { + // 6. Upload the decoded PDF to S3 + await axios.put(presignedUrl, pdfBuffer, { headers: { "Content-Type": "application/pdf" } }); - // 6. Create document record in database + // 7. Create document record in database const documentMeta = { - jobid: job.id, // Matches jobid (uuid, nullable) - uploaded_by: "VSSTA Integration", // Matches uploaded_by (text) - name: fileName, // Matches name (text, nullable) - key: s3Key, // Matches key (text, default: '0'::text) - // type: determineFileType("application/pdf"), // Matches type (text, nullable), using determineFileType - // Ask Patrick why determineFileType just returns image... 
- type: "application/pdf", // Matches type (text, nullable), - extension: "pdf", // Matches extension (text, nullable) - bodyshopid: job.shopid, // Matches bodyshopid (uuid, nullable) - size: pdfResponse.data.length, // Matches size (integer, default: 0) - takenat: scan_time // Matches takenat (timestamp with time zone, nullable) + jobid: job.id, + uploaded_by: "VSSTA Integration", + name: fileName, + key: s3Key, + type: "application/pdf", + extension: "pdf", + bodyshopid: job.shopid, + size: pdfBuffer.length, + takenat: scan_time }; const documentInsert = await client.request(INSERT_NEW_DOCUMENT, { docInput: [documentMeta] }); - if (documentInsert.insert_documents?.returning?.length > 0) { - logger.logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`); - return res.status(200).json({ - message: "VSSTA integration successful", - documentId: documentInsert.insert_documents.returning[0].id - }); - } else { + // Reversed flow: check for error case + if (!documentInsert.insert_documents?.returning?.length) { logger.log(`vssta-integration-failed-to-create-document-record`, "error", "api", "vssta", { params: missingParams }); - return res.status(500).json({ error: "Failed to create document record" }); } + + // Success case + logger.logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`); + return res.status(200).json({ + message: "VSSTA integration successful", + documentId: documentInsert.insert_documents.returning[0].id + }); } catch (error) { logger.log(`vssta-integration-general`, "error", "api", "vssta", { error: error?.message, From 91fe1f4af9a5425b060bf02dec0c1d92acc77a7b Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 12:55:38 -0400 Subject: [PATCH 07/12] feature/IO-2282-VSSTA-Integration: - Finish Integration --- server/graphql-client/queries.js | 3 +++ server/integrations/VSSTA/vsstaIntegrationRoute.js | 10 ++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff 
--git a/server/graphql-client/queries.js b/server/graphql-client/queries.js index 761550f1d..83341bebb 100644 --- a/server/graphql-client/queries.js +++ b/server/graphql-client/queries.js @@ -2860,6 +2860,9 @@ exports.GET_JOB_BY_RO_NUMBER_AND_SHOP_ID = ` jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}, limit: 1) { id shopid + bodyshop { + timezone + } } } `; diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js index 4fa0b800f..b9d9ac5bd 100644 --- a/server/integrations/VSSTA/vsstaIntegrationRoute.js +++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js @@ -3,6 +3,7 @@ const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3"); const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../../graphql-client/queries"); const { InstanceRegion } = require("../../utils/instanceMgr"); +const moment = require("moment/moment"); const client = require("../../graphql-client/graphql-client").client; const S3_BUCKET = process.env?.IMGPROXY_DESTINATION_BUCKET; @@ -76,9 +77,9 @@ const vsstaIntegrationRoute = async (req, res) => { const pdfBuffer = Buffer.from(base64String, "base64"); // 4. Generate key for S3 - const timestamp = Date.now(); - const fileName = `VSSTA_${scan_type}_Scan_${timestamp}.pdf`; - const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}-${timestamp}.pdf`; + const timestamp = moment(scan_time).tz(job.bodyshop.timezone).format("YYYYMMDD-HHmmss"); + const fileName = `${timestamp}_VSSTA_${scan_type}_Scan_${technician}_${year}_${make}_${model}`; + const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}.pdf`; // 5. 
Generate presigned URL for S3 upload const s3Client = new S3Client({ region: InstanceRegion() }); @@ -114,7 +115,6 @@ const vsstaIntegrationRoute = async (req, res) => { docInput: [documentMeta] }); - // Reversed flow: check for error case if (!documentInsert.insert_documents?.returning?.length) { logger.log(`vssta-integration-failed-to-create-document-record`, "error", "api", "vssta", { params: missingParams @@ -122,8 +122,6 @@ const vsstaIntegrationRoute = async (req, res) => { return res.status(500).json({ error: "Failed to create document record" }); } - // Success case - logger.logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`); return res.status(200).json({ message: "VSSTA integration successful", documentId: documentInsert.insert_documents.returning[0].id From 0b7a23d5552ce9eb00849eef7714ed526aed88f0 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 13:02:54 -0400 Subject: [PATCH 08/12] feature/IO-2282-VSSTA-Integration: - include some tests for media utils --- server/media/tests/media-utils.test.js | 98 ++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 server/media/tests/media-utils.test.js diff --git a/server/media/tests/media-utils.test.js b/server/media/tests/media-utils.test.js new file mode 100644 index 000000000..b25678da1 --- /dev/null +++ b/server/media/tests/media-utils.test.js @@ -0,0 +1,98 @@ +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import determineFileType from "../util/determineFileType"; +import base64UrlEncode from "../util/base64UrlEncode"; + +describe("Media Utils", () => { + describe("base64UrlEncode", () => { + it("should encode string to base64url format", () => { + expect(base64UrlEncode("hello world")).toBe("aGVsbG8gd29ybGQ"); + }); + + it('should replace "+" with "-"', () => { + // '+' in base64 appears when encoding specific binary data + expect(base64UrlEncode("hello+world")).toBe("aGVsbG8rd29ybGQ"); + }); + + 
it('should replace "/" with "_"', () => { + expect(base64UrlEncode("path/to/resource")).toBe("cGF0aC90by9yZXNvdXJjZQ"); + }); + + it('should remove trailing "=" characters', () => { + // Using a string that will produce padding in base64 + expect(base64UrlEncode("padding==")).toBe("cGFkZGluZz09"); + }); + }); + + describe("createHmacSha256", () => { + let createHmacSha256; + const originalEnv = process.env; + + beforeEach(async () => { + vi.resetModules(); + process.env = { ...originalEnv }; + process.env.IMGPROXY_KEY = "test-key"; + + // Dynamically import the module after setting env var + const module = await import("../util/createHmacSha256"); + createHmacSha256 = module.default; + }); + + afterEach(() => { + process.env = originalEnv; + }); + + it("should create a valid HMAC SHA-256 hash", () => { + const result = createHmacSha256("test-data"); + expect(typeof result).toBe("string"); + expect(result.length).toBeGreaterThan(0); + }); + + it("should produce consistent hashes for the same input", () => { + const hash1 = createHmacSha256("test-data"); + const hash2 = createHmacSha256("test-data"); + expect(hash1).toBe(hash2); + }); + + it("should produce different hashes for different inputs", () => { + const hash1 = createHmacSha256("test-data-1"); + const hash2 = createHmacSha256("test-data-2"); + expect(hash1).not.toBe(hash2); + }); + }); + + describe("determineFileType", () => { + it('should return "auto" when no filetype is provided', () => { + expect(determineFileType()).toBe("auto"); + expect(determineFileType(null)).toBe("auto"); + expect(determineFileType(undefined)).toBe("auto"); + }); + + it('should return "image" for image filetypes', () => { + expect(determineFileType("image/jpeg")).toBe("image"); + expect(determineFileType("image/png")).toBe("image"); + expect(determineFileType("image/gif")).toBe("image"); + }); + + it('should return "video" for video filetypes', () => { + expect(determineFileType("video/mp4")).toBe("video"); + 
expect(determineFileType("video/quicktime")).toBe("video"); + expect(determineFileType("video/x-msvideo")).toBe("video"); + }); + + it('should return "image" for PDF files', () => { + expect(determineFileType("application/pdf")).toBe("image"); + }); + + it('should return "raw" for other application types', () => { + expect(determineFileType("application/zip")).toBe("raw"); + expect(determineFileType("application/json")).toBe("raw"); + expect(determineFileType("application/msword")).toBe("raw"); + }); + + it('should return "auto" for unrecognized types', () => { + expect(determineFileType("audio/mpeg")).toBe("auto"); + expect(determineFileType("text/html")).toBe("auto"); + expect(determineFileType("unknown-type")).toBe("auto"); + }); + }); +}); From 6035d9440414b658805fcee2932ca31a9e73b166 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 13:05:42 -0400 Subject: [PATCH 09/12] feature/IO-2282-VSSTA-Integration: - doc blocks / cleanup --- server/media/media.js | 22 ++++++++++++++++++++++ server/media/util/base64UrlEncode.js | 5 +++++ server/media/util/createHmacSha256.js | 5 +++++ 3 files changed, 32 insertions(+) diff --git a/server/media/media.js b/server/media/media.js index 1f207ea14..7cb8a1b5d 100644 --- a/server/media/media.js +++ b/server/media/media.js @@ -7,11 +7,21 @@ const { DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries"); const cloudinary = require("cloudinary").v2; cloudinary.config(process.env.CLOUDINARY_URL); +/** + * @description Creates a signed upload URL for Cloudinary. + * @param req + * @param res + */ const createSignedUploadURL = (req, res) => { logger.log("media-signed-upload", "DEBUG", req.user.email, null, null); res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET)); }; +/** + * @description Downloads files from Cloudinary. 
+ * @param req + * @param res + */ const downloadFiles = (req, res) => { const { ids } = req.body; @@ -25,6 +35,12 @@ const downloadFiles = (req, res) => { res.send(url); }; +/** + * @description Deletes files from Cloudinary and Apollo. + * @param req + * @param res + * @returns {Promise} + */ const deleteFiles = async (req, res) => { const { ids } = req.body; @@ -89,6 +105,12 @@ const deleteFiles = async (req, res) => { } }; +/** + * @description Renames keys in Cloudinary and updates the database. + * @param req + * @param res + * @returns {Promise} + */ const renameKeys = async (req, res) => { const { documents, tojobid } = req.body; diff --git a/server/media/util/base64UrlEncode.js b/server/media/util/base64UrlEncode.js index 4094148b3..24537cb2c 100644 --- a/server/media/util/base64UrlEncode.js +++ b/server/media/util/base64UrlEncode.js @@ -1,3 +1,8 @@ +/** + * @description Converts a string to a base64url encoded string. + * @param str + * @returns {string} + */ const base64UrlEncode = (str) => Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, ""); diff --git a/server/media/util/createHmacSha256.js b/server/media/util/createHmacSha256.js index 05b7d52a3..6be9d6022 100644 --- a/server/media/util/createHmacSha256.js +++ b/server/media/util/createHmacSha256.js @@ -2,6 +2,11 @@ const crypto = require("crypto"); const imgproxyKey = process.env.IMGPROXY_KEY; +/** + * @description Creates a HMAC SHA-256 hash of the given data. 
+ * @param data + * @returns {string} + */ const createHmacSha256 = (data) => crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url"); module.exports = createHmacSha256; From 30f34a17eae88263a988338def44853f50667d21 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 13:20:07 -0400 Subject: [PATCH 10/12] feature/IO-2282-VSSTA-Integration: - doc blocks / cleanup --- server/notifications/scenarioBuilders.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/notifications/scenarioBuilders.js b/server/notifications/scenarioBuilders.js index b3f4d0fd2..d1bdb22a0 100644 --- a/server/notifications/scenarioBuilders.js +++ b/server/notifications/scenarioBuilders.js @@ -182,7 +182,7 @@ const newMediaAddedReassignedBuilder = (data) => { : data.changedFields?.jobid && data.changedFields.jobid.old !== data.changedFields.jobid.new ? "moved to this job" : "updated"; - const body = `An ${mediaType} has been ${action}.`; + const body = `A ${mediaType} has been ${action}.`; return buildNotification(data, "notifications.job.newMediaAdded", body, { mediaType, From 0e75f54d6e25da919e07eb8a3caced3d4a9bf9d3 Mon Sep 17 00:00:00 2001 From: Dave Richer Date: Tue, 15 Apr 2025 13:39:34 -0400 Subject: [PATCH 11/12] feature/IO-2282-VSSTA-Integration: - doc blocks / cleanup --- server/integrations/VSSTA/vsstaIntegrationRoute.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js index b9d9ac5bd..c98b689ca 100644 --- a/server/integrations/VSSTA/vsstaIntegrationRoute.js +++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js @@ -1,3 +1,7 @@ +// Notes: At the moment we take in RO Number, and ShopID. 
This is fragile: the RO number can often be null, so we need
+// to ask whether the Job ID could be sent to us directly; that way we would not need to care about the bodyshop, and we
+// would not risk receiving a null RO number.
+
 const axios = require("axios");
 const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
 const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

From aa6ad109c90d315210ff8300bcd53c73e6abab35 Mon Sep 17 00:00:00 2001
From: Dave Richer
Date: Tue, 15 Apr 2025 14:21:28 -0400
Subject: [PATCH 12/12] feature/IO-3187-Admin-Enhancements
 - Minor cleanup

---
 server/integrations/VSSTA/vsstaIntegrationRoute.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js
index c98b689ca..f7444477a 100644
--- a/server/integrations/VSSTA/vsstaIntegrationRoute.js
+++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js
@@ -51,8 +51,8 @@ const vsstaIntegrationRoute = async (req, res) => {
     });
   }
 
-  const { shop_id, ro_nbr, pdf_download_link, scan_type, scan_time, technician, year, make, model, company_api_key } =
-    req.body;
+  // technician, year, make, and model are also available.
+  const { shop_id, ro_nbr, pdf_download_link, scan_type, scan_time, company_api_key } = req.body;
 
   // 1. Get the job record by ro_number and shop_id
   const jobResult = await client.request(GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, {
@@ -82,7 +82,7 @@ const vsstaIntegrationRoute = async (req, res) => {
   // 4. Generate key for S3
   const timestamp = moment(scan_time).tz(job.bodyshop.timezone).format("YYYYMMDD-HHmmss");
-  const fileName = `${timestamp}_VSSTA_${scan_type}_Scan_${technician}_${year}_${make}_${model}`;
+  const fileName = `${timestamp}_VSSTA_${scan_type}`;
   const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}.pdf`;
 
   // 5. Generate presigned URL for S3 upload