diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx index 6c08936dc..8644115fd 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx +++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx @@ -1,12 +1,10 @@ import { Button, Space } from "antd"; import axios from "axios"; -import React, { useState } from "react"; +import { useState } from "react"; import { useTranslation } from "react-i18next"; import { logImEXEvent } from "../../firebase/firebase.utils"; import cleanAxios from "../../utils/CleanAxios"; import formatBytes from "../../utils/formatbytes"; -//import yauzl from "yauzl"; - import { connect } from "react-redux"; import { createStructuredSelector } from "reselect"; import { selectBodyshop } from "../../redux/user/user.selectors"; @@ -28,7 +26,7 @@ const mapDispatchToProps = (dispatch) => ({ export default connect(mapStateToProps, mapDispatchToProps)(JobsDocumentsImgproxyDownloadButton); -export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, identifier }) { +export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, identifier, jobId }) { const { t } = useTranslation(); const [download, setDownload] = useState(null); const [loading, setLoading] = useState(false); @@ -46,6 +44,7 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i }; }); } + function standardMediaDownload(bufferData) { const a = document.createElement("a"); const url = window.URL.createObjectURL(new Blob([bufferData])); @@ -53,13 +52,14 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i a.download = `${identifier || "documents"}.zip`; a.click(); } + const handleDownload = async () => { 
logImEXEvent("jobs_documents_download"); setLoading(true); const zipUrl = await axios({ url: "/media/imgproxy/download", method: "POST", - data: { documentids: imagesToDownload.map((_) => _.id) } + data: { jobId, documentids: imagesToDownload.map((_) => _.id) } }); const theDownloadedZip = await cleanAxios({ diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx index a07ed0bf1..f99485dc8 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx +++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx @@ -75,7 +75,7 @@ function JobsDocumentsImgproxyComponent({ - + { app.use("/cdk", require("./server/routes/cdkRoutes")); app.use("/csi", require("./server/routes/csiRoutes")); app.use("/payroll", require("./server/routes/payrollRoutes")); + app.use("/integrations", require("./server/routes/intergrationRoutes")); // Default route for forbidden access app.get("/", (req, res) => { diff --git a/server/accounting/pbs/pbs-ap-allocations.js b/server/accounting/pbs/pbs-ap-allocations.js index 9574b166d..62bd84270 100644 --- a/server/accounting/pbs/pbs-ap-allocations.js +++ b/server/accounting/pbs/pbs-ap-allocations.js @@ -217,7 +217,7 @@ exports.PbsExportAp = async function (socket, { billids, txEnvelope }) { socket.emit("ap-export-success", billid); } else { - CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`); + CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`); socket.emit("ap-export-failure", { billid, error: AccountPostingChange.Message diff --git a/server/accounting/pbs/pbs-job-export.js b/server/accounting/pbs/pbs-job-export.js index c38560293..e3dc20dcf 100644 --- a/server/accounting/pbs/pbs-job-export.js +++ b/server/accounting/pbs/pbs-job-export.js @@ -105,14 +105,14 @@ 
exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte socket.emit("export-success", socket.JobData.id); } else { - CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`); + CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`); } } catch (error) { CdkBase.createLogEvent(socket, "ERROR", `Error encountered in CdkSelectedCustomer. ${error}`); await InsertFailedExportLog(socket, error); } }; - +// Was Successful async function CheckForErrors(socket, response) { if (response.WasSuccessful === undefined || response.WasSuccessful === true) { CdkBase.createLogEvent(socket, "DEBUG", `Successful response from DMS. ${response.Message || ""}`); diff --git a/server/graphql-client/graphql-client.js b/server/graphql-client/graphql-client.js index 069386b73..79d86315b 100644 --- a/server/graphql-client/graphql-client.js +++ b/server/graphql-client/graphql-client.js @@ -1,17 +1,19 @@ const GraphQLClient = require("graphql-request").GraphQLClient; -const path = require("path"); -require("dotenv").config({ - path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) -}); + //New bug introduced with Graphql Request. 
// https://github.com/prisma-labs/graphql-request/issues/206 // const { Headers } = require("cross-fetch"); // global.Headers = global.Headers || Headers; -exports.client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { +const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { headers: { "x-hasura-admin-secret": process.env.HASURA_ADMIN_SECRET } }); -exports.unauthclient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT); +const unauthorizedClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT); + +module.exports = { + client, + unauthorizedClient +}; diff --git a/server/integrations/VSSTA/vsstaIntegration.js b/server/integrations/VSSTA/vsstaIntegration.js new file mode 100644 index 000000000..30f7531a9 --- /dev/null +++ b/server/integrations/VSSTA/vsstaIntegration.js @@ -0,0 +1,36 @@ +const client = require("../../graphql-client/graphql-client").client; + +/** + * VSSTA Integration + * @param req + * @param res + * @returns {Promise} + */ +const vsstaIntegration = async (req, res) => { + const { logger } = req; + + // Example req.body + //{ + // "shop_id":"test", + // "“ro_nbr“":"71475", + // "vin_nbr":"12345678912345678", + // "pdf_download_link":"https://portal-staging.vssta.com/invoice_data/1500564", + // "“company_api_key“":"xxxxx", + // "scan_type":"PRE", + // "scan_fee":"119.00", + // "scanner_number":"1234", + // "scan_time":"2022-08-23 17:53:50", + // "technician":"Frank Jones", + // "year":"2021", + // "make":"TOYOTA", + // "model":"Tacoma SR5 grade" + // + // } + // 1 - We would want to get the Job by searching the ro_nbr and shop_id (The assumption) + + // 2 - We want to download the file provided from the pdf_download_link and associate (upload) it + // to S3 bucket for media, and insert a document record in the database, the file is base64 encoded (pdf), we will want to unencode it when storing it as a pdf + // We might not have to un-encode it, ultimately we want to send the base64 and the end is a pdf file the user can view from the 
documents section. +}; + +module.exports = vsstaIntegration; diff --git a/server/integrations/VSSTA/vsstaMiddleware.js b/server/integrations/VSSTA/vsstaMiddleware.js new file mode 100644 index 000000000..800f9bfa2 --- /dev/null +++ b/server/integrations/VSSTA/vsstaMiddleware.js @@ -0,0 +1,5 @@ +const vsstaMiddleware = (req, res, next) => { + next(); +}; + +module.exports = vsstaMiddleware; diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index fdb313984..d26b572ce 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -1,8 +1,12 @@ const path = require("path"); -require("dotenv").config({ - path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) -}); const logger = require("../utils/logger"); +const { Upload } = require("@aws-sdk/lib-storage"); +const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); +const { InstanceRegion } = require("../utils/instanceMgr"); +const archiver = require("archiver"); +const stream = require("node:stream"); +const base64UrlEncode = require("./util/base64UrlEncode"); +const createHmacSha256 = require("./util/createHmacSha256"); const { S3Client, PutObjectCommand, @@ -10,35 +14,31 @@ const { CopyObjectCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3"); -const { Upload } = require("@aws-sdk/lib-storage"); - -const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); -const crypto = require("crypto"); -const { InstanceRegion } = require("../utils/instanceMgr"); const { GET_DOCUMENTS_BY_JOB, QUERY_TEMPORARY_DOCS, GET_DOCUMENTS_BY_IDS, DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries"); -const archiver = require("archiver"); -const stream = require("node:stream"); const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN. 
-const imgproxyKey = process.env.IMGPROXY_KEY; const imgproxySalt = process.env.IMGPROXY_SALT; const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET; //Generate a signed upload link for the S3 bucket. //All uploads must be going to the same shop and jobid. -exports.generateSignedUploadUrls = async (req, res) => { +const generateSignedUploadUrls = async (req, res) => { const { filenames, bodyshopid, jobid } = req.body; try { - logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid }); + logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { + filenames, + bodyshopid, + jobid + }); const signedUrls = []; for (const filename of filenames) { - const key = filename; + const key = filename; const client = new S3Client({ region: InstanceRegion() }); const command = new PutObjectCommand({ Bucket: imgproxyDestinationBucket, @@ -67,7 +67,7 @@ exports.generateSignedUploadUrls = async (req, res) => { } }; -exports.getThumbnailUrls = async (req, res) => { +const getThumbnailUrls = async (req, res) => { const { jobid, billid } = req.body; try { @@ -86,10 +86,11 @@ exports.getThumbnailUrls = async (req, res) => { for (const document of data.documents) { //Format to follow: - /////< base 64 URL encoded to image path> - + /////< base 64 URL encoded to image path> //When working with documents from Cloudinary, the URL does not include the extension. + let key; + if (/\.[^/.]+$/.test(document.key)) { key = document.key; } else { @@ -98,12 +99,12 @@ exports.getThumbnailUrls = async (req, res) => { // Build the S3 path to the object. 
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`; const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path); + //Thumbnail Generation Block const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`; const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`); //Full Size URL block - const fullSizeProxyPath = `${base64UrlEncodedKeyString}`; const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`); @@ -114,8 +115,8 @@ exports.getThumbnailUrls = async (req, res) => { Bucket: imgproxyDestinationBucket, Key: key }); - const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 }); - s3Props.presignedGetUrl = presignedGetUrl; + + s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 }); const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`; const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`); @@ -146,40 +147,46 @@ exports.getThumbnailUrls = async (req, res) => { } }; -exports.getBillFiles = async (req, res) => { - //Givena bill ID, get the documents associated to it. -}; - -exports.downloadFiles = async (req, res) => { +const downloadFiles = async (req, res) => { //Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk - const { jobid, billid, documentids } = req.body; + const { jobId, billid, documentids } = req.body; + try { - logger.log("imgproxy-download", "DEBUG", req.user?.email, jobid, { billid, jobid, documentids }); + logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids }); //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components. 
const client = req.userGraphQLClient; + //Query for the keys of the document IDs const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids }); - //Using the Keys, get all of the S3 links, zip them, and send back to the client. + + //Using the Keys, get all the S3 links, zip them, and send back to the client. const s3client = new S3Client({ region: InstanceRegion() }); const archiveStream = archiver("zip"); + archiveStream.on("error", (error) => { console.error("Archival encountered an error:", error); throw new Error(error); }); + const passthrough = new stream.PassThrough(); archiveStream.pipe(passthrough); + for (const key of data.documents.map((d) => d.key)) { - const response = await s3client.send(new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key })); - // :: `response.Body` is a Buffer - console.log(path.basename(key)); + const response = await s3client.send( + new GetObjectCommand({ + Bucket: imgproxyDestinationBucket, + Key: key + }) + ); + archiveStream.append(response.Body, { name: path.basename(key) }); } - archiveStream.finalize(); + await archiveStream.finalize(); - const archiveKey = `archives/${jobid}/archive-${new Date().toISOString()}.zip`; + const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`; const parallelUploads3 = new Upload({ client: s3client, @@ -192,7 +199,7 @@ exports.downloadFiles = async (req, res) => { console.log(progress); }); - const uploadResult = await parallelUploads3.done(); + await parallelUploads3.done(); //Generate the presigned URL to download it. const presignedUrl = await getSignedUrl( s3client, @@ -203,8 +210,8 @@ exports.downloadFiles = async (req, res) => { res.json({ success: true, url: presignedUrl }); //Iterate over them, build the link based on the media type, and return the array. 
} catch (error) { - logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, { - jobid, + logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, { + jobId, billid, message: error.message, stack: error.stack @@ -213,7 +220,7 @@ exports.downloadFiles = async (req, res) => { } }; -exports.deleteFiles = async (req, res) => { +const deleteFiles = async (req, res) => { //Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future. //Mark as deleted from the documents section of the database. const { ids } = req.body; @@ -232,7 +239,7 @@ exports.deleteFiles = async (req, res) => { (async () => { try { // Delete the original object - const deleteResult = await s3client.send( + await s3client.send( new DeleteObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.key @@ -250,7 +257,7 @@ exports.deleteFiles = async (req, res) => { const result = await Promise.all(deleteTransactions); const errors = result.filter((d) => d.error); - //Delete only the succesful deletes. + //Delete only the successful deletes. 
const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, { ids: result.filter((t) => !t.error).map((d) => d.id) }); @@ -266,7 +273,7 @@ exports.deleteFiles = async (req, res) => { } }; -exports.moveFiles = async (req, res) => { +const moveFiles = async (req, res) => { const { documents, tojobid } = req.body; try { logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid }); @@ -278,7 +285,7 @@ exports.moveFiles = async (req, res) => { (async () => { try { // Copy the object to the new key - const copyresult = await s3client.send( + await s3client.send( new CopyObjectCommand({ Bucket: imgproxyDestinationBucket, CopySource: `${imgproxyDestinationBucket}/${document.from}`, @@ -288,7 +295,7 @@ exports.moveFiles = async (req, res) => { ); // Delete the original object - const deleteResult = await s3client.send( + await s3client.send( new DeleteObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.from @@ -297,7 +304,12 @@ exports.moveFiles = async (req, res) => { return document; } catch (error) { - return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket }; + return { + id: document.id, + from: document.from, + error: error, + bucket: imgproxyDestinationBucket + }; } })() ); @@ -307,6 +319,7 @@ exports.moveFiles = async (req, res) => { const errors = result.filter((d) => d.error); let mutations = ""; + result .filter((d) => !d.error) .forEach((d, idx) => { @@ -327,7 +340,7 @@ exports.moveFiles = async (req, res) => { }`); res.json({ errors, mutationResult }); } else { - res.json({ errors: "No images were succesfully moved on remote server. " }); + res.json({ errors: "No images were successfully moved on remote server. 
" }); } } catch (error) { logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, { @@ -340,9 +353,10 @@ exports.moveFiles = async (req, res) => { } }; -function base64UrlEncode(str) { - return Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, ""); -} -function createHmacSha256(data) { - return crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url"); -} +module.exports = { + generateSignedUploadUrls, + getThumbnailUrls, + downloadFiles, + deleteFiles, + moveFiles +}; diff --git a/server/media/media.js b/server/media/media.js index 06b1c9bb8..af9628c8a 100644 --- a/server/media/media.js +++ b/server/media/media.js @@ -1,14 +1,9 @@ -const path = require("path"); const _ = require("lodash"); const logger = require("../utils/logger"); const client = require("../graphql-client/graphql-client").client; const queries = require("../graphql-client/queries"); -require("dotenv").config({ - path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) -}); - -var cloudinary = require("cloudinary").v2; +const cloudinary = require("cloudinary").v2; cloudinary.config(process.env.CLOUDINARY_URL); const createSignedUploadURL = (req, res) => { @@ -16,8 +11,6 @@ const createSignedUploadURL = (req, res) => { res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET)); }; -exports.createSignedUploadURL = createSignedUploadURL; - const downloadFiles = (req, res) => { const { ids } = req.body; logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null); @@ -28,7 +21,6 @@ const downloadFiles = (req, res) => { }); res.send(url); }; -exports.downloadFiles = downloadFiles; const deleteFiles = async (req, res) => { const { ids } = req.body; @@ -91,8 +83,6 @@ const deleteFiles = async (req, res) => { } }; -exports.deleteFiles = deleteFiles; - const renameKeys = async (req, res) => { const { documents, tojobid } = req.body; logger.log("media-bulk-rename", 
"DEBUG", req.user.email, null, documents); @@ -102,13 +92,12 @@ const renameKeys = async (req, res) => { proms.push( (async () => { try { - const res = { + return { id: d.id, ...(await cloudinary.uploader.rename(d.from, d.to, { resource_type: DetermineFileType(d.type) })) }; - return res; } catch (error) { return { id: d.id, from: d.from, error: error }; } @@ -148,10 +137,9 @@ const renameKeys = async (req, res) => { }`); res.json({ errors, mutationResult }); } else { - res.json({ errors: "No images were succesfully moved on remote server. " }); + res.json({ errors: "No images were successfully moved on remote server. " }); } }; -exports.renameKeys = renameKeys; //Also needs to be updated in upload utility and mobile app. function DetermineFileType(filetype) { @@ -163,3 +151,10 @@ function DetermineFileType(filetype) { return "auto"; } + +module.exports = { + createSignedUploadURL, + downloadFiles, + deleteFiles, + renameKeys +}; diff --git a/server/media/util/base64UrlEncode.js b/server/media/util/base64UrlEncode.js new file mode 100644 index 000000000..4094148b3 --- /dev/null +++ b/server/media/util/base64UrlEncode.js @@ -0,0 +1,4 @@ +const base64UrlEncode = (str) => + Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, ""); + +module.exports = base64UrlEncode; diff --git a/server/media/util/createHmacSha256.js b/server/media/util/createHmacSha256.js new file mode 100644 index 000000000..05b7d52a3 --- /dev/null +++ b/server/media/util/createHmacSha256.js @@ -0,0 +1,7 @@ +const crypto = require("crypto"); + +const imgproxyKey = process.env.IMGPROXY_KEY; + +const createHmacSha256 = (data) => crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url"); + +module.exports = createHmacSha256; diff --git a/server/routes/intergrationRoutes.js b/server/routes/intergrationRoutes.js new file mode 100644 index 000000000..9d3fc20f4 --- /dev/null +++ b/server/routes/intergrationRoutes.js @@ -0,0 +1,8 @@ +const express = 
require("express"); +const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegration"); +const vsstaMiddleware = require("../integrations/VSSTA/vsstaMiddleware"); +const router = express.Router(); + +router.post("/vssta", vsstaMiddleware, vsstaIntegration); + +module.exports = router; diff --git a/server/routes/jobRoutes.js b/server/routes/jobRoutes.js index aab3e8823..e7c747907 100644 --- a/server/routes/jobRoutes.js +++ b/server/routes/jobRoutes.js @@ -1,6 +1,5 @@ const express = require("express"); const router = express.Router(); -const job = require("../job/job"); const ppc = require("../ccc/partspricechange"); const { partsScan } = require("../parts-scan/parts-scan"); const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMIddleware");