diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx
index 6c08936dc..8644115fd 100644
--- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx
+++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx
@@ -1,12 +1,10 @@
import { Button, Space } from "antd";
import axios from "axios";
-import React, { useState } from "react";
+import { useState } from "react";
import { useTranslation } from "react-i18next";
import { logImEXEvent } from "../../firebase/firebase.utils";
import cleanAxios from "../../utils/CleanAxios";
import formatBytes from "../../utils/formatbytes";
-//import yauzl from "yauzl";
-
import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
import { selectBodyshop } from "../../redux/user/user.selectors";
@@ -28,7 +26,7 @@ const mapDispatchToProps = (dispatch) => ({
export default connect(mapStateToProps, mapDispatchToProps)(JobsDocumentsImgproxyDownloadButton);
-export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, identifier }) {
+export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, identifier, jobId }) {
const { t } = useTranslation();
const [download, setDownload] = useState(null);
const [loading, setLoading] = useState(false);
@@ -46,6 +44,7 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i
};
});
}
+
function standardMediaDownload(bufferData) {
const a = document.createElement("a");
const url = window.URL.createObjectURL(new Blob([bufferData]));
@@ -53,13 +52,14 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i
a.download = `${identifier || "documents"}.zip`;
a.click();
}
+
const handleDownload = async () => {
logImEXEvent("jobs_documents_download");
setLoading(true);
const zipUrl = await axios({
url: "/media/imgproxy/download",
method: "POST",
- data: { documentids: imagesToDownload.map((_) => _.id) }
+ data: { jobId, documentids: imagesToDownload.map((_) => _.id) }
});
const theDownloadedZip = await cleanAxios({
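The hunk above ends mid-statement in the source. For orientation, a condensed sketch of `handleDownload`'s full round trip, reconstructed from the hunks shown; the `{ success, url }` response shape comes from `downloadFiles` in `server/media/imgproxy-media.js`, and the `responseType` option is an assumption:

```js
// Sketch only: jobId, imagesToDownload, cleanAxios, and standardMediaDownload are the
// names used in the hunks above; the responseType option is an assumption.
const handleDownloadSketch = async () => {
  const zipUrl = await axios({
    url: "/media/imgproxy/download",
    method: "POST",
    data: { jobId, documentids: imagesToDownload.map((image) => image.id) }
  });

  // The server zips the originals into S3 and responds { success, url } with a
  // short-lived presigned GET URL (see downloadFiles in server/media/imgproxy-media.js).
  const theDownloadedZip = await cleanAxios({
    url: zipUrl.data.url,
    method: "GET",
    responseType: "arraybuffer" // assumption: raw bytes for the Blob in standardMediaDownload
  });

  standardMediaDownload(theDownloadedZip.data);
};
```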
diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx
index a07ed0bf1..f99485dc8 100644
--- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx
+++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx
@@ -75,7 +75,7 @@ function JobsDocumentsImgproxyComponent({
(hunk truncated in source; the hunk that follows belongs to the server entry file, whose diff header was lost, and registers the new integrations router)
   app.use("/cdk", require("./server/routes/cdkRoutes"));
   app.use("/csi", require("./server/routes/csiRoutes"));
   app.use("/payroll", require("./server/routes/payrollRoutes"));
+  app.use("/integrations", require("./server/routes/intergrationRoutes"));
   // Default route for forbidden access
   app.get("/", (req, res) => {
diff --git a/server/accounting/pbs/pbs-ap-allocations.js b/server/accounting/pbs/pbs-ap-allocations.js
index 9574b166d..62bd84270 100644
--- a/server/accounting/pbs/pbs-ap-allocations.js
+++ b/server/accounting/pbs/pbs-ap-allocations.js
@@ -217,7 +217,7 @@ exports.PbsExportAp = async function (socket, { billids, txEnvelope }) {
socket.emit("ap-export-success", billid);
} else {
- CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`);
+ CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`);
socket.emit("ap-export-failure", {
billid,
error: AccountPostingChange.Message
diff --git a/server/accounting/pbs/pbs-job-export.js b/server/accounting/pbs/pbs-job-export.js
index c38560293..e3dc20dcf 100644
--- a/server/accounting/pbs/pbs-job-export.js
+++ b/server/accounting/pbs/pbs-job-export.js
@@ -105,14 +105,14 @@ exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte
socket.emit("export-success", socket.JobData.id);
} else {
- CdkBase.createLogEvent(socket, "ERROR", `Export was not succesful.`);
+ CdkBase.createLogEvent(socket, "ERROR", `Export was not successful.`);
}
} catch (error) {
CdkBase.createLogEvent(socket, "ERROR", `Error encountered in CdkSelectedCustomer. ${error}`);
await InsertFailedExportLog(socket, error);
}
};
-
+// Inspect the WasSuccessful flag on the DMS response and log the outcome.
async function CheckForErrors(socket, response) {
if (response.WasSuccessful === undefined || response.WasSuccessful === true) {
CdkBase.createLogEvent(socket, "DEBUG", `Successful response from DMS. ${response.Message || ""}`);
diff --git a/server/graphql-client/graphql-client.js b/server/graphql-client/graphql-client.js
index 069386b73..79d86315b 100644
--- a/server/graphql-client/graphql-client.js
+++ b/server/graphql-client/graphql-client.js
@@ -1,17 +1,19 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
-const path = require("path");
-require("dotenv").config({
- path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
-});
+
//New bug introduced with Graphql Request.
// https://github.com/prisma-labs/graphql-request/issues/206
// const { Headers } = require("cross-fetch");
// global.Headers = global.Headers || Headers;
-exports.client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
+const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
"x-hasura-admin-secret": process.env.HASURA_ADMIN_SECRET
}
});
-exports.unauthclient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT);
+const unauthorizedClient = new GraphQLClient(process.env.GRAPHQL_ENDPOINT);
+
+module.exports = {
+ client,
+ unauthorizedClient
+};
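Since the per-module dotenv bootstrap was dropped here (and in `imgproxy-media.js` and `media.js`), `GRAPHQL_ENDPOINT` and `HASURA_ADMIN_SECRET` must already be in the environment before this module is first required. A minimal consumer sketch, assuming the entry point loads dotenv exactly once:

```js
// Assumption: the app entry point loads the environment once, before any module
// that reads process.env at require() time.
require("dotenv").config({ path: `.env.${process.env.NODE_ENV || "development"}` });

const { client } = require("./server/graphql-client/graphql-client");
const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID } = require("./server/graphql-client/queries");

async function findJob(roNumber, shopId) {
  const data = await client.request(GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, { roNumber, shopId });
  return data.jobs[0]; // undefined when no job matches
}
```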
diff --git a/server/graphql-client/queries.js b/server/graphql-client/queries.js
index 7a5e0b8d2..7f358e95d 100644
--- a/server/graphql-client/queries.js
+++ b/server/graphql-client/queries.js
@@ -2902,3 +2902,29 @@ query GET_USER_BY_EMAIL($email: String!) {
}
}
}`;
+
+// Define the GraphQL query to get a job by RO number and shop ID
+exports.GET_JOB_BY_RO_NUMBER_AND_SHOP_ID = `
+ query GET_JOB_BY_RO_NUMBER_AND_SHOP_ID($roNumber: String!, $shopId: uuid!) {
+ jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}, limit: 1) {
+ id
+ shopid
+ bodyshop {
+ timezone
+ }
+ }
+ }
+`;
+
+// Define the mutation to insert a new document
+exports.INSERT_NEW_DOCUMENT = `
+ mutation INSERT_NEW_DOCUMENT($docInput: [documents_insert_input!]!) {
+ insert_documents(objects: $docInput) {
+ returning {
+ id
+ name
+ key
+ }
+ }
+ }
+`;
diff --git a/server/integrations/VSSTA/vsstaIntegrationRoute.js b/server/integrations/VSSTA/vsstaIntegrationRoute.js
new file mode 100644
index 000000000..f7444477a
--- /dev/null
+++ b/server/integrations/VSSTA/vsstaIntegrationRoute.js
@@ -0,0 +1,143 @@
+// Notes: At the moment we take in an RO number and a shop ID. This is fragile because the RO number can often be
+// null. Ask whether VSSTA can send the job ID itself; then we would not need to resolve the bodyshop at all and
+// there would be no risk of a null lookup.
+
+const axios = require("axios");
+const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
+const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
+const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../../graphql-client/queries");
+const { InstanceRegion } = require("../../utils/instanceMgr");
+const moment = require("moment-timezone"); // .tz() below requires moment-timezone, not plain moment
+const client = require("../../graphql-client/graphql-client").client;
+
+const S3_BUCKET = process.env?.IMGPROXY_DESTINATION_BUCKET;
+
+/**
+ * @description Request-body parameters required by the VSSTA integration route.
+ * @type {string[]}
+ */
+const requiredParams = [
+ "shop_id",
+ "ro_nbr",
+ "pdf_download_link",
+ "company_api_key",
+ "scan_type",
+ "scan_time",
+ "technician",
+ "year",
+ "make",
+ "model"
+];
+
+const vsstaIntegrationRoute = async (req, res) => {
+ const { logger } = req;
+
+ if (!S3_BUCKET) {
+ logger.log("vssta-integration-missing-bucket", "error", "api", "vssta");
+ return res.status(500).json({ error: "Improper configuration" });
+ }
+
+ try {
+ const missingParams = requiredParams.filter((param) => !req.body[param]);
+
+ if (missingParams.length > 0) {
+ logger.log(`vssta-integration-missing-param`, "error", "api", "vssta", {
+ params: missingParams
+ });
+
+ return res.status(400).json({
+ error: "Missing required parameters",
+ missingParams
+ });
+ }
+
+    // technician, year, make, and model are also available.
+ const { shop_id, ro_nbr, pdf_download_link, scan_type, scan_time, company_api_key } = req.body;
+
+ // 1. Get the job record by ro_number and shop_id
+ const jobResult = await client.request(GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, {
+ roNumber: ro_nbr,
+ shopId: shop_id
+ });
+
+ if (!jobResult.jobs || jobResult.jobs.length === 0) {
+ logger.log(`vssta-integration-missing-ro`, "error", "api", "vssta");
+
+ return res.status(404).json({ error: "Job not found" });
+ }
+
+ const job = jobResult.jobs[0];
+
+ // 2. Download the base64-encoded PDF string from the provided link
+ const pdfResponse = await axios.get(pdf_download_link, {
+ responseType: "text", // Expect base64 string
+ headers: {
+ "auth-token": company_api_key
+ }
+ });
+
+ // 3. Decode the base64 string to a PDF buffer
+ const base64String = pdfResponse.data.replace(/^data:application\/pdf;base64,/, "");
+ const pdfBuffer = Buffer.from(base64String, "base64");
+
+ // 4. Generate key for S3
+ const timestamp = moment(scan_time).tz(job.bodyshop.timezone).format("YYYYMMDD-HHmmss");
+ const fileName = `${timestamp}_VSSTA_${scan_type}`;
+ const s3Key = `${job.shopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}.pdf`;
+
+ // 5. Generate presigned URL for S3 upload
+ const s3Client = new S3Client({ region: InstanceRegion() });
+
+ const putCommand = new PutObjectCommand({
+ Bucket: S3_BUCKET,
+ Key: s3Key,
+ ContentType: "application/pdf",
+ StorageClass: "INTELLIGENT_TIERING"
+ });
+
+ const presignedUrl = await getSignedUrl(s3Client, putCommand, { expiresIn: 360 });
+
+ // 6. Upload the decoded PDF to S3
+ await axios.put(presignedUrl, pdfBuffer, {
+ headers: { "Content-Type": "application/pdf" }
+ });
+
+ // 7. Create document record in database
+ const documentMeta = {
+ jobid: job.id,
+ uploaded_by: "VSSTA Integration",
+ name: fileName,
+ key: s3Key,
+ type: "application/pdf",
+ extension: "pdf",
+ bodyshopid: job.shopid,
+ size: pdfBuffer.length,
+ takenat: scan_time
+ };
+
+ const documentInsert = await client.request(INSERT_NEW_DOCUMENT, {
+ docInput: [documentMeta]
+ });
+
+ if (!documentInsert.insert_documents?.returning?.length) {
+      logger.log(`vssta-integration-failed-to-create-document-record`, "error", "api", "vssta", {
+        jobId: job.id,
+        key: s3Key
+      });
+ return res.status(500).json({ error: "Failed to create document record" });
+ }
+
+ return res.status(200).json({
+ message: "VSSTA integration successful",
+ documentId: documentInsert.insert_documents.returning[0].id
+ });
+ } catch (error) {
+ logger.log(`vssta-integration-general`, "error", "api", "vssta", {
+ error: error?.message,
+ stack: error?.stack
+ });
+
+ return res.status(500).json({ error: error.message });
+ }
+};
+
+module.exports = vsstaIntegrationRoute;
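For reference, a hypothetical caller that satisfies `requiredParams` and the secret header checked by `vsstaIntegrationMiddleware` below; the host and every field value are illustrative placeholders, not real identifiers or credentials:

```js
const axios = require("axios");

// All values are placeholders; the route itself is POST /integrations/vssta.
async function sendScan() {
  await axios.post(
    "https://app.example.com/integrations/vssta", // illustrative host
    {
      shop_id: "4f1f9c2e-0000-0000-0000-000000000000", // bodyshop UUID
      ro_nbr: "RO-10234",
      pdf_download_link: "https://vssta.example.com/scans/abc123", // must serve a base64 PDF string
      company_api_key: "company-key-issued-by-vssta", // forwarded as the auth-token header
      scan_type: "PRE",
      scan_time: "2024-05-01T14:32:00Z",
      technician: "J. Doe",
      year: "2021",
      make: "Honda",
      model: "Civic"
    },
    { headers: { "vssta-integration-secret": "<shared secret>" } }
  );
}
```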
diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js
index fdb313984..e30aee90e 100644
--- a/server/media/imgproxy-media.js
+++ b/server/media/imgproxy-media.js
@@ -1,8 +1,12 @@
const path = require("path");
-require("dotenv").config({
- path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
-});
const logger = require("../utils/logger");
+const { Upload } = require("@aws-sdk/lib-storage");
+const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
+const { InstanceRegion } = require("../utils/instanceMgr");
+const archiver = require("archiver");
+const stream = require("node:stream");
+const base64UrlEncode = require("./util/base64UrlEncode");
+const createHmacSha256 = require("./util/createHmacSha256");
const {
S3Client,
PutObjectCommand,
@@ -10,35 +14,36 @@ const {
CopyObjectCommand,
DeleteObjectCommand
} = require("@aws-sdk/client-s3");
-const { Upload } = require("@aws-sdk/lib-storage");
-
-const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
-const crypto = require("crypto");
-const { InstanceRegion } = require("../utils/instanceMgr");
const {
GET_DOCUMENTS_BY_JOB,
QUERY_TEMPORARY_DOCS,
GET_DOCUMENTS_BY_IDS,
DELETE_MEDIA_DOCUMENTS
} = require("../graphql-client/queries");
-const archiver = require("archiver");
-const stream = require("node:stream");
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN.
-const imgproxyKey = process.env.IMGPROXY_KEY;
const imgproxySalt = process.env.IMGPROXY_SALT;
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;
-//Generate a signed upload link for the S3 bucket.
-//All uploads must be going to the same shop and jobid.
-exports.generateSignedUploadUrls = async (req, res) => {
+/**
+ * Generate signed upload URLs for the S3 bucket.
+ * All uploads must target the same shop and job ID.
+ * @param req
+ * @param res
+ * @returns {Promise<*>}
+ */
+const generateSignedUploadUrls = async (req, res) => {
const { filenames, bodyshopid, jobid } = req.body;
try {
- logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid });
+ logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, {
+ filenames,
+ bodyshopid,
+ jobid
+ });
const signedUrls = [];
for (const filename of filenames) {
- const key = filename;
+ const key = filename;
const client = new S3Client({ region: InstanceRegion() });
const command = new PutObjectCommand({
Bucket: imgproxyDestinationBucket,
@@ -50,24 +55,32 @@ exports.generateSignedUploadUrls = async (req, res) => {
}
logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
- res.json({
+
+ return res.json({
success: true,
signedUrls
});
} catch (error) {
- res.status(400).json({
- success: false,
+ logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
message: error.message,
stack: error.stack
});
- logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
+
+ return res.status(400).json({
+ success: false,
message: error.message,
stack: error.stack
});
}
};
-exports.getThumbnailUrls = async (req, res) => {
+/**
+ * Get thumbnail URLs for a job's (or bill's) documents.
+ * @param req
+ * @param res
+ * @returns {Promise<*>}
+ */
+const getThumbnailUrls = async (req, res) => {
const { jobid, billid } = req.body;
try {
@@ -86,10 +99,11 @@ exports.getThumbnailUrls = async (req, res) => {
for (const document of data.documents) {
       //Format to follow:
-      /////<signature>/<resize params>/<base64 URL encoded path to image>
-
+      /////<signature>/<resize params>/<base64 URL encoded path to image>
//When working with documents from Cloudinary, the URL does not include the extension.
+
let key;
+
if (/\.[^/.]+$/.test(document.key)) {
key = document.key;
} else {
@@ -98,12 +112,12 @@ exports.getThumbnailUrls = async (req, res) => {
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
+
//Thumbnail Generation Block
const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);
//Full Size URL block
-
const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);
@@ -114,8 +128,8 @@ exports.getThumbnailUrls = async (req, res) => {
Bucket: imgproxyDestinationBucket,
Key: key
});
- const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
- s3Props.presignedGetUrl = presignedGetUrl;
+
+ s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
@@ -133,7 +147,7 @@ exports.getThumbnailUrls = async (req, res) => {
});
}
- res.json(proxiedUrls);
+ return res.json(proxiedUrls);
//Iterate over them, build the link based on the media type, and return the array.
} catch (error) {
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
@@ -142,57 +156,72 @@ exports.getThumbnailUrls = async (req, res) => {
message: error.message,
stack: error.stack
});
- res.status(400).json({ message: error.message, stack: error.stack });
+
+ return res.status(400).json({ message: error.message, stack: error.stack });
}
};
-exports.getBillFiles = async (req, res) => {
- //Givena bill ID, get the documents associated to it.
-};
-
-exports.downloadFiles = async (req, res) => {
+/**
+ * Zip the requested documents from S3 and return a presigned URL for the archive.
+ * @param req
+ * @param res
+ * @returns {Promise<*>}
+ */
+const downloadFiles = async (req, res) => {
//Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
- const { jobid, billid, documentids } = req.body;
+ const { jobId, billid, documentids } = req.body;
+
try {
- logger.log("imgproxy-download", "DEBUG", req.user?.email, jobid, { billid, jobid, documentids });
+ logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
+
//Query for the keys of the document IDs
const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
- //Using the Keys, get all of the S3 links, zip them, and send back to the client.
+
+ //Using the Keys, get all the S3 links, zip them, and send back to the client.
const s3client = new S3Client({ region: InstanceRegion() });
const archiveStream = archiver("zip");
+
archiveStream.on("error", (error) => {
console.error("Archival encountered an error:", error);
throw new Error(error);
});
- const passthrough = new stream.PassThrough();
- archiveStream.pipe(passthrough);
+ const passThrough = new stream.PassThrough();
+
+ archiveStream.pipe(passThrough);
+
for (const key of data.documents.map((d) => d.key)) {
- const response = await s3client.send(new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key }));
- // :: `response.Body` is a Buffer
- console.log(path.basename(key));
+ const response = await s3client.send(
+ new GetObjectCommand({
+ Bucket: imgproxyDestinationBucket,
+ Key: key
+ })
+ );
+
archiveStream.append(response.Body, { name: path.basename(key) });
}
- archiveStream.finalize();
+ await archiveStream.finalize();
- const archiveKey = `archives/${jobid}/archive-${new Date().toISOString()}.zip`;
+ const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`;
const parallelUploads3 = new Upload({
client: s3client,
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
- params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passthrough }
+ params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough }
});
- parallelUploads3.on("httpUploadProgress", (progress) => {
- console.log(progress);
- });
+    // Progress logging for the upload is disabled; uncomment if needed.
+    // parallelUploads3.on("httpUploadProgress", (progress) => {
+    //   console.log(progress);
+    // });
+
-    const uploadResult = await parallelUploads3.done();
+    await parallelUploads3.done();
//Generate the presigned URL to download it.
const presignedUrl = await getSignedUrl(
s3client,
@@ -200,20 +229,27 @@ exports.downloadFiles = async (req, res) => {
{ expiresIn: 360 }
);
- res.json({ success: true, url: presignedUrl });
+ return res.json({ success: true, url: presignedUrl });
//Iterate over them, build the link based on the media type, and return the array.
} catch (error) {
- logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
- jobid,
+ logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, {
+ jobId,
billid,
message: error.message,
stack: error.stack
});
- res.status(400).json({ message: error.message, stack: error.stack });
+
+ return res.status(400).json({ message: error.message, stack: error.stack });
}
};
-exports.deleteFiles = async (req, res) => {
+/**
+ * Delete files from S3 and mark the matching document rows as deleted.
+ * @param req
+ * @param res
+ * @returns {Promise<*>}
+ */
+const deleteFiles = async (req, res) => {
//Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
//Mark as deleted from the documents section of the database.
const { ids } = req.body;
@@ -232,7 +268,7 @@ exports.deleteFiles = async (req, res) => {
(async () => {
try {
// Delete the original object
- const deleteResult = await s3client.send(
+ await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.key
@@ -250,23 +286,30 @@ exports.deleteFiles = async (req, res) => {
const result = await Promise.all(deleteTransactions);
const errors = result.filter((d) => d.error);
- //Delete only the succesful deletes.
+ //Delete only the successful deletes.
const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, {
ids: result.filter((t) => !t.error).map((d) => d.id)
});
- res.json({ errors, deleteMutationResult });
+ return res.json({ errors, deleteMutationResult });
} catch (error) {
logger.log("imgproxy-delete-files-error", "ERROR", req.user.email, null, {
ids,
message: error.message,
stack: error.stack
});
- res.status(400).json({ message: error.message, stack: error.stack });
+
+ return res.status(400).json({ message: error.message, stack: error.stack });
}
};
-exports.moveFiles = async (req, res) => {
+/**
+ * Move files to another job: copy each object to its new S3 key, delete the original, then update the document rows.
+ * @param req
+ * @param res
+ * @returns {Promise<*>}
+ */
+const moveFiles = async (req, res) => {
const { documents, tojobid } = req.body;
try {
logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid });
@@ -278,7 +321,7 @@ exports.moveFiles = async (req, res) => {
(async () => {
try {
// Copy the object to the new key
- const copyresult = await s3client.send(
+ await s3client.send(
new CopyObjectCommand({
Bucket: imgproxyDestinationBucket,
CopySource: `${imgproxyDestinationBucket}/${document.from}`,
@@ -288,7 +331,7 @@ exports.moveFiles = async (req, res) => {
);
// Delete the original object
- const deleteResult = await s3client.send(
+ await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.from
@@ -297,7 +340,12 @@ exports.moveFiles = async (req, res) => {
return document;
} catch (error) {
- return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket };
+ return {
+ id: document.id,
+ from: document.from,
+ error: error,
+ bucket: imgproxyDestinationBucket
+ };
}
})()
);
@@ -307,6 +355,7 @@ exports.moveFiles = async (req, res) => {
const errors = result.filter((d) => d.error);
let mutations = "";
+
result
.filter((d) => !d.error)
.forEach((d, idx) => {
@@ -321,14 +370,16 @@ exports.moveFiles = async (req, res) => {
});
const client = req.userGraphQLClient;
+
if (mutations !== "") {
const mutationResult = await client.request(`mutation {
${mutations}
}`);
- res.json({ errors, mutationResult });
- } else {
- res.json({ errors: "No images were succesfully moved on remote server. " });
+
+ return res.json({ errors, mutationResult });
}
+
+ return res.json({ errors: "No images were successfully moved on remote server. " });
} catch (error) {
logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, {
documents,
@@ -336,13 +387,15 @@ exports.moveFiles = async (req, res) => {
message: error.message,
stack: error.stack
});
- res.status(400).json({ message: error.message, stack: error.stack });
+
+ return res.status(400).json({ message: error.message, stack: error.stack });
}
};
-function base64UrlEncode(str) {
- return Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
-}
-function createHmacSha256(data) {
- return crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url");
-}
+module.exports = {
+ generateSignedUploadUrls,
+ getThumbnailUrls,
+ downloadFiles,
+ deleteFiles,
+ moveFiles
+};
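The extracted helpers compose an imgproxy URL of the shape the comments above describe. A minimal sketch, assuming the standard imgproxy `/<signature>/<processing options>/<encoded source>` layout; the resize parameters and object key are illustrative:

```js
const base64UrlEncode = require("./server/media/util/base64UrlEncode");
const createHmacSha256 = require("./server/media/util/createHmacSha256");

const salt = process.env.IMGPROXY_SALT;
const source = `s3://${process.env.IMGPROXY_DESTINATION_BUCKET}/shopid/jobid/photo.jpg`;

// Processing options are illustrative; getThumbnailUrls uses thumbResizeParams here.
const proxyPath = `rs:fit:300:300/${base64UrlEncode(source)}`;
const signature = createHmacSha256(`${salt}/${proxyPath}`); // HMAC over salt + path, base64url digest
const url = `${process.env.IMGPROXY_BASE_URL}/${signature}/${proxyPath}`;
```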
diff --git a/server/media/media.js b/server/media/media.js
index 06b1c9bb8..7cb8a1b5d 100644
--- a/server/media/media.js
+++ b/server/media/media.js
@@ -1,42 +1,55 @@
-const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const client = require("../graphql-client/graphql-client").client;
-const queries = require("../graphql-client/queries");
+const determineFileType = require("./util/determineFileType");
+const { DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries");
-require("dotenv").config({
- path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
-});
-
-var cloudinary = require("cloudinary").v2;
+const cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL);
+/**
+ * @description Signs Cloudinary upload parameters and returns the request signature.
+ * @param req
+ * @param res
+ */
const createSignedUploadURL = (req, res) => {
logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET));
};
-exports.createSignedUploadURL = createSignedUploadURL;
-
+/**
+ * @description Builds a Cloudinary zip-download URL for the requested public IDs.
+ * @param req
+ * @param res
+ */
const downloadFiles = (req, res) => {
const { ids } = req.body;
+
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
const url = cloudinary.utils.download_zip_url({
public_ids: ids,
flatten_folders: true
});
+
res.send(url);
};
-exports.downloadFiles = downloadFiles;
+/**
+ * @description Deletes files from Cloudinary and Apollo.
+ * @param req
+ * @param res
+ * @returns {Promise}
+ */
const deleteFiles = async (req, res) => {
const { ids } = req.body;
- const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
+
+ const types = _.groupBy(ids, (x) => determineFileType(x.type));
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
const returns = [];
+
if (types.image) {
//delete images
@@ -47,8 +60,8 @@ const deleteFiles = async (req, res) => {
)
);
}
+
if (types.video) {
- //delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.video.map((x) => x.key),
@@ -56,8 +69,8 @@ const deleteFiles = async (req, res) => {
)
);
}
+
if (types.raw) {
- //delete images returns.push(
returns.push(
await cloudinary.api.delete_resources(
types.raw.map((x) => `${x.key}.${x.extension}`),
@@ -68,6 +81,7 @@ const deleteFiles = async (req, res) => {
// Delete it on apollo.
const successfulDeletes = [];
+
returns.forEach((resType) => {
Object.keys(resType.deleted).forEach((key) => {
if (resType.deleted[key] === "deleted" || resType.deleted[key] === "not_found") {
@@ -77,7 +91,7 @@ const deleteFiles = async (req, res) => {
});
try {
- const result = await client.request(queries.DELETE_MEDIA_DOCUMENTS, {
+ const result = await client.request(DELETE_MEDIA_DOCUMENTS, {
ids: ids.filter((i) => successfulDeletes.includes(i.key)).map((i) => i.id)
});
@@ -91,24 +105,29 @@ const deleteFiles = async (req, res) => {
}
};
-exports.deleteFiles = deleteFiles;
-
+/**
+ * @description Renames keys in Cloudinary and updates the database.
+ * @param req
+ * @param res
+ * @returns {Promise}
+ */
const renameKeys = async (req, res) => {
const { documents, tojobid } = req.body;
+
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
const proms = [];
+
documents.forEach((d) => {
proms.push(
(async () => {
try {
- const res = {
+ return {
id: d.id,
...(await cloudinary.uploader.rename(d.from, d.to, {
- resource_type: DetermineFileType(d.type)
+ resource_type: determineFileType(d.type)
}))
};
- return res;
} catch (error) {
return { id: d.id, from: d.from, error: error };
}
@@ -148,18 +167,13 @@ const renameKeys = async (req, res) => {
}`);
res.json({ errors, mutationResult });
} else {
- res.json({ errors: "No images were succesfully moved on remote server. " });
+ res.json({ errors: "No images were successfully moved on remote server. " });
}
};
-exports.renameKeys = renameKeys;
-//Also needs to be updated in upload utility and mobile app.
-function DetermineFileType(filetype) {
- if (!filetype) return "auto";
- else if (filetype.startsWith("image")) return "image";
- else if (filetype.startsWith("video")) return "video";
- else if (filetype.startsWith("application/pdf")) return "image";
- else if (filetype.startsWith("application")) return "raw";
-
- return "auto";
-}
+module.exports = {
+ createSignedUploadURL,
+ downloadFiles,
+ deleteFiles,
+ renameKeys
+};
diff --git a/server/media/tests/media-utils.test.js b/server/media/tests/media-utils.test.js
new file mode 100644
index 000000000..b25678da1
--- /dev/null
+++ b/server/media/tests/media-utils.test.js
@@ -0,0 +1,98 @@
+import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
+import determineFileType from "../util/determineFileType";
+import base64UrlEncode from "../util/base64UrlEncode";
+
+describe("Media Utils", () => {
+ describe("base64UrlEncode", () => {
+ it("should encode string to base64url format", () => {
+ expect(base64UrlEncode("hello world")).toBe("aGVsbG8gd29ybGQ");
+ });
+
+    it('should pass a literal "+" in the input through as data', () => {
+      // The "-" substitution targets "+" characters produced by the base64 alphabet itself; plain text rarely yields one
+ expect(base64UrlEncode("hello+world")).toBe("aGVsbG8rd29ybGQ");
+ });
+
+ it('should replace "/" with "_"', () => {
+ expect(base64UrlEncode("path/to/resource")).toBe("cGF0aC90by9yZXNvdXJjZQ");
+ });
+
+    it('should keep "=" characters that are part of the input data', () => {
+      // 9 input bytes encode without padding; "=" stripping is already exercised by the "hello world" case above
+ expect(base64UrlEncode("padding==")).toBe("cGFkZGluZz09");
+ });
+ });
+
+ describe("createHmacSha256", () => {
+ let createHmacSha256;
+ const originalEnv = process.env;
+
+ beforeEach(async () => {
+ vi.resetModules();
+ process.env = { ...originalEnv };
+ process.env.IMGPROXY_KEY = "test-key";
+
+ // Dynamically import the module after setting env var
+ const module = await import("../util/createHmacSha256");
+ createHmacSha256 = module.default;
+ });
+
+ afterEach(() => {
+ process.env = originalEnv;
+ });
+
+ it("should create a valid HMAC SHA-256 hash", () => {
+ const result = createHmacSha256("test-data");
+ expect(typeof result).toBe("string");
+ expect(result.length).toBeGreaterThan(0);
+ });
+
+ it("should produce consistent hashes for the same input", () => {
+ const hash1 = createHmacSha256("test-data");
+ const hash2 = createHmacSha256("test-data");
+ expect(hash1).toBe(hash2);
+ });
+
+ it("should produce different hashes for different inputs", () => {
+ const hash1 = createHmacSha256("test-data-1");
+ const hash2 = createHmacSha256("test-data-2");
+ expect(hash1).not.toBe(hash2);
+ });
+ });
+
+ describe("determineFileType", () => {
+ it('should return "auto" when no filetype is provided', () => {
+ expect(determineFileType()).toBe("auto");
+ expect(determineFileType(null)).toBe("auto");
+ expect(determineFileType(undefined)).toBe("auto");
+ });
+
+ it('should return "image" for image filetypes', () => {
+ expect(determineFileType("image/jpeg")).toBe("image");
+ expect(determineFileType("image/png")).toBe("image");
+ expect(determineFileType("image/gif")).toBe("image");
+ });
+
+ it('should return "video" for video filetypes', () => {
+ expect(determineFileType("video/mp4")).toBe("video");
+ expect(determineFileType("video/quicktime")).toBe("video");
+ expect(determineFileType("video/x-msvideo")).toBe("video");
+ });
+
+ it('should return "image" for PDF files', () => {
+ expect(determineFileType("application/pdf")).toBe("image");
+ });
+
+ it('should return "raw" for other application types', () => {
+ expect(determineFileType("application/zip")).toBe("raw");
+ expect(determineFileType("application/json")).toBe("raw");
+ expect(determineFileType("application/msword")).toBe("raw");
+ });
+
+ it('should return "auto" for unrecognized types', () => {
+ expect(determineFileType("audio/mpeg")).toBe("auto");
+ expect(determineFileType("text/html")).toBe("auto");
+ expect(determineFileType("unknown-type")).toBe("auto");
+ });
+ });
+});
diff --git a/server/media/util/base64UrlEncode.js b/server/media/util/base64UrlEncode.js
new file mode 100644
index 000000000..24537cb2c
--- /dev/null
+++ b/server/media/util/base64UrlEncode.js
@@ -0,0 +1,9 @@
+/**
+ * @description Converts a string to a base64url encoded string.
+ * @param str
+ * @returns {string}
+ */
+const base64UrlEncode = (str) =>
+ Buffer.from(str).toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
+
+module.exports = base64UrlEncode;
diff --git a/server/media/util/createHmacSha256.js b/server/media/util/createHmacSha256.js
new file mode 100644
index 000000000..6be9d6022
--- /dev/null
+++ b/server/media/util/createHmacSha256.js
@@ -0,0 +1,12 @@
+const crypto = require("crypto");
+
+const imgproxyKey = process.env.IMGPROXY_KEY;
+
+/**
+ * @description Creates a HMAC SHA-256 hash of the given data.
+ * @param data
+ * @returns {string}
+ */
+const createHmacSha256 = (data) => crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url");
+
+module.exports = createHmacSha256;
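`imgproxyKey` is captured when the module is first required, which is why the test file above resets the module registry before overriding `IMGPROXY_KEY`. A small sketch of the same constraint outside the tests; in-process key rotation is an assumption, not something the codebase does:

```js
// Assumption: a process wants to pick up a rotated IMGPROXY_KEY without restarting.
process.env.IMGPROXY_KEY = "rotated-key";

// Too late for the already-loaded module; force a fresh evaluation:
delete require.cache[require.resolve("./server/media/util/createHmacSha256")];
const createHmacSha256 = require("./server/media/util/createHmacSha256"); // reads the new key
```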
diff --git a/server/media/util/determineFileType.js b/server/media/util/determineFileType.js
new file mode 100644
index 000000000..9bd8a4732
--- /dev/null
+++ b/server/media/util/determineFileType.js
@@ -0,0 +1,17 @@
+/**
+ * @description Determines the file type based on the filetype string.
+ * @note Also needs to be updated in the mobile app utility.
+ * @param filetype
+ * @returns {string}
+ */
+const determineFileType = (filetype) => {
+ if (!filetype) return "auto";
+ else if (filetype.startsWith("image")) return "image";
+ else if (filetype.startsWith("video")) return "video";
+ else if (filetype.startsWith("application/pdf")) return "image";
+ else if (filetype.startsWith("application")) return "raw";
+
+ return "auto";
+};
+
+module.exports = determineFileType;
diff --git a/server/middleware/vsstaIntegrationMiddleware.js b/server/middleware/vsstaIntegrationMiddleware.js
new file mode 100644
index 000000000..7739c4a7a
--- /dev/null
+++ b/server/middleware/vsstaIntegrationMiddleware.js
@@ -0,0 +1,17 @@
+/**
+ * VSSTA integration middleware: rejects requests that do not carry the shared integration secret.
+ * @param req
+ * @param res
+ * @param next
+ * @returns {*}
+ */
+const vsstaIntegrationMiddleware = (req, res, next) => {
+ if (req.headers["vssta-integration-secret"] !== process.env.VSSTA_INTEGRATION_SECRET) {
+ return res.status(401).send("Unauthorized");
+ }
+
+ req.isIntegrationAuthorized = true;
+ next();
+};
+
+module.exports = vsstaIntegrationMiddleware;
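The `!==` comparison works, but plain string comparison can leak timing information about the secret. A hedged alternative sketch using Node's `crypto.timingSafeEqual`, which requires equal-length buffers, hence the length guard:

```js
const crypto = require("crypto");

// Constant-time comparison; returns false on length mismatch instead of throwing.
const secretsMatch = (supplied = "", expected = "") => {
  const a = Buffer.from(String(supplied));
  const b = Buffer.from(String(expected));
  return a.length === b.length && crypto.timingSafeEqual(a, b);
};

const vsstaIntegrationMiddleware = (req, res, next) => {
  if (!secretsMatch(req.headers["vssta-integration-secret"], process.env.VSSTA_INTEGRATION_SECRET)) {
    return res.status(401).send("Unauthorized");
  }
  req.isIntegrationAuthorized = true;
  next();
};
```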
diff --git a/server/notifications/scenarioBuilders.js b/server/notifications/scenarioBuilders.js
index b3f4d0fd2..d1bdb22a0 100644
--- a/server/notifications/scenarioBuilders.js
+++ b/server/notifications/scenarioBuilders.js
@@ -182,7 +182,7 @@ const newMediaAddedReassignedBuilder = (data) => {
: data.changedFields?.jobid && data.changedFields.jobid.old !== data.changedFields.jobid.new
? "moved to this job"
: "updated";
- const body = `An ${mediaType} has been ${action}.`;
+ const body = `A ${mediaType} has been ${action}.`;
return buildNotification(data, "notifications.job.newMediaAdded", body, {
mediaType,
diff --git a/server/routes/intergrationRoutes.js b/server/routes/intergrationRoutes.js
new file mode 100644
index 000000000..841805675
--- /dev/null
+++ b/server/routes/intergrationRoutes.js
@@ -0,0 +1,8 @@
+const express = require("express");
+const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegrationRoute");
+const vsstaMiddleware = require("../middleware/vsstaIntegrationMiddleware");
+const router = express.Router();
+
+router.post("/vssta", vsstaMiddleware, vsstaIntegration);
+
+module.exports = router;
diff --git a/server/routes/jobRoutes.js b/server/routes/jobRoutes.js
index aab3e8823..e7c747907 100644
--- a/server/routes/jobRoutes.js
+++ b/server/routes/jobRoutes.js
@@ -1,6 +1,5 @@
const express = require("express");
const router = express.Router();
-const job = require("../job/job");
const ppc = require("../ccc/partspricechange");
const { partsScan } = require("../parts-scan/parts-scan");
const eventAuthorizationMiddleware = require("../middleware/eventAuthorizationMIddleware");