feature/IO-2282-VSSTA-Integration: - checkpoint
This commit is contained in:
@@ -2853,3 +2853,31 @@ query GET_BODYSHOP_BY_MERCHANTID($merchantID: String!) {
|
||||
email
|
||||
}
|
||||
}`;
|
||||
|
||||
// Define the GraphQL query to get a job by RO number and shop ID
|
||||
exports.GET_JOB_BY_RO_NUMBER_AND_SHOP_ID = `
|
||||
query GET_JOB_BY_RO_NUMBER_AND_SHOP_ID($roNumber: String!, $shopId: String!) {
|
||||
jobs(where: {ro_number: {_eq: $roNumber}, shopid: {_eq: $shopId}}) {
|
||||
id
|
||||
shopid
|
||||
bodyshopid
|
||||
bodyshop {
|
||||
id
|
||||
email
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
// Define the mutation to insert a new document
|
||||
exports.INSERT_NEW_DOCUMENT = `
|
||||
mutation INSERT_NEW_DOCUMENT($docInput: [documents_insert_input!]!) {
|
||||
insert_documents(objects: $docInput) {
|
||||
returning {
|
||||
id
|
||||
name
|
||||
key
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
const client = require("../../graphql-client/graphql-client").client;

/**
 * VSSTA Integration (planning stub — no logic implemented yet).
 * @param req
 * @param res
 * @returns {Promise<void>}
 */
const vsstaIntegration = async (req, res) => {
  const { logger } = req;

  // Example req.body:
  // {
  //   "shop_id": "test",
  //   "ro_nbr": "71475",
  //   "vin_nbr": "12345678912345678",
  //   "pdf_download_link": "https://portal-staging.vssta.com/invoice_data/1500564",
  //   "company_api_key": "xxxxx",
  //   "scan_type": "PRE",
  //   "scan_fee": "119.00",
  //   "scanner_number": "1234",
  //   "scan_time": "2022-08-23 17:53:50",
  //   "technician": "Frank Jones",
  //   "year": "2021",
  //   "make": "TOYOTA",
  //   "model": "Tacoma SR5 grade"
  // }
  //
  // Plan:
  // 1 - Get the Job by searching ro_nbr and shop_id (assumption: that pair identifies one job).
  // 2 - Download the file from pdf_download_link and upload it to the media S3 bucket,
  //     then insert a document record in the database. The file is base64-encoded (pdf);
  //     we may not have to decode it server-side — ultimately the user just needs to be
  //     able to view it as a PDF from the documents section.
};

module.exports = vsstaIntegration;
|
||||
New file: server/integrations/VSSTA/vsstaIntegrationRoute.js (123 lines)
@@ -0,0 +1,123 @@
|
||||
const axios = require("axios");
|
||||
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
|
||||
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
|
||||
const { GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, INSERT_NEW_DOCUMENT } = require("../../graphql-client/queries");
|
||||
const determineFileType = require("../../media/util/determineFileType");
|
||||
const { InstanceRegion } = require("../../utils/instanceMgr");
|
||||
const client = require("../../graphql-client/graphql-client").client;
|
||||
|
||||
// Assume these are configured environment variables or constants
const S3_BUCKET = process.env.S3_BUCKET || "your-s3-bucket-name";

/**
 * VSSTA integration webhook handler.
 * Looks up the job for the posted RO number + shop id, downloads the scan PDF
 * from VSSTA, uploads it to the media S3 bucket via a presigned URL, and
 * inserts a document row linking the file to the job.
 * @param req - expects the VSSTA payload in req.body and a logger on req
 * @param res
 * @returns {Promise<*>} JSON response: 200 on success, 400/404/500 otherwise
 */
const vsstaIntegrationRoute = async (req, res) => {
  const { logger } = req;
  try {
    // Reject early if any required payload field is absent (or empty-string).
    const requiredParams = [
      "shop_id",
      "ro_nbr",
      "pdf_download_link",
      "company_api_key",
      "scan_type",
      "scan_time",
      "technician",
      "year",
      "make",
      "model"
    ];

    const missingParams = requiredParams.filter((param) => !req.body[param]);

    if (missingParams.length > 0) {
      logger.error(`Missing required parameters: ${missingParams.join(", ")}`);
      return res.status(400).json({
        error: "Missing required parameters",
        missingParams
      });
    }

    const { shop_id, ro_nbr, pdf_download_link, scan_type, scan_time, technician, year, make, model, company_api_key } =
      req.body;

    // 1. Get the job record by ro_number and shop_id
    const jobResult = await client.request(GET_JOB_BY_RO_NUMBER_AND_SHOP_ID, {
      roNumber: ro_nbr,
      shopId: shop_id
    });

    if (!jobResult.jobs || jobResult.jobs.length === 0) {
      logger.error(`No job found for RO number ${ro_nbr} and shop ID ${shop_id}`);
      return res.status(404).json({ error: "Job not found" });
    }

    // If several jobs match, the first wins — assumes (ro_number, shopid) is
    // effectively unique. TODO confirm with the data model.
    const job = jobResult.jobs[0];
    logger.info(`Found job with ID ${job.id} for RO number ${ro_nbr}`);

    // 2. Download the PDF from the provided link (axios rejects on non-2xx).
    logger.info(`Downloading PDF from ${pdf_download_link}`);
    const pdfResponse = await axios.get(pdf_download_link, {
      responseType: "arraybuffer",
      headers: {
        // NOTE(review): "auth:token" is an unusual header name (colons are not
        // valid in standard header names); presumably mandated by the VSSTA
        // API — confirm against their documentation.
        "auth:token": company_api_key
      }
    });

    // 3. Generate key for S3: <bodyshopid>/<jobid>/<sanitized-name>-<ts>.pdf
    const timestamp = Date.now();
    const fileName = `VSSTA_${scan_type}_Scan_${timestamp}.pdf`;
    const s3Key = `${job.bodyshopid}/${job.id}/${fileName.replace(/[^A-Z0-9]+/gi, "_")}-${timestamp}.pdf`;

    // 4. Generate presigned URL for S3 upload (valid for 360 s = 6 minutes)
    logger.info(`Generating presigned URL for S3 key ${s3Key}`);

    const s3Client = new S3Client({ region: InstanceRegion() });

    const putCommand = new PutObjectCommand({
      Bucket: S3_BUCKET,
      Key: s3Key,
      ContentType: "application/pdf",
      StorageClass: "INTELLIGENT_TIERING"
    });

    const presignedUrl = await getSignedUrl(s3Client, putCommand, { expiresIn: 360 });

    // 5. Upload file to S3
    logger.info(`Uploading PDF to S3 with key ${s3Key}`);
    await axios.put(presignedUrl, pdfResponse.data, {
      headers: { "Content-Type": "application/pdf" }
    });

    // 6. Create document record in database.
    // BUG FIX: the previous payload included a `description` field with a note
    // claiming the database would ignore it. GraphQL rejects fields that are
    // not declared on `documents_insert_input` (it does NOT silently drop
    // them), so the insert would always fail. The field was removed; its value
    // was: `VSSTA ${scan_type} scan for ${year} ${make} ${model}, performed by
    // ${technician} at ${scan_time}`. TODO: add a description column if we
    // want to persist that text.
    const documentMeta = {
      jobid: job.id, // uuid, nullable
      uploaded_by: "VSSTA Integration", // text
      name: fileName, // text, nullable
      key: s3Key, // text, default: '0'::text
      type: determineFileType("application/pdf"), // resolves to "image" — PDFs are rendered as images downstream
      extension: "pdf", // text, nullable
      bodyshopid: job.bodyshopid, // uuid, nullable
      size: pdfResponse.data.length, // integer — byte length of the downloaded PDF
      takenat: scan_time // timestamptz; assumes VSSTA sends a parseable timestamp — TODO confirm timezone
    };

    const documentInsert = await client.request(INSERT_NEW_DOCUMENT, {
      docInput: [documentMeta]
    });

    if (documentInsert.insert_documents?.returning?.length > 0) {
      logger.info(`Document created with ID ${documentInsert.insert_documents.returning[0].id}`);
      return res.status(200).json({
        message: "VSSTA integration successful",
        documentId: documentInsert.insert_documents.returning[0].id
      });
    } else {
      logger.error("Failed to create document record");
      return res.status(500).json({ error: "Failed to create document record" });
    }
  } catch (error) {
    logger.error(`VSSTA integration error: ${error.message}`, error);
    return res.status(500).json({ error: error.message });
  }
};

module.exports = vsstaIntegrationRoute;
|
||||
@@ -1,5 +0,0 @@
|
||||
/**
 * Pass-through middleware for the VSSTA route — performs no checks, simply
 * forwards to the next handler.
 * @param request
 * @param response
 * @param next
 */
const vsstaMiddleware = (request, response, next) => {
  next();
};
|
||||
|
||||
module.exports = vsstaMiddleware;
|
||||
@@ -215,9 +215,10 @@ const downloadFiles = async (req, res) => {
|
||||
params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough }
|
||||
});
|
||||
|
||||
parallelUploads3.on("httpUploadProgress", (progress) => {
|
||||
console.log(progress);
|
||||
});
|
||||
// Disabled progress logging for upload, uncomment if needed
|
||||
// parallelUploads3.on("httpUploadProgress", (progress) => {
|
||||
// console.log(progress);
|
||||
// });
|
||||
|
||||
await parallelUploads3.done();
|
||||
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
const _ = require("lodash");
|
||||
const logger = require("../utils/logger");
|
||||
const client = require("../graphql-client/graphql-client").client;
|
||||
const queries = require("../graphql-client/queries");
|
||||
const determineFileType = require("./util/determineFileType");
|
||||
const { DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries");
|
||||
|
||||
const cloudinary = require("cloudinary").v2;
|
||||
cloudinary.config(process.env.CLOUDINARY_URL);
|
||||
@@ -13,22 +14,26 @@ const createSignedUploadURL = (req, res) => {
|
||||
|
||||
/**
 * Responds with a Cloudinary zip-download URL covering the requested media ids.
 * @param req - expects { ids } (Cloudinary public ids) in the body and an authenticated user
 * @param res
 */
const downloadFiles = (req, res) => {
  const { ids } = req.body;

  logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);

  res.send(
    cloudinary.utils.download_zip_url({
      public_ids: ids,
      flatten_folders: true
    })
  );
};
|
||||
|
||||
const deleteFiles = async (req, res) => {
|
||||
const { ids } = req.body;
|
||||
const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
|
||||
|
||||
const types = _.groupBy(ids, (x) => determineFileType(x.type));
|
||||
|
||||
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
|
||||
|
||||
const returns = [];
|
||||
|
||||
if (types.image) {
|
||||
//delete images
|
||||
|
||||
@@ -39,8 +44,8 @@ const deleteFiles = async (req, res) => {
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (types.video) {
|
||||
//delete images returns.push(
|
||||
returns.push(
|
||||
await cloudinary.api.delete_resources(
|
||||
types.video.map((x) => x.key),
|
||||
@@ -48,8 +53,8 @@ const deleteFiles = async (req, res) => {
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (types.raw) {
|
||||
//delete images returns.push(
|
||||
returns.push(
|
||||
await cloudinary.api.delete_resources(
|
||||
types.raw.map((x) => `${x.key}.${x.extension}`),
|
||||
@@ -60,6 +65,7 @@ const deleteFiles = async (req, res) => {
|
||||
|
||||
// Delete it on apollo.
|
||||
const successfulDeletes = [];
|
||||
|
||||
returns.forEach((resType) => {
|
||||
Object.keys(resType.deleted).forEach((key) => {
|
||||
if (resType.deleted[key] === "deleted" || resType.deleted[key] === "not_found") {
|
||||
@@ -69,7 +75,7 @@ const deleteFiles = async (req, res) => {
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await client.request(queries.DELETE_MEDIA_DOCUMENTS, {
|
||||
const result = await client.request(DELETE_MEDIA_DOCUMENTS, {
|
||||
ids: ids.filter((i) => successfulDeletes.includes(i.key)).map((i) => i.id)
|
||||
});
|
||||
|
||||
@@ -85,9 +91,11 @@ const deleteFiles = async (req, res) => {
|
||||
|
||||
const renameKeys = async (req, res) => {
|
||||
const { documents, tojobid } = req.body;
|
||||
|
||||
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
|
||||
|
||||
const proms = [];
|
||||
|
||||
documents.forEach((d) => {
|
||||
proms.push(
|
||||
(async () => {
|
||||
@@ -95,7 +103,7 @@ const renameKeys = async (req, res) => {
|
||||
return {
|
||||
id: d.id,
|
||||
...(await cloudinary.uploader.rename(d.from, d.to, {
|
||||
resource_type: DetermineFileType(d.type)
|
||||
resource_type: determineFileType(d.type)
|
||||
}))
|
||||
};
|
||||
} catch (error) {
|
||||
@@ -141,17 +149,6 @@ const renameKeys = async (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Maps a MIME type string onto a Cloudinary resource type.
 * NOTE: duplicated in the upload utility and the mobile app — keep all copies in sync.
 * @param {string} filetype - MIME type, e.g. "image/png"
 * @returns {string} one of "image", "video", "raw", or "auto"
 */
function DetermineFileType(filetype) {
  if (!filetype) return "auto";
  if (filetype.startsWith("image")) return "image";
  if (filetype.startsWith("video")) return "video";
  // Must be checked before the generic "application" prefix: PDFs are treated
  // as images so Cloudinary can render page previews.
  if (filetype.startsWith("application/pdf")) return "image";
  if (filetype.startsWith("application")) return "raw";
  return "auto";
}
|
||||
|
||||
module.exports = {
|
||||
createSignedUploadURL,
|
||||
downloadFiles,
|
||||
|
||||
New file: server/media/util/determineFileType.js (17 lines)
@@ -0,0 +1,17 @@
|
||||
/**
 * @description Determines the Cloudinary resource type for a MIME type string.
 * @note Also needs to be updated in the mobile app utility — keep in sync.
 * @param filetype
 * @returns {string} one of "image", "video", "raw", or "auto"
 */
const determineFileType = (filetype) => {
  if (!filetype) return "auto";
  // Order matters: the "application/pdf" entry must precede the generic
  // "application" entry. PDFs map to "image" so previews render downstream.
  const prefixMappings = [
    ["image", "image"],
    ["video", "video"],
    ["application/pdf", "image"],
    ["application", "raw"]
  ];
  for (const [prefix, resourceType] of prefixMappings) {
    if (filetype.startsWith(prefix)) return resourceType;
  }
  return "auto";
};
|
||||
|
||||
module.exports = determineFileType;
|
||||
New file: server/middleware/vsstaIntegrationMiddleware.js (17 lines)
@@ -0,0 +1,17 @@
|
||||
/**
 * VSSTA Integration Middleware — authenticates webhook calls with a shared secret.
 * Fails closed: requests are rejected when VSSTA_INTEGRATION_SECRET is not
 * configured, not only when the header mismatches.
 * @param req
 * @param res
 * @param next
 * @returns {*}
 */
const vsstaIntegrationMiddleware = (req, res, next) => {
  const expectedSecret = process.env.VSSTA_INTEGRATION_SECRET;

  // BUG FIX: the original compared the header to the env var directly, so an
  // unset env var plus a missing header (undefined !== undefined → false) let
  // requests through unauthenticated. Require the secret to be configured.
  // NOTE(review): consider crypto.timingSafeEqual for a constant-time compare.
  if (!expectedSecret || req.headers["vssta-integration-secret"] !== expectedSecret) {
    return res.status(401).send("Unauthorized");
  }

  req.isIntegrationAuthorized = true;
  next();
};
|
||||
|
||||
module.exports = vsstaIntegrationMiddleware;
|
||||
@@ -1,6 +1,6 @@
|
||||
const express = require("express");
// BUG FIX: the pasted diff overlay contained both the pre- and post-commit
// require lines, i.e. duplicate `const vsstaIntegration`/`const vsstaMiddleware`
// declarations (a SyntaxError). Reconstructed the post-commit state: the route
// handler lives in vsstaIntegrationRoute and the shared-secret middleware in
// server/middleware.
const vsstaIntegration = require("../integrations/VSSTA/vsstaIntegrationRoute");
const vsstaMiddleware = require("../middleware/vsstaIntegrationMiddleware");
const router = express.Router();

// POST /vssta — webhook endpoint, guarded by the shared-secret middleware.
router.post("/vssta", vsstaMiddleware, vsstaIntegration);
|
||||
|
||||
Reference in New Issue
Block a user