feature/IO-2282-VSSTA-Integration:

- Boilerplate in new route
- Fix issues with imgproxy
- Clean up imgproxy
This commit is contained in:
Dave Richer
2025-04-09 14:56:49 -04:00
parent 282fa787a9
commit f55764e859
14 changed files with 152 additions and 81 deletions

View File

@@ -1,8 +1,12 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
S3Client,
PutObjectCommand,
@@ -10,35 +14,31 @@ const {
CopyObjectCommand,
DeleteObjectCommand
} = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const crypto = require("crypto");
const { InstanceRegion } = require("../utils/instanceMgr");
const {
GET_DOCUMENTS_BY_JOB,
QUERY_TEMPORARY_DOCS,
GET_DOCUMENTS_BY_IDS,
DELETE_MEDIA_DOCUMENTS
} = require("../graphql-client/queries");
const archiver = require("archiver");
const stream = require("node:stream");
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN.
const imgproxyKey = process.env.IMGPROXY_KEY;
const imgproxySalt = process.env.IMGPROXY_SALT;
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;
//Generate a signed upload link for the S3 bucket.
//All uploads must be going to the same shop and jobid.
exports.generateSignedUploadUrls = async (req, res) => {
const generateSignedUploadUrls = async (req, res) => {
const { filenames, bodyshopid, jobid } = req.body;
try {
logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid });
logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, {
filenames,
bodyshopid,
jobid
});
const signedUrls = [];
for (const filename of filenames) {
const key = filename;
const key = filename;
const client = new S3Client({ region: InstanceRegion() });
const command = new PutObjectCommand({
Bucket: imgproxyDestinationBucket,
@@ -67,7 +67,7 @@ exports.generateSignedUploadUrls = async (req, res) => {
}
};
exports.getThumbnailUrls = async (req, res) => {
const getThumbnailUrls = async (req, res) => {
const { jobid, billid } = req.body;
try {
@@ -86,10 +86,11 @@ exports.getThumbnailUrls = async (req, res) => {
for (const document of data.documents) {
//Format to follow:
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with unencoded/unhashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension.
let key;
if (/\.[^/.]+$/.test(document.key)) {
key = document.key;
} else {
@@ -98,12 +99,12 @@ exports.getThumbnailUrls = async (req, res) => {
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
//Thumbnail Generation Block
const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);
//Full Size URL block
const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);
@@ -114,8 +115,8 @@ exports.getThumbnailUrls = async (req, res) => {
Bucket: imgproxyDestinationBucket,
Key: key
});
const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
s3Props.presignedGetUrl = presignedGetUrl;
s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
@@ -146,40 +147,46 @@ exports.getThumbnailUrls = async (req, res) => {
}
};
//Given a bill ID, get the documents associated to it.
//NOTE(review): stub — no implementation and no response is sent, so any caller of this
//route will hang until the request times out; confirm the route is not wired up yet.
exports.getBillFiles = async (req, res) => {
//Given a bill ID, get the documents associated to it.
};
exports.downloadFiles = async (req, res) => {
const downloadFiles = async (req, res) => {
//Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
const { jobid, billid, documentids } = req.body;
const { jobId, billid, documentids } = req.body;
try {
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobid, { billid, jobid, documentids });
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//Query for the keys of the document IDs
const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
//Using the Keys, get all of the S3 links, zip them, and send back to the client.
//Using the Keys, get all the S3 links, zip them, and send back to the client.
const s3client = new S3Client({ region: InstanceRegion() });
const archiveStream = archiver("zip");
archiveStream.on("error", (error) => {
console.error("Archival encountered an error:", error);
throw new Error(error);
});
const passthrough = new stream.PassThrough();
archiveStream.pipe(passthrough);
for (const key of data.documents.map((d) => d.key)) {
const response = await s3client.send(new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key }));
// :: `response.Body` is a Buffer
console.log(path.basename(key));
const response = await s3client.send(
new GetObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: key
})
);
archiveStream.append(response.Body, { name: path.basename(key) });
}
archiveStream.finalize();
await archiveStream.finalize();
const archiveKey = `archives/${jobid}/archive-${new Date().toISOString()}.zip`;
const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`;
const parallelUploads3 = new Upload({
client: s3client,
@@ -192,7 +199,7 @@ exports.downloadFiles = async (req, res) => {
console.log(progress);
});
const uploadResult = await parallelUploads3.done();
await parallelUploads3.done();
//Generate the presigned URL to download it.
const presignedUrl = await getSignedUrl(
s3client,
@@ -203,8 +210,8 @@ exports.downloadFiles = async (req, res) => {
res.json({ success: true, url: presignedUrl });
//Iterate over them, build the link based on the media type, and return the array.
} catch (error) {
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
jobid,
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, {
jobId,
billid,
message: error.message,
stack: error.stack
@@ -213,7 +220,7 @@ exports.downloadFiles = async (req, res) => {
}
};
exports.deleteFiles = async (req, res) => {
const deleteFiles = async (req, res) => {
//Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
//Mark as deleted from the documents section of the database.
const { ids } = req.body;
@@ -232,7 +239,7 @@ exports.deleteFiles = async (req, res) => {
(async () => {
try {
// Delete the original object
const deleteResult = await s3client.send(
await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.key
@@ -250,7 +257,7 @@ exports.deleteFiles = async (req, res) => {
const result = await Promise.all(deleteTransactions);
const errors = result.filter((d) => d.error);
//Delete only the succesful deletes.
//Delete only the successful deletes.
const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, {
ids: result.filter((t) => !t.error).map((d) => d.id)
});
@@ -266,7 +273,7 @@ exports.deleteFiles = async (req, res) => {
}
};
exports.moveFiles = async (req, res) => {
const moveFiles = async (req, res) => {
const { documents, tojobid } = req.body;
try {
logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid });
@@ -278,7 +285,7 @@ exports.moveFiles = async (req, res) => {
(async () => {
try {
// Copy the object to the new key
const copyresult = await s3client.send(
await s3client.send(
new CopyObjectCommand({
Bucket: imgproxyDestinationBucket,
CopySource: `${imgproxyDestinationBucket}/${document.from}`,
@@ -288,7 +295,7 @@ exports.moveFiles = async (req, res) => {
);
// Delete the original object
const deleteResult = await s3client.send(
await s3client.send(
new DeleteObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: document.from
@@ -297,7 +304,12 @@ exports.moveFiles = async (req, res) => {
return document;
} catch (error) {
return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket };
return {
id: document.id,
from: document.from,
error: error,
bucket: imgproxyDestinationBucket
};
}
})()
);
@@ -307,6 +319,7 @@ exports.moveFiles = async (req, res) => {
const errors = result.filter((d) => d.error);
let mutations = "";
result
.filter((d) => !d.error)
.forEach((d, idx) => {
@@ -327,7 +340,7 @@ exports.moveFiles = async (req, res) => {
}`);
res.json({ errors, mutationResult });
} else {
res.json({ errors: "No images were succesfully moved on remote server. " });
res.json({ errors: "No images were successfully moved on remote server. " });
}
} catch (error) {
logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, {
@@ -340,9 +353,10 @@ exports.moveFiles = async (req, res) => {
}
};
//Encode a string as base64url (RFC 4648 §5): standard base64 with "+" -> "-",
//"/" -> "_", and trailing "=" padding removed, as imgproxy requires for the
//source-URL segment. Node's built-in "base64url" encoding produces exactly this
//output, so the manual replace chain is unnecessary.
function base64UrlEncode(str) {
  return Buffer.from(str).toString("base64url");
}
//Sign `data` with the file-level imgproxy key (HMAC-SHA256) and return the
//digest in base64url form, as imgproxy expects for its URL signatures.
function createHmacSha256(data) {
  const hmac = crypto.createHmac("sha256", imgproxyKey);
  hmac.update(data);
  return hmac.digest("base64url");
}
//Route handlers exported for the imgproxy-backed media endpoints.
module.exports = {
generateSignedUploadUrls,
getThumbnailUrls,
downloadFiles,
deleteFiles,
moveFiles
};

View File

@@ -1,14 +1,9 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
var cloudinary = require("cloudinary").v2;
const cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL);
const createSignedUploadURL = (req, res) => {
@@ -16,8 +11,6 @@ const createSignedUploadURL = (req, res) => {
res.send(cloudinary.utils.api_sign_request(req.body, process.env.CLOUDINARY_API_SECRET));
};
exports.createSignedUploadURL = createSignedUploadURL;
const downloadFiles = (req, res) => {
const { ids } = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
@@ -28,7 +21,6 @@ const downloadFiles = (req, res) => {
});
res.send(url);
};
exports.downloadFiles = downloadFiles;
const deleteFiles = async (req, res) => {
const { ids } = req.body;
@@ -91,8 +83,6 @@ const deleteFiles = async (req, res) => {
}
};
exports.deleteFiles = deleteFiles;
const renameKeys = async (req, res) => {
const { documents, tojobid } = req.body;
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
@@ -102,13 +92,12 @@ const renameKeys = async (req, res) => {
proms.push(
(async () => {
try {
const res = {
return {
id: d.id,
...(await cloudinary.uploader.rename(d.from, d.to, {
resource_type: DetermineFileType(d.type)
}))
};
return res;
} catch (error) {
return { id: d.id, from: d.from, error: error };
}
@@ -148,10 +137,9 @@ const renameKeys = async (req, res) => {
}`);
res.json({ errors, mutationResult });
} else {
res.json({ errors: "No images were succesfully moved on remote server. " });
res.json({ errors: "No images were successfully moved on remote server. " });
}
};
exports.renameKeys = renameKeys;
//Also needs to be updated in upload utility and mobile app.
function DetermineFileType(filetype) {
@@ -163,3 +151,10 @@ function DetermineFileType(filetype) {
return "auto";
}
//Route handlers exported for the Cloudinary-backed media endpoints.
module.exports = {
createSignedUploadURL,
downloadFiles,
deleteFiles,
renameKeys
};

View File

@@ -0,0 +1,4 @@
/**
 * Encode a string as base64url (RFC 4648 §5): standard base64 with "+" -> "-",
 * "/" -> "_", and trailing "=" padding removed, as imgproxy requires for its
 * source-URL segment. Node's built-in "base64url" encoding produces exactly
 * this output, so the manual replace chain is unnecessary.
 * @param {string} str - The string to encode (e.g. an "s3://bucket/key" path).
 * @returns {string} The base64url-encoded form of `str`.
 */
const base64UrlEncode = (str) => Buffer.from(str).toString("base64url");
module.exports = base64UrlEncode;

View File

@@ -0,0 +1,7 @@
const crypto = require("crypto");
/**
 * Sign `data` with the imgproxy HMAC key (HMAC-SHA256) and return the digest
 * in base64url form, as imgproxy expects for its URL signatures.
 *
 * The key is read from process.env.IMGPROXY_KEY at call time rather than being
 * captured at require time, so the module still works if it is loaded before
 * dotenv has populated the environment, and a missing key fails loudly instead
 * of producing signatures with an undefined key.
 * @param {string|Buffer} data - The URI path to sign.
 * @returns {string} The base64url-encoded HMAC-SHA256 digest.
 * @throws {Error} If the IMGPROXY_KEY environment variable is not set.
 */
const createHmacSha256 = (data) => {
  const imgproxyKey = process.env.IMGPROXY_KEY;
  if (!imgproxyKey) {
    throw new Error("IMGPROXY_KEY environment variable is not set");
  }
  return crypto.createHmac("sha256", imgproxyKey).update(data).digest("base64url");
};
module.exports = createHmacSha256;