const path = require("path");
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
  S3Client,
  PutObjectCommand,
  GetObjectCommand,
  CopyObjectCommand,
  DeleteObjectCommand
} = require("@aws-sdk/client-s3");
const {
  GET_DOCUMENTS_BY_JOB,
  QUERY_TEMPORARY_DOCS,
  GET_DOCUMENTS_BY_IDS,
  DELETE_MEDIA_DOCUMENTS
} = require("../graphql-client/queries");

// e.g. `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws`
// Direct Lambda function access to bypass CDN.
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL;
const imgproxySalt = process.env.IMGPROXY_SALT;
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;

/**
 * Generate a presigned S3 PUT URL for each requested filename.
 * All uploads must be going to the same shop and job id.
 * @param req Express request; body: { filenames: string[], bodyshopid, jobid }
 * @param res Express response
 * @returns {Promise<*>} JSON { success, signedUrls: [{ filename, presignedUrl, key }] }
 */
const generateSignedUploadUrls = async (req, res) => {
  const { filenames, bodyshopid, jobid } = req.body;
  try {
    logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid });
    // One client for the whole request — constructing an S3Client per filename
    // (as before) is pure overhead; the client is stateless and reusable.
    const client = new S3Client({ region: InstanceRegion() });
    const signedUrls = [];
    for (const filename of filenames) {
      // The filename is used verbatim as the object key.
      const key = filename;
      const command = new PutObjectCommand({
        Bucket: imgproxyDestinationBucket,
        Key: key,
        StorageClass: "INTELLIGENT_TIERING"
      });
      // 360s expiry: the caller is expected to start the upload immediately.
      const presignedUrl = await getSignedUrl(client, command, { expiresIn: 360 });
      signedUrls.push({ filename, presignedUrl, key });
    }
    logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
    return res.json({ success: true, signedUrls });
  } catch (error) {
    logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, { message: error.message, stack: error.stack });
    return res.status(400).json({ success: false, message: error.message, stack: error.stack });
  }
};

/**
 * Get thumbnail + full-size imgproxy URLs for a job's documents (or the
 * user's temporary documents when no jobid is supplied).
 * URL format: <base>/<hmac>/<processing options>/<base64url-encoded s3 path>
 * @param req Express request; body: { jobid, billid }
 * @param res Express response
 * @returns {Promise<*>} JSON array of documents augmented with URL fields
 */
const getThumbnailUrls = async (req, res) => {
  const { jobid, billid } = req.body;
  try {
    logger.log("imgproxy-thumbnails", "DEBUG", req.user?.email, jobid, { billid, jobid });
    //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
    const client = req.userGraphQLClient;
    //If there's no jobid and no billid, we're in temporary documents.
    const data = await (jobid
      ? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
      : client.request(QUERY_TEMPORARY_DOCS));
    // 250x250 center-gravity fill for thumbnails.
    const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
    const s3client = new S3Client({ region: InstanceRegion() });
    const proxiedUrls = [];
    for (const document of data.documents) {
      //When working with documents from Cloudinary, the URL does not include the extension.
      let key;
      if (/\.[^/.]+$/.test(document.key)) {
        key = document.key;
      } else {
        key = `${document.key}.${document.extension.toLowerCase()}`;
      }
      // Build the S3 path to the object and sign both URL variants with the
      // imgproxy salt (HMAC-SHA256 over "<salt>/<path>").
      const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
      const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
      //Thumbnail Generation Block
      const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
      const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);
      //Full Size URL block
      const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
      const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);
      const s3Props = {};
      if (!document.type.startsWith("image")) {
        //If not a picture, we need to get a signed download link to the file using S3 (or cloudfront preferably)
        const command = new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key });
        s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
        // raw:1 tells imgproxy to pass the file through without image processing.
        const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
        const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
        s3Props.originalUrlViaProxyPath = `${imgproxyBaseUrl}/${originalHmacSalt}/${originalProxyPath}`;
      }
      proxiedUrls.push({
        originalUrl: `${imgproxyBaseUrl}/${fullSizeHmacSalt}/${fullSizeProxyPath}`,
        thumbnailUrl: `${imgproxyBaseUrl}/${thumbHmacSalt}/${thumbProxyPath}`,
        fullS3Path,
        base64UrlEncodedKeyString,
        thumbProxyPath,
        ...s3Props,
        ...document
      });
    }
    return res.json(proxiedUrls);
  } catch (error) {
    logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, { jobid, billid, message: error.message, stack: error.stack });
    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};

/**
 * Bundle the requested documents into a zip in S3 and return a presigned
 * download link for the archive.
 * @param req Express request; body: { jobId, billid, documentids: string[] }
 * @param res Express response
 * @returns {Promise<*>} JSON { success, url }
 */
const downloadFiles = async (req, res) => {
  const { jobId, billid, documentids } = req.body;
  try {
    logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });
    //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
    const client = req.userGraphQLClient;
    //Query for the keys of the document IDs (also scopes access via the user's GraphQL client).
    const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
    //Using the keys, stream all the S3 objects through a zip archive back into S3.
    const s3client = new S3Client({ region: InstanceRegion() });
    const archiveStream = archiver("zip");
    const passThrough = new stream.PassThrough();
    // Propagate archiver failures into the passthrough so the pending Upload
    // rejects and the surrounding try/catch handles it. (Throwing from an
    // event-emitter callback, as before, escapes the try/catch and crashes
    // the process as an uncaught exception.)
    archiveStream.on("error", (error) => {
      console.error("Archival encountered an error:", error);
      passThrough.destroy(error);
    });
    archiveStream.pipe(passThrough);
    for (const key of data.documents.map((d) => d.key)) {
      const response = await s3client.send(
        new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key })
      );
      archiveStream.append(response.Body, { name: path.basename(key) });
    }
    const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`;
    const parallelUploads3 = new Upload({
      client: s3client,
      queueSize: 4, // optional concurrency configuration
      leavePartsOnError: false, // optional manually handle dropped parts
      params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough }
    });
    // Disabled progress logging for upload, uncomment if needed
    // parallelUploads3.on("httpUploadProgress", (progress) => {
    //   console.log(progress);
    // });
    // Start the upload BEFORE finalizing the archive: finalize() flushes the
    // zip through the passthrough, and with no consumer draining it the
    // stream's backpressure deadlocks on archives larger than the buffer.
    await Promise.all([parallelUploads3.done(), archiveStream.finalize()]);
    //Generate the presigned URL to download the finished archive.
    const presignedUrl = await getSignedUrl(
      s3client,
      new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: archiveKey }),
      { expiresIn: 360 }
    );
    return res.json({ success: true, url: presignedUrl });
  } catch (error) {
    // Tag fixed from "imgproxy-thumbnails-error" (copy-paste from getThumbnailUrls).
    logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, { jobId, billid, message: error.message, stack: error.stack });
    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};

/**
 * Delete the given documents from S3 and mark them deleted in the database.
 * Only documents the user can see through their GraphQL client are touched.
 * @param req Express request; body: { ids: string[] }
 * @param res Express response
 * @returns {Promise<*>} JSON { errors, deleteMutationResult }
 */
const deleteFiles = async (req, res) => {
  //Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
  //Mark as deleted from the documents section of the database.
  const { ids } = req.body;
  try {
    logger.log("imgproxy-delete-files", "DEBUG", req.user.email, null, { ids });
    const client = req.userGraphQLClient;
    //Do this to make sure that they are only deleting things that they have access to
    const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: ids });
    const s3client = new S3Client({ region: InstanceRegion() });
    // Fire all deletes in parallel; failures are captured per-document rather
    // than failing the whole batch.
    const result = await Promise.all(
      data.documents.map(async (document) => {
        try {
          await s3client.send(
            new DeleteObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.key })
          );
          return document;
        } catch (error) {
          return { document, error: error, bucket: imgproxyDestinationBucket };
        }
      })
    );
    const errors = result.filter((d) => d.error);
    //Delete only the successful deletes.
    const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, {
      ids: result.filter((t) => !t.error).map((d) => d.id)
    });
    return res.json({ errors, deleteMutationResult });
  } catch (error) {
    logger.log("imgproxy-delete-files-error", "ERROR", req.user.email, null, { ids, message: error.message, stack: error.stack });
    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};

/**
 * Move documents to new S3 keys (copy + delete) and repoint the database
 * rows at the new key and target job.
 * @param req Express request; body: { documents: [{ id, from, to }], tojobid }
 * @param res Express response
 * @returns {Promise<*>} JSON { errors, mutationResult } or { errors } when nothing moved
 */
const moveFiles = async (req, res) => {
  const { documents, tojobid } = req.body;
  try {
    logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid });
    const s3client = new S3Client({ region: InstanceRegion() });
    // S3 has no rename: copy to the new key, then delete the original.
    const result = await Promise.all(
      documents.map(async (document) => {
        try {
          // Copy the object to the new key
          await s3client.send(
            new CopyObjectCommand({
              Bucket: imgproxyDestinationBucket,
              CopySource: `${imgproxyDestinationBucket}/${document.from}`,
              Key: document.to,
              StorageClass: "INTELLIGENT_TIERING"
            })
          );
          // Delete the original object
          await s3client.send(
            new DeleteObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.from })
          );
          return document;
        } catch (error) {
          return { id: document.id, from: document.from, error: error, bucket: imgproxyDestinationBucket };
        }
      })
    );
    const errors = result.filter((d) => d.error);
    // NOTE(review): this interpolates request-supplied values (d.id, d.to,
    // tojobid) directly into the GraphQL document — a GraphQL-injection risk.
    // Should be rewritten to pass them as GraphQL variables; flagged rather
    // than changed here because the schema's variable types are not visible.
    let mutations = "";
    result
      .filter((d) => !d.error)
      .forEach((d, idx) => {
        //Create mutation text
        mutations = mutations + `
        update_doc${idx}:update_documents_by_pk(pk_columns: { id: "${d.id}" }, _set: {key: "${d.to}", jobid: "${tojobid}"}){
          id
        }
        `;
      });
    const client = req.userGraphQLClient;
    if (mutations !== "") {
      const mutationResult = await client.request(`mutation { ${mutations} }`);
      return res.json({ errors, mutationResult });
    }
    return res.json({ errors: "No images were successfully moved on remote server. " });
  } catch (error) {
    logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, { documents, tojobid, message: error.message, stack: error.stack });
    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};

module.exports = { generateSignedUploadUrls, getThumbnailUrls, downloadFiles, deleteFiles, moveFiles };