377 lines
12 KiB
JavaScript
377 lines
12 KiB
JavaScript
const path = require("path");
|
|
const logger = require("../utils/logger");
|
|
const { Upload } = require("@aws-sdk/lib-storage");
|
|
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
|
|
const { InstanceRegion } = require("../utils/instanceMgr");
|
|
const archiver = require("archiver");
|
|
const stream = require("node:stream");
|
|
const base64UrlEncode = require("./util/base64UrlEncode");
|
|
const createHmacSha256 = require("./util/createHmacSha256");
|
|
const {
|
|
S3Client,
|
|
PutObjectCommand,
|
|
GetObjectCommand,
|
|
CopyObjectCommand,
|
|
DeleteObjectCommand
|
|
} = require("@aws-sdk/client-s3");
|
|
const {
|
|
GET_DOCUMENTS_BY_JOB,
|
|
QUERY_TEMPORARY_DOCS,
|
|
GET_DOCUMENTS_BY_IDS,
|
|
DELETE_MEDIA_DOCUMENTS
|
|
} = require("../graphql-client/queries");
|
|
const yazl = require("yazl");
|
|
|
|
// imgproxy runtime configuration, injected via environment variables.
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN.
// Salt prepended to each proxy path before HMAC-SHA256 signing (see createHmacSha256 usage below).
const imgproxySalt = process.env.IMGPROXY_SALT;
// S3 bucket that holds the uploaded media imgproxy serves from.
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;
|
|
|
|
/**
|
|
* Generate a Signed URL Link for the s3 bucket.
|
|
* All Uploads must be going to the same Shop and JobId
|
|
* @param req
|
|
* @param res
|
|
* @returns {Promise<*>}
|
|
*/
|
|
/**
 * Generate a Signed URL Link for the s3 bucket.
 * All Uploads must be going to the same Shop and JobId.
 * @param req - Express request; body: { filenames: string[], bodyshopid, jobid }
 * @param res - Express response
 * @returns {Promise<*>} JSON { success, signedUrls: [{ filename, presignedUrl, key }] }
 */
const generateSignedUploadUrls = async (req, res) => {
  const { filenames, bodyshopid, jobid } = req.body;
  try {
    logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, {
      filenames,
      bodyshopid,
      jobid
    });

    // One client serves every presign call — no need to construct a client per file.
    const client = new S3Client({ region: InstanceRegion() });

    // Presign all uploads in parallel; each URL expires in 6 minutes (360 s).
    const signedUrls = await Promise.all(
      filenames.map(async (filename) => {
        // The object key is the filename as supplied by the caller.
        const key = filename;
        const command = new PutObjectCommand({
          Bucket: imgproxyDestinationBucket,
          Key: key,
          StorageClass: "INTELLIGENT_TIERING"
        });
        const presignedUrl = await getSignedUrl(client, command, { expiresIn: 360 });
        return { filename, presignedUrl, key };
      })
    );

    logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });

    return res.json({
      success: true,
      signedUrls
    });
  } catch (error) {
    logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
      message: error.message,
      stack: error.stack
    });

    // NOTE(review): returning error.stack to the client leaks internals — consider
    // omitting it in production (pattern is consistent across this file).
    return res.status(400).json({
      success: false,
      message: error.message,
      stack: error.stack
    });
  }
};
|
|
|
|
/**
|
|
* Get Thumbnail URLS
|
|
* @param req
|
|
* @param res
|
|
* @returns {Promise<*>}
|
|
*/
|
|
/**
 * Get Thumbnail URLS.
 * For each document on a job (or the user's temporary documents when no jobid
 * is supplied), build HMAC-signed imgproxy URLs for a 250x250 thumbnail and
 * the full-size image. Non-image documents additionally get a presigned S3
 * GET URL and a raw passthrough imgproxy URL.
 * @param req - Express request; body: { jobid, billid }
 * @param res - Express response
 * @returns {Promise<*>} JSON array of documents augmented with URL fields
 */
const getThumbnailUrls = async (req, res) => {
  const { jobid, billid } = req.body;

  try {
    logger.log("imgproxy-thumbnails", "DEBUG", req.user?.email, jobid, { billid, jobid });

    //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
    const client = req.userGraphQLClient;
    //If there's no jobid and no billid, we're in temporary documents.
    const data = await (jobid
      ? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
      : client.request(QUERY_TEMPORARY_DOCS));

    // imgproxy processing options: fill-resize to 250x250, enlarge, center gravity.
    const thumbResizeParams = "rs:fill:250:250:1/g:ce";
    const s3client = new S3Client({ region: InstanceRegion() });
    const proxiedUrls = [];
    // Matches a trailing ".ext" — keys migrated from Cloudinary lack the extension.
    // Hoisted so the literal isn't rebuilt on every loop iteration.
    const extensionPattern = /\.[^/.]+$/;

    for (const document of data.documents) {
      //Format to follow:
      //<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
      //When working with documents from Cloudinary, the URL does not include the extension.

      let key;

      if (extensionPattern.test(document.key)) {
        key = document.key;
      } else {
        // assumes document.extension is always present when the key has none — TODO confirm
        key = `${document.key}.${document.extension.toLowerCase()}`;
      }
      // Build the S3 path to the object.
      const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
      const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);

      //Thumbnail Generation Block
      const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
      const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);

      //Full Size URL block — the path is just the encoded key, no processing params.
      const fullSizeProxyPath = base64UrlEncodedKeyString;
      const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);

      const s3Props = {};
      if (!document.type.startsWith("image")) {
        //If not a picture, we need to get a signed download link to the file using S3 (or cloudfront preferably)
        const command = new GetObjectCommand({
          Bucket: imgproxyDestinationBucket,
          Key: key
        });

        s3Props.presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });

        // raw:1 asks imgproxy to pass the file through without processing.
        const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
        const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
        s3Props.originalUrlViaProxyPath = `${imgproxyBaseUrl}/${originalHmacSalt}/${originalProxyPath}`;
      }

      proxiedUrls.push({
        originalUrl: `${imgproxyBaseUrl}/${fullSizeHmacSalt}/${fullSizeProxyPath}`,
        thumbnailUrl: `${imgproxyBaseUrl}/${thumbHmacSalt}/${thumbProxyPath}`,
        fullS3Path,
        base64UrlEncodedKeyString,
        thumbProxyPath,
        ...s3Props,
        // Spread last so the raw document fields win on any key collision.
        ...document
      });
    }

    return res.json(proxiedUrls);
    //Iterate over them, build the link based on the media type, and return the array.
  } catch (error) {
    logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobid, {
      jobid,
      billid,
      message: error.message,
      stack: error.stack
    });

    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};
|
|
|
|
/**
|
|
* Download Files
|
|
* @param req
|
|
* @param res
|
|
* @returns {Promise<*>}
|
|
*/
|
|
/**
 * Download Files.
 * Stream a zip archive of the requested documents directly to the response.
 * @param req - Express request; body: { jobId, billid, documentids: string[] }
 * @param res - Express response
 * @returns {Promise<*>} nothing on success (zip is streamed); JSON error on failure
 */
const downloadFiles = async (req, res) => {
  //Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
  const { jobId, billid, documentids } = req.body;

  try {
    logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });

    // Resolve the ids through the user-scoped client so access control applies.
    const client = req.userGraphQLClient;
    const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });

    const s3client = new S3Client({ region: InstanceRegion() });
    const zipfile = new yazl.ZipFile();

    // Set response headers for zip download
    const filename = `archive-${jobId || "na"}-${new Date().toISOString().replace(/[:.]/g, "-")}.zip`;
    res.setHeader("Content-Type", "application/zip");
    // BUG FIX: the computed filename was never interpolated — the literal
    // string "$(unknown)" was sent as the download name.
    res.setHeader("Content-Disposition", `attachment; filename="${filename}"`);

    // Pipe the zipfile output directly to the response
    zipfile.outputStream.pipe(res);

    // Add each file to the zip as a stream (sequential: each S3 body is
    // consumed by yazl as the archive is written).
    for (const doc of data.documents) {
      const key = doc.key;
      const response = await s3client.send(
        new GetObjectCommand({
          Bucket: imgproxyDestinationBucket,
          Key: key
        })
      );
      // response.Body is a readable stream
      zipfile.addReadStream(response.Body, path.basename(key));
    }

    // Finalize the zip after all files are added
    zipfile.end();
    // No need to send a JSON response, as the zip is streamed directly
  } catch (error) {
    logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
      jobId,
      billid,
      message: error.message,
      stack: error.stack
    });

    // If streaming already began, headers are committed — just terminate the
    // stream instead of attempting (and failing) to send a JSON status.
    if (res.headersSent) {
      return res.end();
    }
    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};
|
|
|
|
/**
|
|
* Delete Files
|
|
* @param req
|
|
* @param res
|
|
* @returns {Promise<*>}
|
|
*/
|
|
/**
 * Delete Files.
 * Removes each document's object from S3, then deletes the matching database
 * rows for every object whose S3 delete succeeded. Per-document S3 failures
 * are collected and returned rather than aborting the batch.
 * @param req - Express request; body: { ids: string[] }
 * @param res - Express response
 * @returns {Promise<*>} JSON { errors, deleteMutationResult }
 */
const deleteFiles = async (req, res) => {
  //Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
  //Mark as deleted from the documents section of the database.
  const { ids } = req.body;
  try {
    logger.log("imgproxy-delete-files", "DEBUG", req.user.email, null, { ids });
    const client = req.userGraphQLClient;

    //Do this to make sure that they are only deleting things that they have access to
    const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: ids });

    const s3client = new S3Client({ region: InstanceRegion() });

    // Kick off one delete per document; a failure resolves to an error record
    // instead of rejecting, so Promise.all never short-circuits.
    const deleteTransactions = data.documents.map(async (document) => {
      try {
        // Delete the original object
        await s3client.send(
          new DeleteObjectCommand({
            Bucket: imgproxyDestinationBucket,
            Key: document.key
          })
        );

        return document;
      } catch (error) {
        return { document, error: error, bucket: imgproxyDestinationBucket };
      }
    });

    const result = await Promise.all(deleteTransactions);
    const errors = result.filter((d) => d.error);

    //Delete only the successful deletes.
    const successfulIds = result.filter((t) => !t.error).map((d) => d.id);
    const deleteMutationResult = await client.request(DELETE_MEDIA_DOCUMENTS, {
      ids: successfulIds
    });

    return res.json({ errors, deleteMutationResult });
  } catch (error) {
    logger.log("imgproxy-delete-files-error", "ERROR", req.user.email, null, {
      ids,
      message: error.message,
      stack: error.stack
    });

    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};
|
|
|
|
/**
|
|
* Move Files
|
|
* @param req
|
|
* @param res
|
|
* @returns {Promise<*>}
|
|
*/
|
|
/**
 * Move Files.
 * Copies each document's S3 object from `document.from` to `document.to`,
 * deletes the original, then updates the database rows (key + jobid) for
 * every object that moved successfully.
 * @param req - Express request; body: { documents: [{ id, from, to }], tojobid }
 * @param res - Express response
 * @returns {Promise<*>} JSON { errors, mutationResult } or an errors-only payload
 */
const moveFiles = async (req, res) => {
  const { documents, tojobid } = req.body;
  try {
    logger.log("imgproxy-move-files", "DEBUG", req.user.email, null, { documents, tojobid });
    const s3client = new S3Client({ region: InstanceRegion() });

    // Run all copy+delete pairs in parallel; per-document failures resolve to
    // error records so Promise.all never short-circuits the batch.
    const moveTransactions = documents.map(async (document) => {
      try {
        // Copy the object to the new key
        await s3client.send(
          new CopyObjectCommand({
            Bucket: imgproxyDestinationBucket,
            CopySource: `${imgproxyDestinationBucket}/${document.from}`,
            Key: document.to,
            StorageClass: "INTELLIGENT_TIERING"
          })
        );

        // Delete the original object.
        // NOTE(review): if this delete fails after a successful copy, the new
        // copy is orphaned in S3 (the DB row is not updated) — consider cleanup.
        await s3client.send(
          new DeleteObjectCommand({
            Bucket: imgproxyDestinationBucket,
            Key: document.from
          })
        );

        return document;
      } catch (error) {
        return {
          id: document.id,
          from: document.from,
          error: error,
          bucket: imgproxyDestinationBucket
        };
      }
    });

    const result = await Promise.all(moveTransactions);
    const errors = result.filter((d) => d.error);

    // Build one aliased update per successfully-moved document.
    // SECURITY NOTE: id/to/tojobid from the request body are interpolated
    // straight into the query text — GraphQL injection is possible if callers
    // control these strings; prefer GraphQL variables here.
    const mutations = result
      .filter((d) => !d.error)
      .map(
        (d, idx) => `
        update_doc${idx}:update_documents_by_pk(pk_columns: { id: "${d.id}" }, _set: {key: "${d.to}", jobid: "${tojobid}"}){
          id
        }
      `
      )
      .join("");

    const client = req.userGraphQLClient;

    if (mutations !== "") {
      const mutationResult = await client.request(`mutation {
        ${mutations}
      }`);

      return res.json({ errors, mutationResult });
    }

    // Nothing moved in S3, so there is nothing to update in the database.
    return res.json({ errors: "No images were successfully moved on remote server. " });
  } catch (error) {
    logger.log("imgproxy-move-files-error", "ERROR", req.user.email, null, {
      documents,
      tojobid,
      message: error.message,
      stack: error.stack
    });

    return res.status(400).json({ message: error.message, stack: error.stack });
  }
};
|
|
|
|
// Public API: Express route handlers for imgproxy-backed media management.
module.exports = {
  generateSignedUploadUrls,
  getThumbnailUrls,
  downloadFiles,
  deleteFiles,
  moveFiles
};
|