161 lines
6.4 KiB
JavaScript
161 lines
6.4 KiB
JavaScript
const path = require("path");
|
|
require("dotenv").config({
|
|
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
|
|
});
|
|
const logger = require("../utils/logger");
|
|
const { S3Client, PutObjectCommand, GetObjectCommand } = require("@aws-sdk/client-s3");
|
|
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
|
|
const crypto = require("crypto");
|
|
const { InstanceRegion } = require("../utils/instanceMgr");
|
|
const { GET_DOCUMENTS_BY_JOB } = require("../graphql-client/queries");
|
|
//TODO: Remove hardcoded values.
|
|
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL;
|
|
const imgproxyKey = process.env.IMGPROXY_KEY;
|
|
const imgproxySalt = process.env.IMGPROXY_SALT;
|
|
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET;
|
|
|
|
//Generate a signed upload link for the S3 bucket.
|
|
//All uploads must be going to the same shop and jobid.
|
|
exports.generateSignedUploadUrls = async (req, res) => {
|
|
const { filenames, bodyshopid, jobid } = req.body;
|
|
try {
|
|
logger.log("imgproxy-upload-start", "DEBUG", req.user?.email, jobid, { filenames, bodyshopid, jobid });
|
|
|
|
//TODO: Ensure that the user has access to the given bodyshopid.
|
|
//This can be done by querying associations, or, maintaining a REDIS cache of user permissions.
|
|
const hasAccess = true; //TODO: Ensure this is not hardcoded.
|
|
if (!hasAccess) {
|
|
res.send(403);
|
|
return;
|
|
}
|
|
|
|
const signedUrls = [];
|
|
for (const filename of filenames) {
|
|
// TODO: Implement a different, unique file naming convention.
|
|
const key = filename; //GenerateKey({ bodyshopid, jobid, filename });
|
|
const client = new S3Client({ region: InstanceRegion() });
|
|
const command = new PutObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key });
|
|
const presignedUrl = await getSignedUrl(client, command, { expiresIn: 360 });
|
|
signedUrls.push({ filename, presignedUrl, key });
|
|
}
|
|
|
|
logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
|
|
res.json({
|
|
success: true,
|
|
signedUrls
|
|
});
|
|
} catch (error) {
|
|
res.status(400).json({
|
|
success: false,
|
|
message: error.message,
|
|
stack: error.stack
|
|
});
|
|
logger.log("imgproxy-upload-error", "ERROR", req.user?.email, jobid, {
|
|
message: error.message,
|
|
stack: error.stack
|
|
});
|
|
}
|
|
};
|
|
|
|
//For every document attached to a job, build signed imgproxy URLs (thumbnail +
//full size). Non-image documents additionally get an S3 presigned GET link and
//a raw (unprocessed) imgproxy URL. Responds with an array of per-document
//objects; 400 with the error message/stack on failure.
exports.getThumbnailUrls = async (req, res) => {
  // NOTE(review): billid is destructured but never used below — confirm intent.
  const { jobid, billid } = req.body;

  try {
    //TODO: Query for all documents related to the job.
    //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.

    // GraphQL client is attached to the request upstream (auth middleware, presumably).
    const client = req.userGraphQLClient;
    const data = await client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid });

    // imgproxy processing options: fill-resize to 250x250 (enlarge flag set), gravity center.
    const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
    const s3client = new S3Client({ region: InstanceRegion() });
    const proxiedUrls = [];

    for (const document of data.documents) {
      //Format to follow:
      //<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with unencoded/unhashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>

      //When working with documents from Cloudinary, the URL does not include the extension.
      // If the stored key already ends in ".ext", use it as-is; otherwise append
      // the document's extension (lowercased).
      let key;
      if (/\.[^/.]+$/.test(document.key)) {
        key = document.key;
      } else {
        // NOTE(review): assumes document.extension is always present here — verify.
        key = `${document.key}.${document.extension.toLowerCase()}`;
      }
      // Build the S3 path to the object.
      const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
      const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);

      //Thumbnail Generation Block
      // Signature input is "<salt>/<path>" — the exact string order matters for imgproxy.
      const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
      const thumbHmacSalt = createHmacSha256(`${imgproxySalt}/${thumbProxyPath}`);

      //Full Size URL block
      // No processing options: the path is just the encoded source URL.
      const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
      const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);

      const s3Props = {};
      if (!document.type.startsWith("image")) {
        //If not a picture, we need to get a signed download link to the file using S3 (or cloudfront preferably)
        const command = new GetObjectCommand({
          Bucket: imgproxyDestinationBucket,
          Key: key
        });
        const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
        s3Props.presignedGetUrl = presignedGetUrl;

        // "raw:1" tells imgproxy to serve the file unprocessed.
        const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
        const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
        s3Props.originalUrlViaProxyPath = `${imgproxyBaseUrl}/${originalHmacSalt}/${originalProxyPath}`;
      }

      // Merge the document record last so its fields are preserved on the response object.
      proxiedUrls.push({
        originalUrl: `${imgproxyBaseUrl}/${fullSizeHmacSalt}/${fullSizeProxyPath}`,
        thumbnailUrl: `${imgproxyBaseUrl}/${thumbHmacSalt}/${thumbProxyPath}`,
        fullS3Path,
        base64UrlEncodedKeyString,
        thumbProxyPath,
        ...s3Props,
        ...document
      });
    }

    res.json(proxiedUrls);
    //Iterate over them, build the link based on the media type, and return the array.
  } catch (error) {
    logger.log("imgproxy-get-proxied-urls-error", "ERROR", req.user?.email, jobid, {
      message: error.message,
      stack: error.stack
    });
    // NOTE(review): this echoes the stack trace to the client — consider removing.
    res.status(400).json({ message: error.message, stack: error.stack });
  }
};
|
|
|
|
exports.getBillFiles = async (req, res) => {
  //Given a bill ID, get the documents associated to it.
  //TODO(review): not implemented — currently sends no response, so requests will hang.
};
|
|
|
|
exports.downloadFiles = async (req, res) => {
  //Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
  //TODO(review): not implemented — currently sends no response, so requests will hang.
};
|
|
|
|
exports.deleteFiles = async (req, res) => {
  //Mark a file for deletion in s3. Lifecycle deletion will actually delete the copy in the future.
  //Mark as deleted from the documents section of the database.
  //TODO(review): not implemented — currently sends no response, so requests will hang.
};
|
|
|
|
//Generate a key for the s3 bucket by popping off the extension, adding a timestamp, and adding back the extension.
//This is to prevent any collisions/duplicates in the bucket.
//Returns "<bodyshopid>/<jobid>/<basename>-<timestamp>[.<ext>]".
function GenerateKey({ bodyshopid, jobid, filename }) {
  const nameArray = filename.split(".");
  // Only treat the trailing segment as an extension when one actually exists;
  // otherwise "photo" would lose its entire name.
  const extension = nameArray.length > 1 ? nameArray.pop() : null;
  const base = nameArray.join(".");
  // Re-append the extension: the original implementation popped it off but
  // never added it back, producing extension-less keys.
  return extension === null
    ? `${bodyshopid}/${jobid}/${base}-${Date.now()}`
    : `${bodyshopid}/${jobid}/${base}-${Date.now()}.${extension}`;
}
|
|
|
|
//Base64url-encode a string (RFC 4648 §5: '+'→'-', '/'→'_', padding stripped).
//Node's built-in "base64url" Buffer encoding does exactly this, replacing the
//manual replace-chain and matching createHmacSha256, which already digests as
//"base64url".
function base64UrlEncode(str) {
  return Buffer.from(str).toString("base64url");
}
|
|
//Compute the HMAC-SHA256 of `data`, keyed with the module-level imgproxy key,
//and return it base64url-encoded (the digest form imgproxy signatures expect).
function createHmacSha256(data) {
  const hmac = crypto.createHmac("sha256", imgproxyKey);
  hmac.update(data);
  return hmac.digest("base64url");
}
|