IO-3092 WIP on img proxy thumbnail generation.

This commit is contained in:
Patrick Fic
2025-02-05 11:03:29 -08:00
parent e8ee2a9416
commit 47fe1959b1
6 changed files with 489 additions and 41 deletions

View File

@@ -0,0 +1,123 @@
import { UploadOutlined } from "@ant-design/icons";
import { Progress, Result, Space, Upload } from "antd";
import { useMemo, useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
import { useNotification } from "../../contexts/Notifications/notificationContext.jsx";
import { selectBodyshop, selectCurrentUser } from "../../redux/user/user.selectors";
import formatBytes from "../../utils/formatbytes";
import { HasFeatureAccess } from "../feature-wrapper/feature-wrapper.component";
import LockWrapperComponent from "../lock-wrapper/lock-wrapper.component";
import { handleUpload } from "./documents-upload-imgproxy.utility.js";
const mapStateToProps = createStructuredSelector({
currentUser: selectCurrentUser,
bodyshop: selectBodyshop
});
export function DocumentsUploadImgproxyComponent({
children,
currentUser,
bodyshop,
jobId,
tagsArray,
billId,
callbackAfterUpload,
totalSize,
ignoreSizeLimit = false
}) {
const { t } = useTranslation();
const [fileList, setFileList] = useState([]);
const notification = useNotification();
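//Percent of the shop's per-job storage limit already used; the "|| 1" fallback avoids dividing by zero when no limit is set.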
const pct = useMemo(() => {
return Math.floor((totalSize / ((bodyshop && bodyshop.jobsizelimit) || 1)) * 100);
}, [bodyshop, totalSize]);
if (pct > 100 && !ignoreSizeLimit)
return (
<Result
status="error"
title={t("documents.labels.storageexceeded_title")}
subTitle={t("documents.labels.storageexceeded")}
/>
);
const handleDone = (uid) => {
setTimeout(() => {
setFileList((fileList) => fileList.filter((x) => x.uid !== uid));
}, 2000);
};
const hasMediaAccess = HasFeatureAccess({ bodyshop, featureName: "media" });
return (
<Upload.Dragger
multiple={true}
fileList={fileList}
disabled={!hasMediaAccess}
onChange={(f) => {
if (f.event && f.event.percent === 100) handleDone(f.file.uid);
setFileList(f.fileList);
}}
beforeUpload={(file, fileList) => {
if (ignoreSizeLimit) return true;
const newFiles = fileList.reduce((acc, val) => acc + val.size, 0);
const shouldStopUpload = (totalSize + newFiles) / ((bodyshop && bodyshop.jobsizelimit) || 1) >= 1;
//Check to see if old files plus newly uploaded ones will be too much.
if (shouldStopUpload) {
notification.open({
key: "cannotuploaddocuments",
type: "error",
message: t("documents.labels.upload_limitexceeded_title"),
description: t("documents.labels.upload_limitexceeded")
});
return Upload.LIST_IGNORE;
}
return true;
}}
customRequest={(ev) =>
handleUpload(
ev,
{
bodyshop: bodyshop,
uploaded_by: currentUser.email,
jobId: jobId,
billId: billId,
tagsArray: tagsArray,
callback: callbackAfterUpload
},
notification
)
}
accept="audio/*, video/*, image/*, .pdf, .doc, .docx, .xls, .xlsx"
// showUploadList={false}
>
{children || (
<>
<p className="ant-upload-drag-icon">
<UploadOutlined />
</p>
<p className="ant-upload-text">
<LockWrapperComponent featureName="media">{t("documents.labels.dragtoupload")}</LockWrapperComponent>
</p>
{!ignoreSizeLimit && (
<Space wrap className="ant-upload-text">
<Progress type="dashboard" percent={pct} size="small" />
<span>
{t("documents.labels.usage", {
percent: pct,
used: formatBytes(totalSize),
total: formatBytes(bodyshop && bodyshop.jobsizelimit)
})}
</span>
</Space>
)}
</>
)}
</Upload.Dragger>
);
}
export default connect(mapStateToProps, null)(DocumentsUploadImgproxyComponent);

View File

@@ -0,0 +1,181 @@
import axios from "axios";
import exifr from "exifr";
import i18n from "i18next";
import { logImEXEvent } from "../../firebase/firebase.utils";
import { INSERT_NEW_DOCUMENT } from "../../graphql/documents.queries";
import { axiosAuthInterceptorId } from "../../utils/CleanAxios";
import client from "../../utils/GraphQLClient";
//Context: uploaded_by, bodyshop, jobId, billId, tagsArray, callback.
//Use a bare axios instance so the app's auth interceptor cannot attach headers that the presigned S3 upload would reject.
const cleanAxios = axios.create();
cleanAxios.interceptors.request.eject(axiosAuthInterceptorId);
export const handleUpload = (ev, context, notification) => {
logImEXEvent("document_upload", { filetype: ev.file.type });
const { onError, onSuccess, onProgress } = ev;
const fileName = ev.file.name || ev.filename;
let key = replaceAccents(fileName).replace(/[^A-Z0-9.]+/gi, "_");
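//e.g. "Résumé photo.JPG" -> "Resume_photo.JPG"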
let extension = fileName.split(".").pop();
uploadToS3(key, extension, ev.file.type, ev.file, onError, onSuccess, onProgress, context, notification);
};
//Handles only 1 file at a time.
export const uploadToS3 = async (
key,
extension,
fileType,
file,
onError,
onSuccess,
onProgress,
context,
notification
) => {
const { bodyshop, jobId, billId, uploaded_by, callback, tagsArray } = context;
//Get the signed url.
const signedURLResponse = await axios.post("/media/proxy/sign", {
filenames: [key],
bodyshopid: bodyshop.id,
jobid: jobId
});
if (signedURLResponse.status !== 200) {
if (onError) onError(signedURLResponse.statusText);
notification["error"]({
message: i18n.t("documents.errors.getpresignurl", {
message: signedURLResponse.statusText
})
});
return;
}
const { presignedUrl: preSignedUploadUrlToS3, key: s3Key } = signedURLResponse.data.signedUrls[0];
//Build the request options for the presigned S3 upload.
const options = {
onUploadProgress: (e) => {
if (onProgress) onProgress({ percent: (e.loaded / e.total) * 100 });
}
};
const uploadResponse = await cleanAxios.put(preSignedUploadUrlToS3, file, options);
//Insert the document with the matching key.
let takenat;
if (fileType.includes("image")) {
try {
const exif = await exifr.parse(file);
takenat = exif && exif.DateTimeOriginal;
} catch (error) {
console.log("Unable to parse image file for EXIF Data");
}
}
const documentInsert = await client.mutate({
mutation: INSERT_NEW_DOCUMENT,
variables: {
docInput: [
{
...(jobId ? { jobid: jobId } : {}),
...(billId ? { billid: billId } : {}),
uploaded_by: uploaded_by,
key: s3Key,
type: fileType,
//The S3 PUT response has no body, so fall back to the values derived client-side.
extension: uploadResponse.data.format || extension,
bodyshopid: bodyshop.id,
size: uploadResponse.data.bytes || file.size,
takenat
}
]
}
});
if (!documentInsert.errors) {
if (onSuccess)
onSuccess({
//TODO: Since this may go server side, we can just manage the state locally.
uid: documentInsert.data.insert_documents.returning[0].id,
name: documentInsert.data.insert_documents.returning[0].name,
status: "done",
key: documentInsert.data.insert_documents.returning[0].key
});
notification.open({
type: "success",
key: "docuploadsuccess",
message: i18n.t("documents.successes.insert")
});
if (callback) {
callback();
}
} else {
if (onError) onError(JSON.stringify(documentInsert.errors));
notification["error"]({
message: i18n.t("documents.errors.insert", {
message: JSON.stringify(documentInsert.errors)
})
});
return;
}
};
//Also needs to be updated in media JS and mobile app.
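//Maps a MIME type to the resource type used when building proxy URLs; PDFs intentionally map to "image" so a page thumbnail can be rendered.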
export function DetermineFileType(filetype) {
if (!filetype) return "auto";
else if (filetype.startsWith("image")) return "image";
else if (filetype.startsWith("video")) return "video";
else if (filetype.startsWith("application/pdf")) return "image";
else if (filetype.startsWith("application")) return "raw";
return "auto";
}
function replaceAccents(str) {
// Replace accented Latin-1 characters with their ASCII equivalents.
if (str.search(/[\xC0-\xFF]/) > -1) {
str = str
.replace(/[\xC0-\xC5]/g, "A")
.replace(/[\xC6]/g, "AE")
.replace(/[\xC7]/g, "C")
.replace(/[\xC8-\xCB]/g, "E")
.replace(/[\xCC-\xCF]/g, "I")
.replace(/[\xD0]/g, "D")
.replace(/[\xD1]/g, "N")
.replace(/[\xD2-\xD6\xD8]/g, "O")
.replace(/[\xD9-\xDC]/g, "U")
.replace(/[\xDD]/g, "Y")
.replace(/[\xDE]/g, "P")
.replace(/[\xE0-\xE5]/g, "a")
.replace(/[\xE6]/g, "ae")
.replace(/[\xE7]/g, "c")
.replace(/[\xE8-\xEB]/g, "e")
.replace(/[\xEC-\xEF]/g, "i")
.replace(/[\xF1]/g, "n")
.replace(/[\xF2-\xF6\xF8]/g, "o")
.replace(/[\xF9-\xFC]/g, "u")
.replace(/[\xFE]/g, "p")
.replace(/[\xFD\xFF]/g, "y");
}
return str;
}

View File

@@ -1,24 +1,25 @@
import { EditFilled, FileExcelFilled, SyncOutlined } from "@ant-design/icons";
import { Button, Card, Col, Row, Space } from "antd";
import axios from "axios";
import React, { useEffect, useState } from "react";
import { Gallery } from "react-grid-gallery";
import { useTranslation } from "react-i18next";
import Lightbox from "react-image-lightbox";
import "react-image-lightbox/style.css";
import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
import { selectBodyshop } from "../../redux/user/user.selectors";
import DocumentsUploadImgproxyComponent from "../documents-upload-imgproxy/documents-upload-imgproxy.component";
import DocumentsUploadComponent from "../documents-upload/documents-upload.component";
import { DetermineFileType } from "../documents-upload/documents-upload.utility";
import { HasFeatureAccess } from "../feature-wrapper/feature-wrapper.component";
import UpsellComponent, { upsellEnum } from "../upsell/upsell.component";
import { GenerateSrcUrl, GenerateThumbUrl } from "./job-documents.utility";
import JobsDocumentsDownloadButton from "./jobs-document-gallery.download.component";
import JobsDocumentsGalleryReassign from "./jobs-document-gallery.reassign.component";
import JobsDocumentsDeleteButton from "./jobs-documents-gallery.delete.component";
import JobsDocumentsGallerySelectAllComponent from "./jobs-documents-gallery.selectall.component";
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
});
@@ -114,6 +115,89 @@ function JobsDocumentsComponent({
);
setgalleryImages(documents);
}, [data, setgalleryImages, t]);
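//WIP: fetch imgproxy-signed thumbnail/original URLs for every document on the job and rebuild the gallery arrays from the response.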
const getProxiedUrls = async () => {
const result = await axios.post("/media/proxy/thumbnails", { jobid: jobId });
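//Split the documents into gallery images and "other" files (video/PDF/raw), attaching tag metadata for the gallery overlay.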
let documents = result.data.reduce(
(acc, value) => {
const fileType = DetermineFileType(value.type);
if (value.type.startsWith("image")) {
acc.images.push({
src: value.thumbnailUrl,
fullsize: value.originalUrl,
height: 225,
width: 225,
isSelected: false,
key: value.key,
extension: value.extension,
id: value.id,
type: value.type,
size: value.size,
tags: [{ value: value.type, title: value.type }]
});
} else {
let thumb;
switch (fileType) {
case "raw":
thumb = `${window.location.origin}/file.png`;
break;
default:
thumb = GenerateThumbUrl(value);
break;
}
const fileName = value.key.split("/").pop();
acc.other.push({
source: value.originalUrlViaProxyPath,
src: value.thumbnailUrl,
fullsize: value.presignedGetUrl,
tags: [
{
value: fileName,
title: fileName
},
{ value: value.type, title: value.type },
...(value.bill
? [
{
value: value.bill.vendor.name,
title: t("vendors.fields.name")
},
{ value: value.bill.date, title: t("bills.fields.date") },
{
value: value.bill.invoice_number,
title: t("bills.fields.invoice_number")
}
]
: [])
],
height: 225,
width: 225,
isSelected: false,
extension: value.extension,
key: value.key,
id: value.id,
type: value.type,
size: value.size
});
}
return acc;
},
{ images: [], other: [] }
);
console.log("*** ~ file: jobs-documents-gallery.component.jsx:198 ~ getProxiedUrls ~ documents:", documents);
setgalleryImages(documents);
};
// useEffect(() => {
// getProxiedUrls();
// }, [galleryImages]);
const hasMediaAccess = HasFeatureAccess({ bodyshop, featureName: "media" });
const hasMobileAccess = HasFeatureAccess({ bodyshop, featureName: "mobile" });
return (
@@ -138,6 +222,18 @@ function JobsDocumentsComponent({
</Col>
)}
<Col span={24}>
<Button onClick={getProxiedUrls}>Get Proxied URLs</Button>
<Card title="IMG PROXY UPLOADER">
<DocumentsUploadImgproxyComponent
jobId={jobId}
totalSize={totalSize}
billId={billId}
callbackAfterUpload={billsCallback || refetch}
ignoreSizeLimit={ignoreSizeLimit}
/>
</Card>
</Col>
<Col span={24}>
<Card>
<DocumentsUploadComponent

View File

@@ -2252,7 +2252,7 @@ exports.UPDATE_PARTS_CRITICAL = `mutation UPDATE_PARTS_CRITICAL ($IdsToMarkCriti
notcritical: update_joblines(where: {id: {_nin: $IdsToMarkCritical}, jobid: {_eq: $jobid}}, _set: {critical: false}) {
affected_rows
}
}`;
exports.ACTIVE_SHOP_BY_USER = `query ACTIVE_SHOP_BY_USER($user: String) {
associations(where: {active: {_eq: true}, useremail: {_eq: $user}}) {
@@ -2618,7 +2618,6 @@ exports.CREATE_CONVERSATION = `mutation CREATE_CONVERSATION($conversation: [conv
}
`;
exports.STATUS_UPDATE = `query STATUS_UPDATE($period: timestamptz!, $today: timestamptz!) {
bodyshops(where: { created_at: { _gte: $period } }) {
shopname
@@ -2689,4 +2688,37 @@ exports.STATUS_UPDATE = `query STATUS_UPDATE($period: timestamptz!, $today: time
}
}
}
`;
exports.GET_DOCUMENTS_BY_JOB = `
query GET_DOCUMENTS_BY_JOB($jobId: uuid!) {
jobs_by_pk(id: $jobId) {
id
ro_number
}
documents_aggregate(where: { jobid: { _eq: $jobId } }) {
aggregate {
sum {
size
}
}
}
documents(order_by: { takenat: desc }, where: { jobid: { _eq: $jobId } }) {
id
name
key
type
size
takenat
extension
bill {
id
invoice_number
date
vendor {
id
name
}
}
}
}`;

View File

@@ -3,13 +3,16 @@ require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../utils/logger");
const { S3Client, PutObjectCommand, GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const crypto = require("crypto");
const { InstanceRegion } = require("../utils/instanceMgr");
const { GET_DOCUMENTS_BY_JOB } = require("../graphql-client/queries");
//TODO: Remove hardcoded values.
const imgproxyBaseUrl =
process.env.IMGPROXY_BASE_URL ||
// `https://k2car6fha7w5cbgry3j2td56ra0kdmwn.lambda-url.ca-central-1.on.aws` ||
`https://d3ictiiutovkvi.cloudfront.net`;
const imgproxyKey = process.env.IMGPROXY_KEY || `secret`;
const imgproxySalt = process.env.IMGPROXY_SALT || `salt`;
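//Key and salt must match the IMGPROXY_KEY / IMGPROXY_SALT the imgproxy deployment is configured with; the values here are dev placeholders.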
const imgproxyDestinationBucket = process.env.IMGPROXY_DESTINATION_BUCKET || `imex-shop-media`;
@@ -36,7 +39,7 @@ exports.generateSignedUploadUrls = async (req, res) => {
const client = new S3Client({ region: InstanceRegion() });
const command = new PutObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key });
const presignedUrl = await getSignedUrl(client, command, { expiresIn: 360 });
signedUrls.push({ filename, presignedUrl, key });
}
logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
@@ -58,30 +61,25 @@ exports.generateSignedUploadUrls = async (req, res) => {
};
exports.getThumbnailUrls = async (req, res) => {
const { jobid, billid } = req.body;
try {
const client = req.userGraphQLClient;
const data = await client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid });
const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
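//imgproxy processing options: rs:fill:250:250:1 = resize-to-fill a 250x250 box with enlarging allowed; g:ce = center gravity.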
const s3client = new S3Client({ region: InstanceRegion() });
const proxiedUrls = [];
for (const document of data.documents) {
//URL format:
//<imgproxy base URL (CloudFront in front of the Lambda)>/<HMAC-SHA256 of "<salt>/<processing path>">/<resize params>/<base64url-encoded s3:// source path>
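//Illustrative result (digest shortened): https://d3ictiiutovkvi.cloudfront.net/3f9c.../rs:fill:250:250:1/g:ce/czM6Ly9pbWV4...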
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${document.key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
//Thumbnail Generation Block
const thumbProxyPath = `${thumbResizeParams}/${base64UrlEncodedKeyString}`;
@@ -92,15 +90,30 @@ exports.getThumbnailUrls = async (req, res) => {
const fullSizeProxyPath = `${base64UrlEncodedKeyString}`;
const fullSizeHmacSalt = createHmacSha256(`${imgproxySalt}/${fullSizeProxyPath}`);
const s3Props = {};
if (!document.type.startsWith("image")) {
//If not a picture, we need to get a signed download link to the file using S3 (or cloudfront preferably)
const command = new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: document.key });
const presignedGetUrl = await getSignedUrl(s3client, command, { expiresIn: 360 });
s3Props.presignedGetUrl = presignedGetUrl;
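//"raw:1" asks imgproxy to stream the source file back unprocessed, so non-image files can also be served through the proxy.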
const originalProxyPath = `raw:1/${base64UrlEncodedKeyString}`;
const originalHmacSalt = createHmacSha256(`${imgproxySalt}/${originalProxyPath}`);
s3Props.originalUrlViaProxyPath = `${imgproxyBaseUrl}/${originalHmacSalt}/${originalProxyPath}`;
}
proxiedUrls.push({
originalUrl: `${imgproxyBaseUrl}/${fullSizeHmacSalt}/${fullSizeProxyPath}`,
thumbnailUrl: `${imgproxyBaseUrl}/${thumbHmacSalt}/${thumbProxyPath}`,
fullS3Path,
base64UrlEncodedKeyString,
thumbProxyPath,
...s3Props,
...document
});
}
res.json(proxiedUrls);
} catch (error) {
logger.log("imgproxy-get-proxied-urls-error", "ERROR", req.user?.email, jobid, {
@@ -124,8 +137,12 @@ exports.deleteFiles = async (req, res) => {
//Mark as deleted from the documents section of the database.
};
//Generate a key for the S3 bucket by popping off the extension, appending a timestamp, and re-adding the extension.
//This prevents collisions/duplicates in the bucket.
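//e.g. "photo.jpg" -> "<bodyshopid>/<jobid>/photo-1737502469411.jpg" (timestamp illustrative)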
function GenerateKey({ bodyshopid, jobid, filename }) {
let nameArray = filename.split(".");
let extension = nameArray.pop();
return `${bodyshopid}/${jobid}/${nameArray.join(".")}-${Date.now()}.${extension}`;
}
function base64UrlEncode(str) {

View File

@@ -1,13 +1,12 @@
const express = require("express");
const router = express.Router();
const { createSignedUploadURL, downloadFiles, renameKeys, deleteFiles } = require("../media/media");
const { generateSignedUploadUrls, getThumbnailUrls } = require("../media/imgprox-media");
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
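//Every media route requires a verified Firebase ID token and gets a per-user GraphQL client attached as req.userGraphQLClient.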
router.use(validateFirebaseIdTokenMiddleware);
router.use(withUserGraphQLClientMiddleware);
router.post("/sign", createSignedUploadURL);
router.post("/download", downloadFiles);
@@ -15,6 +14,6 @@ router.post("/rename", renameKeys);
router.post("/delete", deleteFiles);
router.post("/proxy/sign", generateSignedUploadUrls);
router.post("/proxy/get", getThumbnailUrls);
router.post("/proxy/thumbnails", getThumbnailUrls);
module.exports = router;