Merge branch 'master-AIO' into feature/IO-3255-simplified-part-management

This commit is contained in:
Dave Richer
2025-06-26 14:17:04 -04:00
7 changed files with 118 additions and 94 deletions

View File

@@ -46,32 +46,40 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i
} }
function standardMediaDownload(bufferData) { function standardMediaDownload(bufferData) {
const a = document.createElement("a"); try {
const url = window.URL.createObjectURL(new Blob([bufferData])); const a = document.createElement("a");
a.href = url; const url = window.URL.createObjectURL(new Blob([bufferData]));
a.download = `${identifier || "documents"}.zip`; a.href = url;
a.click(); a.download = `${identifier || "documents"}.zip`;
a.click();
} catch (error) {
setLoading(false);
setDownload(null);
}
} }
const handleDownload = async () => { const handleDownload = async () => {
logImEXEvent("jobs_documents_download"); logImEXEvent("jobs_documents_download");
setLoading(true); setLoading(true);
const zipUrl = await axios({ try {
url: "/media/imgproxy/download", const response = await axios({
method: "POST", url: "/media/imgproxy/download",
data: { jobId, documentids: imagesToDownload.map((_) => _.id) } method: "POST",
}); responseType: "blob",
data: { jobId, documentids: imagesToDownload.map((_) => _.id) },
onDownloadProgress: downloadProgress
});
const theDownloadedZip = await cleanAxios({ setLoading(false);
url: zipUrl.data.url, setDownload(null);
method: "GET",
responseType: "arraybuffer",
onDownloadProgress: downloadProgress
});
setLoading(false);
setDownload(null);
standardMediaDownload(theDownloadedZip.data); // Use the response data (Blob) to trigger download
standardMediaDownload(response.data);
} catch (error) {
setLoading(false);
setDownload(null);
// TODO(review): error is currently swallowed — surface the download failure to the user (e.g. a toast/notification)
}
}; };
return ( return (

View File

@@ -98,7 +98,13 @@ function JobsDocumentsImgproxyComponent({
jobId={jobId} jobId={jobId}
totalSize={totalSize} totalSize={totalSize}
billId={billId} billId={billId}
callbackAfterUpload={billsCallback || fetchThumbnails || refetch} callbackAfterUpload={
billsCallback ||
function () {
isFunction(refetch) && refetch();
isFunction(fetchThumbnails) && fetchThumbnails();
}
}
ignoreSizeLimit={ignoreSizeLimit} ignoreSizeLimit={ignoreSizeLimit}
/> />
</Card> </Card>

View File

@@ -383,7 +383,7 @@ export function ShopEmployeesFormComponent({ bodyshop }) {
title={() => <ShopEmployeeAddVacation employee={data && data.employees_by_pk} />} title={() => <ShopEmployeeAddVacation employee={data && data.employees_by_pk} />}
columns={columns} columns={columns}
rowKey={"id"} rowKey={"id"}
dataSource={data ? data.employees_by_pk.employee_vacations : []} dataSource={data?.employees_by_pk?.employee_vacations ?? []}
/> />
</Card> </Card>
); );

12
package-lock.json generated
View File

@@ -63,7 +63,8 @@
"winston": "^3.17.0", "winston": "^3.17.0",
"winston-cloudwatch": "^6.3.0", "winston-cloudwatch": "^6.3.0",
"xml2js": "^0.6.2", "xml2js": "^0.6.2",
"xmlbuilder2": "^3.1.1" "xmlbuilder2": "^3.1.1",
"yazl": "^3.3.1"
}, },
"devDependencies": { "devDependencies": {
"@eslint/js": "^9.29.0", "@eslint/js": "^9.29.0",
@@ -13072,6 +13073,15 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/yazl": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/yazl/-/yazl-3.3.1.tgz",
"integrity": "sha512-BbETDVWG+VcMUle37k5Fqp//7SDOK2/1+T7X8TD96M3D9G8jK5VLUdQVdVjGi8im7FGkazX7kk5hkU8X4L5Bng==",
"license": "MIT",
"dependencies": {
"buffer-crc32": "^1.0.0"
}
},
"node_modules/yocto-queue": { "node_modules/yocto-queue": {
"version": "0.1.0", "version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View File

@@ -70,7 +70,8 @@
"winston": "^3.17.0", "winston": "^3.17.0",
"winston-cloudwatch": "^6.3.0", "winston-cloudwatch": "^6.3.0",
"xml2js": "^0.6.2", "xml2js": "^0.6.2",
"xmlbuilder2": "^3.1.1" "xmlbuilder2": "^3.1.1",
"yazl": "^3.3.1"
}, },
"devDependencies": { "devDependencies": {
"@eslint/js": "^9.29.0", "@eslint/js": "^9.29.0",

View File

@@ -35,7 +35,7 @@ exports.default = async (req, res) => {
//Query the usage data. //Query the usage data.
const queryResults = await client.request(queries.STATUS_UPDATE, { const queryResults = await client.request(queries.STATUS_UPDATE, {
today: moment().startOf("day").subtract(7, "days"), today: moment().startOf("day").subtract(7, "days"),
period: moment().subtract(90, "days").startOf("day") period: moment().subtract(365, "days").startOf("day")
}); });
//Massage the data. //Massage the data.
@@ -66,7 +66,7 @@ exports.default = async (req, res) => {
Usage Report for ${moment().format("MM/DD/YYYY")} for Rome Online Customers. Usage Report for ${moment().format("MM/DD/YYYY")} for Rome Online Customers.
Notes: Notes:
- Days Since Creation: The number of days since the shop was created. Only shops created in the last 90 days are included. - Days Since Creation: The number of days since the shop was created. Only shops created in the last 365 days are included.
- Updated values should be higher than created values. - Updated values should be higher than created values.
- Counts are inclusive of the last 7 days of data. - Counts are inclusive of the last 7 days of data.
`, `,

View File

@@ -20,6 +20,7 @@ const {
GET_DOCUMENTS_BY_IDS, GET_DOCUMENTS_BY_IDS,
DELETE_MEDIA_DOCUMENTS DELETE_MEDIA_DOCUMENTS
} = require("../graphql-client/queries"); } = require("../graphql-client/queries");
const yazl = require("yazl");
const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN. const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN.
const imgproxySalt = process.env.IMGPROXY_SALT; const imgproxySalt = process.env.IMGPROXY_SALT;
@@ -102,13 +103,7 @@ const getThumbnailUrls = async (req, res) => {
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path> //<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension. //When working with documents from Cloudinary, the URL does not include the extension.
let key; let key = keyStandardize(document)
if (/\.[^/.]+$/.test(document.key)) {
key = document.key;
} else {
key = `${document.key}.${document.extension.toLowerCase()}`;
}
// Build the S3 path to the object. // Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`; const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path); const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
@@ -168,78 +163,73 @@ const getThumbnailUrls = async (req, res) => {
* @returns {Promise<*>} * @returns {Promise<*>}
*/ */
const downloadFiles = async (req, res) => { const downloadFiles = async (req, res) => {
//Given a series of document IDs or keys, generate a file (or a link) to download all images in bulk
const { jobId, billid, documentids } = req.body; const { jobId, billid, documentids } = req.body;
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });
const client = req.userGraphQLClient;
let data;
try { try {
logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids }); data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//Query for the keys of the document IDs
const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
//Using the Keys, get all the S3 links, zip them, and send back to the client.
const s3client = new S3Client({ region: InstanceRegion() });
const archiveStream = archiver("zip");
archiveStream.on("error", (error) => {
console.error("Archival encountered an error:", error);
throw new Error(error);
});
const passThrough = new stream.PassThrough();
archiveStream.pipe(passThrough);
for (const key of data.documents.map((d) => d.key)) {
const response = await s3client.send(
new GetObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: key
})
);
archiveStream.append(response.Body, { name: path.basename(key) });
}
await archiveStream.finalize();
const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`;
const parallelUploads3 = new Upload({
client: s3client,
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough }
});
// Disabled progress logging for upload, uncomment if needed
// parallelUploads3.on("httpUploadProgress", (progress) => {
// console.log(progress);
// });
await parallelUploads3.done();
//Generate the presigned URL to download it.
const presignedUrl = await getSignedUrl(
s3client,
new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: archiveKey }),
{ expiresIn: 360 }
);
return res.json({ success: true, url: presignedUrl });
//Iterate over them, build the link based on the media type, and return the array.
} catch (error) { } catch (error) {
logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, { logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
jobId, jobId,
billid, billid,
message: error.message, message: error.message,
stack: error.stack stack: error.stack
}); });
return res.status(400).json({ message: error.message });
}
return res.status(400).json({ message: error.message, stack: error.stack }); const s3client = new S3Client({ region: InstanceRegion() });
const zipfile = new yazl.ZipFile();
const filename = `archive-${jobId || "na"}-${new Date().toISOString().replace(/[:.]/g, "-")}.zip`;
res.setHeader("Content-Type", "application/zip");
res.setHeader("Content-Disposition", `attachment; filename="${filename}"`);
// Handle zipfile stream errors
zipfile.outputStream.on("error", (err) => {
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
// Cannot send another response here, just destroy the connection
res.destroy(err);
});
zipfile.outputStream.pipe(res);
try {
for (const doc of data.documents) {
let key = keyStandardize(doc)
let response;
try {
response = await s3client.send(
new GetObjectCommand({
Bucket: imgproxyDestinationBucket,
Key: key
})
);
} catch (err) {
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
// Optionally, skip this file or add a placeholder file in the zip
continue;
}
// Attach error handler to S3 stream
response.Body.on("error", (err) => {
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
res.destroy(err);
});
zipfile.addReadStream(response.Body, path.basename(key));
}
zipfile.end();
} catch (error) {
logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
jobId,
billid,
message: error.message,
stack: error.stack
});
// Cannot send another response here, just destroy the connection
res.destroy(error);
} }
}; };
@@ -392,6 +382,15 @@ const moveFiles = async (req, res) => {
} }
}; };
/**
 * Normalize a document's storage key for S3 lookups.
 * Keys migrated from Cloudinary may lack a file extension; when the key has
 * no extension, append the document's own `extension` field (lower-cased).
 * @param {{ key: string, extension?: string }} doc - document record; `extension`
 *   is only read when `key` has no extension — TODO confirm it is always set then.
 * @returns {string} the key, guaranteed to end in an extension.
 */
const keyStandardize = (doc) => {
  // A trailing ".something" (with no "/" after the dot) means the key already carries an extension.
  const hasExtension = /\.[^/.]+$/.test(doc.key);
  return hasExtension ? doc.key : `${doc.key}.${doc.extension.toLowerCase()}`;
};
module.exports = { module.exports = {
generateSignedUploadUrls, generateSignedUploadUrls,
getThumbnailUrls, getThumbnailUrls,