diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx index 8644115fd..9e49d0d97 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx +++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx @@ -56,22 +56,25 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i const handleDownload = async () => { logImEXEvent("jobs_documents_download"); setLoading(true); - const zipUrl = await axios({ - url: "/media/imgproxy/download", - method: "POST", - data: { jobId, documentids: imagesToDownload.map((_) => _.id) } - }); + try { + const response = await axios({ + url: "/media/imgproxy/download", + method: "POST", + responseType: "blob", + data: { jobId, documentids: imagesToDownload.map((_) => _.id) }, + onDownloadProgress: downloadProgress + }); - const theDownloadedZip = await cleanAxios({ - url: zipUrl.data.url, - method: "GET", - responseType: "arraybuffer", - onDownloadProgress: downloadProgress - }); - setLoading(false); - setDownload(null); + setLoading(false); + setDownload(null); - standardMediaDownload(theDownloadedZip.data); + // Use the response data (Blob) to trigger download + standardMediaDownload(response.data); + } catch (error) { + setLoading(false); + setDownload(null); + // handle error (optional) + } }; return ( diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx index f99485dc8..8ada3616f 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx +++ 
b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx @@ -98,7 +98,13 @@ function JobsDocumentsImgproxyComponent({ jobId={jobId} totalSize={totalSize} billId={billId} - callbackAfterUpload={billsCallback || fetchThumbnails || refetch} + callbackAfterUpload={ + billsCallback || + function () { + isFunction(refetch) && refetch(); + isFunction(fetchThumbnails) && fetchThumbnails(); + } + } ignoreSizeLimit={ignoreSizeLimit} /> diff --git a/client/src/components/shop-employees/shop-employees-form.component.jsx b/client/src/components/shop-employees/shop-employees-form.component.jsx index 563ad835b..a44e26d4a 100644 --- a/client/src/components/shop-employees/shop-employees-form.component.jsx +++ b/client/src/components/shop-employees/shop-employees-form.component.jsx @@ -383,7 +383,7 @@ export function ShopEmployeesFormComponent({ bodyshop }) { title={() => } columns={columns} rowKey={"id"} - dataSource={data ? data.employees_by_pk.employee_vacations : []} + dataSource={data?.employees_by_pk?.employee_vacations ?? 
[]} /> ); diff --git a/package-lock.json b/package-lock.json index 82ec89bb2..79cc67430 100644 --- a/package-lock.json +++ b/package-lock.json @@ -63,7 +63,8 @@ "winston": "^3.17.0", "winston-cloudwatch": "^6.3.0", "xml2js": "^0.6.2", - "xmlbuilder2": "^3.1.1" + "xmlbuilder2": "^3.1.1", + "yazl": "^3.3.1" }, "devDependencies": { "@eslint/js": "^9.28.0", @@ -13057,6 +13058,15 @@ "node": ">=8" } }, + "node_modules/yazl": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/yazl/-/yazl-3.3.1.tgz", + "integrity": "sha512-BbETDVWG+VcMUle37k5Fqp//7SDOK2/1+T7X8TD96M3D9G8jK5VLUdQVdVjGi8im7FGkazX7kk5hkU8X4L5Bng==", + "license": "MIT", + "dependencies": { + "buffer-crc32": "^1.0.0" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index b57c9f6b2..7b9279bbf 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,8 @@ "winston": "^3.17.0", "winston-cloudwatch": "^6.3.0", "xml2js": "^0.6.2", - "xmlbuilder2": "^3.1.1" + "xmlbuilder2": "^3.1.1", + "yazl": "^3.3.1" }, "devDependencies": { "@eslint/js": "^9.28.0", diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index e30aee90e..b11f4b539 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -20,6 +20,7 @@ const { GET_DOCUMENTS_BY_IDS, DELETE_MEDIA_DOCUMENTS } = require("../graphql-client/queries"); +const yazl = require("yazl"); const imgproxyBaseUrl = process.env.IMGPROXY_BASE_URL; // `https://u4gzpp5wm437dnm75qa42tvza40fguqr.lambda-url.ca-central-1.on.aws` //Direct Lambda function access to bypass CDN. 
const imgproxySalt = process.env.IMGPROXY_SALT; @@ -174,65 +175,39 @@ const downloadFiles = async (req, res) => { try { logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids }); - //Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components. const client = req.userGraphQLClient; - - //Query for the keys of the document IDs const data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids }); - //Using the Keys, get all the S3 links, zip them, and send back to the client. const s3client = new S3Client({ region: InstanceRegion() }); - const archiveStream = archiver("zip"); + const zipfile = new yazl.ZipFile(); - archiveStream.on("error", (error) => { console.error("Archival encountered an error:", error); throw new Error(error); }); + // Set response headers for zip download + const filename = `archive-${jobId || "na"}-${new Date().toISOString().replace(/[:.]/g, "-")}.zip`; + res.setHeader("Content-Type", "application/zip"); + res.setHeader("Content-Disposition", `attachment; filename="${filename}"`); - const passThrough = new stream.PassThrough(); + // Pipe the zipfile output directly to the response + zipfile.outputStream.pipe(res); - archiveStream.pipe(passThrough); - - for (const key of data.documents.map((d) => d.key)) { + // Add each file to the zip as a stream + for (const doc of data.documents) { + const key = doc.key; const response = await s3client.send( new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: key }) ); - - archiveStream.append(response.Body, { name: path.basename(key) }); + // response.Body is a readable stream + zipfile.addReadStream(response.Body, path.basename(key)); } - await archiveStream.finalize(); + // Finalize the zip after all files are added + zipfile.end(); + // No need to send a JSON response, as the zip is streamed directly - const archiveKey = `archives/${jobId || "na"}/archive-${new
Date().toISOString()}.zip`; - - const parallelUploads3 = new Upload({ - client: s3client, - queueSize: 4, // optional concurrency configuration - leavePartsOnError: false, // optional manually handle dropped parts - params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough } - }); - - // Disabled progress logging for upload, uncomment if needed - // parallelUploads3.on("httpUploadProgress", (progress) => { - // console.log(progress); - // }); - - await parallelUploads3.done(); - - //Generate the presigned URL to download it. - const presignedUrl = await getSignedUrl( - s3client, - new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: archiveKey }), - { expiresIn: 360 } - ); - - return res.json({ success: true, url: presignedUrl }); - //Iterate over them, build the link based on the media type, and return the array. } catch (error) { - logger.log("imgproxy-thumbnails-error", "ERROR", req.user?.email, jobId, { + logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, { jobId, billid, message: error.message,