diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx index 8644115fd..9e49d0d97 100644 --- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx +++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.download.component.jsx @@ -56,22 +56,25 @@ export function JobsDocumentsImgproxyDownloadButton({ bodyshop, galleryImages, i const handleDownload = async () => { logImEXEvent("jobs_documents_download"); setLoading(true); - const zipUrl = await axios({ - url: "/media/imgproxy/download", - method: "POST", - data: { jobId, documentids: imagesToDownload.map((_) => _.id) } - }); + try { + const response = await axios({ + url: "/media/imgproxy/download", + method: "POST", + responseType: "blob", + data: { jobId, documentids: imagesToDownload.map((_) => _.id) }, + onDownloadProgress: downloadProgress + }); - const theDownloadedZip = await cleanAxios({ - url: zipUrl.data.url, - method: "GET", - responseType: "arraybuffer", - onDownloadProgress: downloadProgress - }); - setLoading(false); - setDownload(null); + setLoading(false); + setDownload(null); - standardMediaDownload(theDownloadedZip.data); + // Use the response data (Blob) to trigger download + standardMediaDownload(response.data); + } catch (error) { + setLoading(false); + setDownload(null); + // handle error (optional) + } }; return ( diff --git a/client/src/components/shop-employees/shop-employees-form.component.jsx b/client/src/components/shop-employees/shop-employees-form.component.jsx index 563ad835b..a44e26d4a 100644 --- a/client/src/components/shop-employees/shop-employees-form.component.jsx +++ b/client/src/components/shop-employees/shop-employees-form.component.jsx @@ -383,7 +383,7 @@ export function ShopEmployeesFormComponent({ bodyshop }) { 
title={() => } columns={columns} rowKey={"id"} - dataSource={data ? data.employees_by_pk.employee_vacations : []} + dataSource={data?.employees_by_pk?.employee_vacations ?? []} /> ); diff --git a/server/media/imgproxy-media.js b/server/media/imgproxy-media.js index c6ea1a9ce..b11f4b539 100644 --- a/server/media/imgproxy-media.js +++ b/server/media/imgproxy-media.js @@ -180,10 +180,14 @@ const downloadFiles = async (req, res) => { const s3client = new S3Client({ region: InstanceRegion() }); const zipfile = new yazl.ZipFile(); - const passThrough = new stream.PassThrough(); - // Pipe the zipfile output to the passThrough stream - zipfile.outputStream.pipe(passThrough); + // Set response headers for zip download + const filename = `archive-${jobId || "na"}-${new Date().toISOString().replace(/[:.]/g, "-")}.zip`; + res.setHeader("Content-Type", "application/zip"); + res.setHeader("Content-Disposition", `attachment; filename="${filename}"`); + + // Pipe the zipfile output directly to the response + zipfile.outputStream.pipe(res); // Add each file to the zip as a stream for (const doc of data.documents) { @@ -200,27 +204,8 @@ const downloadFiles = async (req, res) => { // Finalize the zip after all files are added zipfile.end(); + // No need to send a JSON response, as the zip is streamed directly - const archiveKey = `archives/${jobId || "na"}/archive-${new Date().toISOString()}.zip`; - - // Upload the zip stream to S3 - const parallelUploads3 = new Upload({ - client: s3client, - queueSize: 4, - leavePartsOnError: false, - params: { Bucket: imgproxyDestinationBucket, Key: archiveKey, Body: passThrough } - }); - - await parallelUploads3.done(); - - // Generate the presigned URL to download it.
- const presignedUrl = await getSignedUrl( - s3client, - new GetObjectCommand({ Bucket: imgproxyDestinationBucket, Key: archiveKey }), - { expiresIn: 360 } - ); - - return res.json({ success: true, url: presignedUrl }); } catch (error) { logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, { jobId,