IO-3281: add file that was missed in the previous commit.

This commit is contained in:
Patrick Fic
2025-06-25 15:48:06 -07:00
parent f2a2653eae
commit 0c80abb3ca

View File

@@ -169,43 +169,14 @@ const getThumbnailUrls = async (req, res) => {
* @returns {Promise<*>}
*/
const downloadFiles = async (req, res) => {
  // Given a series of document IDs (or keys), stream a zip archive of the
  // matching S3 objects directly to the HTTP response.
  //
  // Params (from req.body): jobId, billid, documentids — documentids is the
  // list of document IDs resolved to S3 keys via GraphQL.
  // Returns: a 400 JSON error if the document lookup fails; otherwise the
  // zip is streamed and no JSON body is sent.
  const { jobId, billid, documentids } = req.body;
  logger.log("imgproxy-download", "DEBUG", req.user?.email, jobId, { billid, jobId, documentids });
  const client = req.userGraphQLClient;
  let data;
  try {
    // Resolve document IDs to S3 keys BEFORE any headers are written, so a
    // lookup failure can still produce a clean JSON error response.
    data = await client.request(GET_DOCUMENTS_BY_IDS, { documentIds: documentids });
  } catch (error) {
    logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
      jobId,
      billid,
      message: error.message,
      stack: error.stack
    });
    return res.status(400).json({ message: error.message });
  }
  const s3client = new S3Client({ region: InstanceRegion() });
  const zipfile = new yazl.ZipFile();
  // Archive name embeds the job id and a timestamp; ':' and '.' are stripped
  // because they are awkward/illegal in filenames on some platforms.
  const filename = `archive-${jobId || "na"}-${new Date().toISOString().replace(/[:.]/g, "-")}.zip`;
  res.setHeader("Content-Type", "application/zip");
  res.setHeader("Content-Disposition", `attachment; filename="${filename}"`);
  // If the zip stream itself errors, headers are already sent, so we cannot
  // send another response — destroy the connection instead.
  zipfile.outputStream.on("error", (err) => {
    logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
    res.destroy(err);
  });
  // Pipe the zipfile output directly to the response.
  zipfile.outputStream.pipe(res);
  try {
    // Add each S3 object to the zip as a stream, one entry per document.
    for (const doc of data.documents) {
      const key = doc.key;
      let response;
      try {
        response = await s3client.send(
          new GetObjectCommand({
            Bucket: imgproxyDestinationBucket,
            Key: key
          })
        );
      } catch (err) {
        logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
        // Skip objects that cannot be fetched rather than aborting the
        // whole archive mid-stream.
        continue;
      }
      // The S3 body stream can error mid-transfer; once piping has started
      // the only safe recovery is tearing down the connection.
      response.Body.on("error", (err) => {
        logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
        res.destroy(err);
      });
      zipfile.addReadStream(response.Body, path.basename(key));
    }
    // Finalize the zip after all entries are queued; the piped output stream
    // ends the response when the archive is complete.
    zipfile.end();
  } catch (error) {
    logger.log("imgproxy-download-error", "ERROR", req.user?.email, jobId, {
      jobId,
      billid,
      message: error.message,
      stack: error.stack
    });
    // Headers already sent — cannot send a JSON error; destroy the connection.
    res.destroy(error);
  }
};