Lint all the things

Commit authored by Dave on 2025-08-19 16:23:29 -04:00.
parent f6d6b548be
commit 33fb60ca1a
640 changed files with 2129 additions and 3927 deletions

View File

@@ -1,10 +1,7 @@
const path = require("path");
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
@@ -91,11 +88,11 @@ const getThumbnailUrls = async (req, res) => {
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//If there's no jobid and no billid, we're in temporary documents.
const data = await (
billid ? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid }) :
jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const data = await (billid
? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid })
: jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
const s3client = new S3Client({ region: InstanceRegion() });
@@ -106,7 +103,7 @@ const getThumbnailUrls = async (req, res) => {
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension.
let key = keyStandardize(document)
let key = keyStandardize(document);
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
@@ -193,7 +190,10 @@ const downloadFiles = async (req, res) => {
// Handle zipfile stream errors
zipfile.outputStream.on("error", (err) => {
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, {
message: err.message,
stack: err.stack
});
// Cannot send another response here, just destroy the connection
res.destroy(err);
});
@@ -202,7 +202,7 @@ const downloadFiles = async (req, res) => {
try {
for (const doc of data.documents) {
let key = keyStandardize(doc)
let key = keyStandardize(doc);
let response;
try {
response = await s3client.send(
@@ -212,13 +212,21 @@ const downloadFiles = async (req, res) => {
})
);
} catch (err) {
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
// Optionally, skip this file or add a placeholder file in the zip
continue;
}
// Attach error handler to S3 stream
response.Body.on("error", (err) => {
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
res.destroy(err);
});
zipfile.addReadStream(response.Body, path.basename(key));
@@ -393,7 +401,6 @@ const keyStandardize = (doc) => {
}
};
module.exports = {
generateSignedUploadUrls,
getThumbnailUrls,