IO-3092 Address PR concerns.

Patrick Fic
2025-02-27 13:54:16 -08:00
parent f13a70a22f
commit ace0039429
6 changed files with 96 additions and 83 deletions

View File

@@ -67,9 +67,8 @@ export function DocumentsUploadImgproxyComponent({
     //Check to see if old files plus newly uploaded ones will be too much.
     if (shouldStopUpload) {
-      notification.open({
+      notification.error({
         key: "cannotuploaddocuments",
-        type: "error",
         message: t("documents.labels.upload_limitexceeded_title"),
         description: t("documents.labels.upload_limitexceeded")
       });
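
The call shape here matches antd's notification API. Assuming that is the library in use, notification.error(config) is equivalent to notification.open({ type: "error", ...config }), so dropping the explicit type field preserves behavior. A minimal sketch of the equivalence (the message strings are hypothetical stand-ins for the t(...) lookups):

    import { notification } from "antd";

    // Before: generic open() with an explicit type field.
    notification.open({
      key: "cannotuploaddocuments",
      type: "error",
      message: "Upload limit exceeded",
      description: "Remove some files before uploading more."
    });

    // After: the type-specific helper, same result.
    notification.error({
      key: "cannotuploaddocuments",
      message: "Upload limit exceeded",
      description: "Remove some files before uploading more."
    });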

View File

@@ -5,6 +5,7 @@ import { logImEXEvent } from "../../firebase/firebase.utils";
 import { INSERT_NEW_DOCUMENT } from "../../graphql/documents.queries";
 import { axiosAuthInterceptorId } from "../../utils/CleanAxios";
 import client from "../../utils/GraphQLClient";
+import { error } from "logrocket";
 //Context: currentUserEmail, bodyshop, jobid, invoiceid
@@ -13,17 +14,26 @@ var cleanAxios = axios.create();
 cleanAxios.interceptors.request.eject(axiosAuthInterceptorId);
 export const handleUpload = (ev, context, notification) => {
-  logImEXEvent("document_upload", { filetype: ev.file.type });
+  logImEXEvent("document_upload", { filetype: ev.file?.type });
   const { onError, onSuccess, onProgress } = ev;
   const { bodyshop, jobId } = context;
-  const fileName = ev.file.name || ev.filename;
+  const fileName = ev.file?.name || ev.filename;
   let extension = fileName.split(".").pop();
   let key = `${bodyshop.id}/${jobId}/${replaceAccents(fileName).replace(/[^A-Z0-9]+/gi, "_")}-${new Date().getTime()}.${extension}`;
-  uploadToS3(key, extension, ev.file.type, ev.file, onError, onSuccess, onProgress, context, notification);
+  uploadToS3(key, extension, ev.file.type, ev.file, onError, onSuccess, onProgress, context, notification).catch(
+    (error) => {
+      console.error("Error uploading file to S3", error);
+      notification.error({
+        message: i18n.t("documents.errors.insert", {
+          message: error.message
+        })
+      });
+    }
+  );
 };
 //Handles only 1 file at a time.
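
handleUpload itself is synchronous, so the promise returned by the async uploadToS3 used to be fire-and-forget: a rejection could only surface as an unhandled promise rejection. Chaining .catch (alongside the ev.file?.type optional chaining, which tolerates a missing file object) gives failures a visible path. A minimal sketch of the pattern, with doUpload and notify as hypothetical stand-ins for uploadToS3 and notification.error:

    // Calling an async function from a sync handler without await.
    function startUpload(doUpload: () => Promise<void>, notify: (msg: string) => void): void {
      doUpload().catch((err: unknown) => {
        // Without this .catch, a rejection would fire the global
        // "unhandledrejection" event instead of reaching the UI.
        const message = err instanceof Error ? err.message : String(err);
        console.error("Error uploading file to S3", message);
        notify(message);
      });
    }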
@@ -49,7 +59,7 @@ export const uploadToS3 = async (
   if (signedURLResponse.status !== 200) {
     if (onError) onError(signedURLResponse.statusText);
-    notification["error"]({
+    notification.error({
       message: i18n.t("documents.errors.getpresignurl", {
         message: signedURLResponse.statusText
       })
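
notification["error"] and notification.error resolve to the same property; the dot form is just the idiomatic spelling when the key is a fixed identifier:

    // Equivalent lookups; dot notation is preferred for literal keys.
    const n = { error: (msg: string) => console.error(msg) };
    n["error"]("same method");
    n.error("same method");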
@@ -60,67 +70,76 @@ export const uploadToS3 = async (
   //Key should be same as we provided to maintain backwards compatibility.
   const { presignedUrl: preSignedUploadUrlToS3, key: s3Key } = signedURLResponse.data.signedUrls[0];
-  var options = {
+  const options = {
     onUploadProgress: (e) => {
       if (onProgress) onProgress({ percent: (e.loaded / e.total) * 100 });
     }
   };
-  const s3UploadResponse = await cleanAxios.put(preSignedUploadUrlToS3, file, options);
-  //Insert the document with the matching key.
-  let takenat;
-  if (fileType.includes("image")) {
-    try {
-      const exif = await exifr.parse(file);
-      takenat = exif && exif.DateTimeOriginal;
-    } catch (error) {
-      console.log("Unable to parse image file for EXIF Data", error.message);
-    }
-  }
-  const documentInsert = await client.mutate({
-    mutation: INSERT_NEW_DOCUMENT,
-    variables: {
-      docInput: [
-        {
-          ...(jobId ? { jobid: jobId } : {}),
-          ...(billId ? { billid: billId } : {}),
-          uploaded_by: uploaded_by,
-          key: s3Key,
-          type: fileType,
-          extension: s3UploadResponse.data.format || extension,
-          bodyshopid: bodyshop.id,
-          size: s3UploadResponse.data.bytes || file.size, //Leftover from Cloudinary. We don't do any optimization on upload, so it will always be file.size.
-          takenat
-        }
-      ]
-    }
-  });
-  if (!documentInsert.errors) {
-    if (onSuccess)
-      onSuccess({
-        uid: documentInsert.data.insert_documents.returning[0].id,
-        name: documentInsert.data.insert_documents.returning[0].name,
-        status: "done",
-        key: documentInsert.data.insert_documents.returning[0].key
-      });
-    notification.open({
-      type: "success",
-      key: "docuploadsuccess",
-      message: i18n.t("documents.successes.insert")
-    });
-    if (callback) {
-      callback();
-    }
-  } else {
-    if (onError) onError(JSON.stringify(documentInsert.errors));
-    notification["error"]({
-      message: i18n.t("documents.errors.insert", {
-        message: JSON.stringify(documentInsert.errors)
-      })
-    });
-    return;
-  }
+  try {
+    const s3UploadResponse = await cleanAxios.put(preSignedUploadUrlToS3, file, options);
+    //Insert the document with the matching key.
+    let takenat;
+    if (fileType.includes("image")) {
+      try {
+        const exif = await exifr.parse(file);
+        takenat = exif && exif.DateTimeOriginal;
+      } catch (error) {
+        console.log("Unable to parse image file for EXIF Data", error.message);
+      }
+    }
+    const documentInsert = await client.mutate({
+      mutation: INSERT_NEW_DOCUMENT,
+      variables: {
+        docInput: [
+          {
+            ...(jobId ? { jobid: jobId } : {}),
+            ...(billId ? { billid: billId } : {}),
+            uploaded_by: uploaded_by,
+            key: s3Key,
+            type: fileType,
+            extension: s3UploadResponse.data.format || extension,
+            bodyshopid: bodyshop.id,
+            size: s3UploadResponse.data.bytes || file.size, //Leftover from Cloudinary. We don't do any optimization on upload, so it will always be file.size.
+            takenat
+          }
+        ]
+      }
+    });
+    if (!documentInsert.errors) {
+      if (onSuccess)
+        onSuccess({
+          uid: documentInsert.data.insert_documents.returning[0].id,
+          name: documentInsert.data.insert_documents.returning[0].name,
+          status: "done",
+          key: documentInsert.data.insert_documents.returning[0].key
+        });
+      notification.success({
+        key: "docuploadsuccess",
+        message: i18n.t("documents.successes.insert")
+      });
+      if (callback) {
+        callback();
+      }
+    } else {
+      if (onError) onError(JSON.stringify(documentInsert.errors));
+      notification.error({
+        message: i18n.t("documents.errors.insert", {
+          message: JSON.stringify(documentInsert.errors)
+        })
+      });
+      return;
+    }
+  } catch (error) {
+    console.log("Error uploading file to S3", error.message, error.stack);
+    notification.error({
+      message: i18n.t("documents.errors.insert", {
+        message: error.message
+      })
+    });
+    if (onError) onError(JSON.stringify(error.message));
+  }
 };
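
The structural change in this hunk is wrapping both awaited steps, the S3 PUT and the GraphQL insert, in a single try/catch, so a rejection from either one reaches onError and a notification instead of escaping uploadToS3 as an unhandled rejection. A condensed sketch of the resulting control flow, with putToS3 and insertDocumentRow as hypothetical stand-ins for the cleanAxios.put and client.mutate calls:

    async function uploadWithRecord(
      putToS3: () => Promise<void>,            // stands in for cleanAxios.put(...)
      insertDocumentRow: () => Promise<void>,  // stands in for client.mutate(...)
      onSuccess?: () => void,
      onError?: (msg: string) => void
    ): Promise<void> {
      try {
        await putToS3();            // may reject on network or S3 errors
        await insertDocumentRow();  // may reject on GraphQL transport errors
        onSuccess?.();
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        console.log("Error uploading file to S3", message);
        onError?.(message);         // one error path covers both steps
      }
    }

Note that GraphQL validation failures returned in documentInsert.errors do not reject the promise, which is why the else branch in the diff still handles them separately from the catch.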