diff --git a/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.component.jsx b/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.component.jsx
index 259ca5831..f71760e3b 100644
--- a/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.component.jsx
+++ b/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.component.jsx
@@ -67,9 +67,8 @@ export function DocumentsUploadImgproxyComponent({
//Check to see if old files plus newly uploaded ones will be too much.
if (shouldStopUpload) {
- notification.open({
+ notification.error({
key: "cannotuploaddocuments",
- type: "error",
message: t("documents.labels.upload_limitexceeded_title"),
description: t("documents.labels.upload_limitexceeded")
});
diff --git a/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.utility.js b/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.utility.js
index ed64f5efb..8fa7cb001 100644
--- a/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.utility.js
+++ b/client/src/components/documents-upload-imgproxy/documents-upload-imgproxy.utility.js
@@ -5,6 +5,7 @@ import { logImEXEvent } from "../../firebase/firebase.utils";
import { INSERT_NEW_DOCUMENT } from "../../graphql/documents.queries";
import { axiosAuthInterceptorId } from "../../utils/CleanAxios";
import client from "../../utils/GraphQLClient";
//Context: currentUserEmail, bodyshop, jobid, invoiceid
@@ -13,17 +14,26 @@ var cleanAxios = axios.create();
cleanAxios.interceptors.request.eject(axiosAuthInterceptorId);
export const handleUpload = (ev, context, notification) => {
- logImEXEvent("document_upload", { filetype: ev.file.type });
+ logImEXEvent("document_upload", { filetype: ev.file?.type });
const { onError, onSuccess, onProgress } = ev;
const { bodyshop, jobId } = context;
- const fileName = ev.file.name || ev.filename;
+ const fileName = ev.file?.name || ev.filename;
let extension = fileName.split(".").pop();
let key = `${bodyshop.id}/${jobId}/${replaceAccents(fileName).replace(/[^A-Z0-9]+/gi, "_")}-${new Date().getTime()}.${extension}`;
- uploadToS3(key, extension, ev.file.type, ev.file, onError, onSuccess, onProgress, context, notification);
+  uploadToS3(key, extension, ev.file?.type, ev.file, onError, onSuccess, onProgress, context, notification).catch(
+ (error) => {
+ console.error("Error uploading file to S3", error);
+ notification.error({
+ message: i18n.t("documents.errors.insert", {
+ message: error.message
+ })
+ });
+ }
+ );
};
//Handles only 1 file at a time.
@@ -49,7 +59,7 @@ export const uploadToS3 = async (
if (signedURLResponse.status !== 200) {
if (onError) onError(signedURLResponse.statusText);
- notification["error"]({
+ notification.error({
message: i18n.t("documents.errors.getpresignurl", {
message: signedURLResponse.statusText
})
@@ -60,67 +70,76 @@ export const uploadToS3 = async (
//Key should be same as we provided to maintain backwards compatibility.
const { presignedUrl: preSignedUploadUrlToS3, key: s3Key } = signedURLResponse.data.signedUrls[0];
- var options = {
+ const options = {
onUploadProgress: (e) => {
if (onProgress) onProgress({ percent: (e.loaded / e.total) * 100 });
}
};
- const s3UploadResponse = await cleanAxios.put(preSignedUploadUrlToS3, file, options);
- //Insert the document with the matching key.
- let takenat;
- if (fileType.includes("image")) {
- try {
- const exif = await exifr.parse(file);
- takenat = exif && exif.DateTimeOriginal;
- } catch (error) {
- console.log("Unable to parse image file for EXIF Data", error.message);
+ try {
+ const s3UploadResponse = await cleanAxios.put(preSignedUploadUrlToS3, file, options);
+ //Insert the document with the matching key.
+ let takenat;
+ if (fileType.includes("image")) {
+ try {
+ const exif = await exifr.parse(file);
+ takenat = exif && exif.DateTimeOriginal;
+ } catch (error) {
+ console.log("Unable to parse image file for EXIF Data", error.message);
+ }
}
- }
- const documentInsert = await client.mutate({
- mutation: INSERT_NEW_DOCUMENT,
- variables: {
- docInput: [
- {
- ...(jobId ? { jobid: jobId } : {}),
- ...(billId ? { billid: billId } : {}),
- uploaded_by: uploaded_by,
- key: s3Key,
- type: fileType,
- extension: s3UploadResponse.data.format || extension,
- bodyshopid: bodyshop.id,
- size: s3UploadResponse.data.bytes || file.size, //Leftover from Cloudinary. We don't do any optimization on upload, so it will always be file.size.
- takenat
- }
- ]
- }
- });
-
- if (!documentInsert.errors) {
- if (onSuccess)
- onSuccess({
- uid: documentInsert.data.insert_documents.returning[0].id,
- name: documentInsert.data.insert_documents.returning[0].name,
- status: "done",
- key: documentInsert.data.insert_documents.returning[0].key
- });
- notification.open({
- type: "success",
- key: "docuploadsuccess",
- message: i18n.t("documents.successes.insert")
+ const documentInsert = await client.mutate({
+ mutation: INSERT_NEW_DOCUMENT,
+ variables: {
+ docInput: [
+ {
+ ...(jobId ? { jobid: jobId } : {}),
+ ...(billId ? { billid: billId } : {}),
+ uploaded_by: uploaded_by,
+ key: s3Key,
+ type: fileType,
+ extension: s3UploadResponse.data.format || extension,
+ bodyshopid: bodyshop.id,
+ size: s3UploadResponse.data.bytes || file.size, //Leftover from Cloudinary. We don't do any optimization on upload, so it will always be file.size.
+ takenat
+ }
+ ]
+ }
});
- if (callback) {
- callback();
+
+ if (!documentInsert.errors) {
+ if (onSuccess)
+ onSuccess({
+ uid: documentInsert.data.insert_documents.returning[0].id,
+ name: documentInsert.data.insert_documents.returning[0].name,
+ status: "done",
+ key: documentInsert.data.insert_documents.returning[0].key
+ });
+ notification.success({
+ key: "docuploadsuccess",
+ message: i18n.t("documents.successes.insert")
+ });
+ if (callback) {
+ callback();
+ }
+ } else {
+ if (onError) onError(JSON.stringify(documentInsert.errors));
+ notification.error({
+ message: i18n.t("documents.errors.insert", {
+ message: JSON.stringify(documentInsert.errors)
+ })
+ });
+ return;
}
- } else {
- if (onError) onError(JSON.stringify(documentInsert.errors));
- notification["error"]({
+ } catch (error) {
+    console.error("Error uploading file to S3", error.message, error.stack);
+ notification.error({
message: i18n.t("documents.errors.insert", {
- message: JSON.stringify(documentInsert.errors)
+ message: error.message
})
});
- return;
+    if (onError) onError(error.message);
}
};
diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.reassign.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.reassign.component.jsx
index 4ce2d12a2..2fcd55a9f 100644
--- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.reassign.component.jsx
+++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-document-imgproxy-gallery.reassign.component.jsx
@@ -9,7 +9,7 @@ import { useNotification } from "../../contexts/Notifications/notificationContex
import { GET_DOC_SIZE_BY_JOB } from "../../graphql/documents.queries.js";
import { selectBodyshop } from "../../redux/user/user.selectors.js";
import JobSearchSelect from "../job-search-select/job-search-select.component.jsx";
-
+import { isFunction } from "lodash";
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
});
@@ -54,9 +54,8 @@ export function JobsDocumentsImgproxyGalleryReassign({ bodyshop, galleryImages,
bodyshop.jobsizelimit - newJobData.data.documents_aggregate.aggregate.sum.size < transferedDocSizeTotal;
if (shouldPreventTransfer) {
- notification.open({
+ notification.error({
key: "cannotuploaddocuments",
- type: "error",
message: t("documents.labels.reassign_limitexceeded_title"),
description: t("documents.labels.reassign_limitexceeded")
});
@@ -81,17 +80,17 @@ export function JobsDocumentsImgproxyGalleryReassign({ bodyshop, galleryImages,
})
});
//Add in confirmation & errors.
- if (callback) callback();
+ if (isFunction(callback)) callback();
if (res.errors) {
- notification["error"]({
+ notification.error({
message: t("documents.errors.updating", {
message: JSON.stringify(res.errors)
})
});
}
if (!res.mutationResult?.errors) {
- notification["success"]({
+ notification.success({
message: t("documents.successes.updated")
});
}
diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx
index 156362ff1..61b009f13 100644
--- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx
+++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.component.jsx
@@ -17,6 +17,7 @@ import JobsDocumentsGalleryReassign from "./jobs-document-imgproxy-gallery.reass
import JobsDocumentsDeleteButton from "./jobs-documents-imgproxy-gallery.delete.component";
import JobsDocumentsGallerySelectAllComponent from "./jobs-documents-imgproxy-gallery.selectall.component";
import i18n from "i18next";
+import { isFunction } from "lodash";
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
@@ -40,19 +41,19 @@ function JobsDocumentsImgproxyComponent({
downloadIdentifier,
ignoreSizeLimit
}) {
- const [galleryImages, setgalleryImages] = useState({ images: [], other: [] });
+ const [galleryImages, setGalleryImages] = useState({ images: [], other: [] });
const { t } = useTranslation();
const [modalState, setModalState] = useState({ open: false, index: 0 });
const fetchThumbnails = () => {
- fetchImgproxyThumbnails({ setStateCallback: setgalleryImages, jobId });
+ fetchImgproxyThumbnails({ setStateCallback: setGalleryImages, jobId });
};
useEffect(() => {
if (data) {
fetchThumbnails();
}
- }, [data, setgalleryImages]);
+ }, [data]);
const hasMediaAccess = HasFeatureAccess({ bodyshop, featureName: "media" });
const hasMobileAccess = HasFeatureAccess({ bodyshop, featureName: "mobile" });
@@ -65,7 +66,7 @@ function JobsDocumentsImgproxyComponent({
onClick={() => {
//Handle any doc refresh.
- refetch && refetch();
+ isFunction(refetch) && refetch();
//Do the imgproxy refresh too
fetchThumbnails();
@@ -73,7 +74,7 @@ function JobsDocumentsImgproxyComponent({
>
-
+
{
- setgalleryImages({
+ setGalleryImages({
...galleryImages,
images: galleryImages.images.map((g, idx) =>
index === idx ? { ...g, isSelected: !g.isSelected } : g
@@ -148,7 +149,7 @@ function JobsDocumentsImgproxyComponent({
window.open(galleryImages.other[index].source, "_blank", "toolbar=0,location=0,menubar=0");
}}
onSelect={(index) => {
- setgalleryImages({
+ setGalleryImages({
...galleryImages,
other: galleryImages.other.map((g, idx) => (index === idx ? { ...g, isSelected: !g.isSelected } : g))
});
@@ -160,6 +161,7 @@ function JobsDocumentsImgproxyComponent({
{
const newWindow = window.open(
`${window.location.protocol}//${window.location.host}/edit?documentId=${
@@ -202,7 +204,7 @@ export default connect(mapStateToProps, mapDispatchToProps)(JobsDocumentsImgprox
export const fetchImgproxyThumbnails = async ({ setStateCallback, jobId, imagesOnly }) => {
const result = await axios.post("/media/imgproxy/thumbnails", { jobid: jobId });
- let documents = result.data.reduce(
+ const documents = result.data.reduce(
(acc, value) => {
if (value.type.startsWith("image")) {
acc.images.push({
@@ -259,9 +261,6 @@ export const fetchImgproxyThumbnails = async ({ setStateCallback, jobId, imagesO
},
{ images: [], other: [] }
);
- if (imagesOnly) {
- setStateCallback(documents.images);
- } else {
- setStateCallback(documents);
- }
+
+ setStateCallback(imagesOnly ? documents.images : documents);
};
diff --git a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.delete.component.jsx b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.delete.component.jsx
index 72143531b..4701bca67 100644
--- a/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.delete.component.jsx
+++ b/client/src/components/jobs-documents-imgproxy-gallery/jobs-documents-imgproxy-gallery.delete.component.jsx
@@ -5,7 +5,7 @@ import { useState } from "react";
import { useTranslation } from "react-i18next";
import { logImEXEvent } from "../../firebase/firebase.utils.js";
import { useNotification } from "../../contexts/Notifications/notificationContext.jsx";
-
+import { isFunction } from "lodash";
/*
################################################################################################
@@ -34,22 +34,21 @@ export default function JobsDocumentsImgproxyDeleteButton({ galleryImages, delet
});
if (res.data.error) {
- notification["error"]({
+ notification.error({
message: t("documents.errors.deleting", {
error: JSON.stringify(res.data.error.response.errors)
})
});
} else {
- notification.open({
+ notification.success({
key: "docdeletedsuccesfully",
- type: "success",
message: t("documents.successes.delete")
});
- if (deletionCallback) deletionCallback();
+ if (isFunction(deletionCallback)) deletionCallback();
}
} catch (error) {
- notification["error"]({
+ notification.error({
message: t("documents.errors.deleting", {
error: error.message
})
diff --git a/server/graphql-client/queries.js b/server/graphql-client/queries.js
index 633a8a3c6..7d52542e5 100644
--- a/server/graphql-client/queries.js
+++ b/server/graphql-client/queries.js
@@ -2253,7 +2253,7 @@ exports.UPDATE_PARTS_CRITICAL = `mutation UPDATE_PARTS_CRITICAL ($IdsToMarkCriti
notcritical: update_joblines(where: {id: {_nin: $IdsToMarkCritical}, jobid: {_eq: $jobid}}, _set: {critical: false}) {
affected_rows
}
-}`;;
+}`;
exports.ACTIVE_SHOP_BY_USER = `query ACTIVE_SHOP_BY_USER($user: String) {
associations(where: {active: {_eq: true}, useremail: {_eq: $user}}) {
@@ -2706,8 +2706,6 @@ exports.INSERT_AUDIT_TRAIL = `
}
`;
-
-
exports.GET_DOCUMENTS_BY_JOB = `
query GET_DOCUMENTS_BY_JOB($jobId: uuid!) {
jobs_by_pk(id: $jobId) {