Merge branch 'release/1.7.0' into rome/1.7.0

This commit is contained in:
Patrick Fic
2025-06-19 14:14:40 -07:00
27 changed files with 6996 additions and 6800 deletions

View File

@@ -1,11 +1,12 @@
import * as Sentry from "@sentry/react-native";
import axios from "axios";
import * as MediaLibrary from "expo-media-library";
import env from "../env";
import { client } from "../graphql/client";
import { INSERT_NEW_DOCUMENT } from "../graphql/documents.queries";
import { axiosAuthInterceptorId } from "./CleanAxios";
import * as MediaLibrary from "expo-media-library";
import { gql } from "@apollo/client";
import * as Sentry from '@sentry/react-native';
import { splitClient } from "../components/screen-main/screen-main.component";
import * as FileSystem from "expo-file-system";
//Context: currentUserEmail, bodyshop, jobid, invoiceid
@@ -16,17 +17,82 @@ cleanAxios.interceptors.request.eject(axiosAuthInterceptorId);
/**
 * Upload a device media asset to either Imgproxy-backed S3 or Cloudinary,
 * choosing the destination from the "Imgproxy" split treatment.
 *
 * @param {Object} ev - Upload event: { mediaId, onError, onSuccess, onProgress }.
 * @param {Object} context - Upload context: { bodyshop, jobId, ... } (forwarded to the uploader).
 * @returns {Promise<Object>} The uploader's result, or { success: false, error, stack, mediaId } on failure.
 */
export const handleUpload = async (ev, context) => {
  const { mediaId, onError, onSuccess, onProgress } = ev;
  const { bodyshop, jobId } = context;
  try {
    const imageData = await MediaLibrary.getAssetInfoAsync(mediaId);
    // Prefer the on-device copy of the asset when available.
    const imageUri = imageData.localUri || imageData.uri;
    const newFile = await (await fetch(imageUri)).blob();
    const extension = imageData.filename.split(".").pop();
    // Default to Cloudinary in case of split treatment errors.
    const destination =
      splitClient?.getTreatment("Imgproxy") === "on" ? "imgproxy" : "cloudinary";
    // Imgproxy keys keep the file extension and are sanitized to ASCII word
    // characters; Cloudinary keys drop the extension instead.
    const key =
      destination === "imgproxy"
        ? `${bodyshop.id}/${jobId}/${replaceAccents(
            imageData.filename || imageUri.split("/").pop()
          ).replace(/[^A-Z0-9]+/gi, "_")}-${new Date().getTime()}.${extension}`
        : `${bodyshop.id}/${jobId}/${(
            imageData.filename || imageUri.split("/").pop()
          ).replace(/\.[^/.]+$/, "")}-${new Date().getTime()}`;
    // Both uploaders share the same signature, so pick the function once
    // instead of duplicating the 10-argument call.
    const uploader =
      destination === "imgproxy" ? uploadToImgproxy : uploadToCloudinary;
    const res = await uploader(
      key,
      mediaId,
      imageData,
      extension,
      newFile.type, //Filetype
      newFile, //File
      onError,
      onSuccess,
      onProgress,
      context
    );
    return res;
  } catch (error) {
    console.log("Error creating upload promise", error.message, error.stack);
    if (onError) onError(error.message);
    Sentry.captureException(error);
    return {
      success: false,
      error: error.message,
      stack: error.stack,
      mediaId,
    };
  }
};
/**
 * Upload a device media asset directly to Imgproxy-backed S3 storage
 * (no split-treatment routing — always uses uploadToImgproxy).
 *
 * NOTE(review): this block contained unresolved diff remnants (duplicate
 * old/new lines: two fetch expressions, a redeclared `extension`, and both a
 * Cloudinary and an Imgproxy call). Resolved to the "new" side; the argument
 * list is reconstructed to match uploadToImgproxy's signature — confirm
 * against upstream.
 *
 * @param {Object} ev - Upload event: { mediaId, onError, onSuccess, onProgress }.
 * @param {Object} context - Upload context: { bodyshop, jobId, ... }.
 * @returns {Promise<Object>} Result of uploadToImgproxy.
 */
export const handleUploadImgproxy = async (ev, context) => {
  const { mediaId, onError, onSuccess, onProgress } = ev;
  const { bodyshop, jobId } = context;
  const imageData = await MediaLibrary.getAssetInfoAsync(mediaId);
  // Prefer the on-device copy of the asset when available.
  const imageUri = imageData.localUri || imageData.uri;
  const newFile = await (await fetch(imageUri)).blob();
  const extension = imageUri.split(".").pop();
  // Key drops the original extension and appends a timestamp for uniqueness.
  const key = `${bodyshop.id}/${jobId}/${(
    imageData.filename || imageUri.split("/").pop()
  ).replace(/\.[^/.]+$/, "")}-${new Date().getTime()}`;
  const res = await uploadToImgproxy(
    key,
    mediaId,
    imageData,
    extension,
    newFile.type, //Filetype
    newFile, //File
    onError,
    onSuccess,
    onProgress,
    context
  );
  return res;
};
/**
 * Upload a file to S3 via a presigned Imgproxy URL, then record the upload as
 * a document row via GraphQL.
 *
 * @param {string} key - Storage key (bodyshopid/jobid/filename-timestamp.ext).
 * @param {string} mediaId - MediaLibrary asset id (echoed back in results).
 * @param {Object} imageData - Asset info; creationTime (if set) becomes `takenat`.
 * @param {string} extension - File extension recorded on the document row.
 * @param {string} fileType - MIME type, sent as Content-Type and stored as `type`.
 * @param {Blob} file - The file payload to PUT.
 * @param {Function} [onError] - Called with an error message on failure.
 * @param {Function} [onSuccess] - Called with { uid, name, status, key } on success.
 * @param {Function} [onProgress] - Called with { percent, loaded } during upload.
 * @param {Object} context - { bodyshop, jobId, uploaded_by }.
 * @returns {Promise<Object>} { success: true, mediaId } or { success: false, error, ... }.
 */
export const uploadToImgproxy = async (
  key,
  mediaId,
  imageData,
  extension,
  fileType,
  file,
  onError,
  onSuccess,
  onProgress,
  context
) => {
  const { bodyshop, jobId, uploaded_by } = context;
  //Get the signed url allowing us to PUT to S3.
  const signedURLResponse = await axios.post(
    `${env.API_URL}/media/imgproxy/sign`,
    {
      filenames: [key],
      bodyshopid: bodyshop.id,
      jobid: jobId,
    }
  );
  // Defensive: axios rejects on non-2xx by default, so this branch is only
  // reachable with a non-default validateStatus.
  if (signedURLResponse.status !== 200) {
    console.log("Error Getting Signed URL", signedURLResponse.statusText);
    if (onError) onError(signedURLResponse.statusText);
    return { success: false, error: signedURLResponse.statusText };
  }
  const { presignedUrl: preSignedUploadUrlToS3, key: s3Key } =
    signedURLResponse.data.signedUrls[0];
  // PUT the blob with XMLHttpRequest so upload progress events fire.
  // (Removed an unused axios `options` object that was built but never passed
  // anywhere, along with a leftover debug console.log in onprogress.)
  try {
    await new Promise((resolve, reject) => {
      const xhr = new XMLHttpRequest();
      xhr.open("PUT", preSignedUploadUrlToS3);
      xhr.setRequestHeader("Content-Type", fileType);
      xhr.upload.onprogress = (event) => {
        if (onProgress && event.lengthComputable) {
          onProgress({ percent: event.loaded / event.total, loaded: event.loaded });
        }
      };
      xhr.onload = () => {
        if (xhr.status === 200) {
          resolve();
        } else {
          reject(new Error(`Upload failed: ${xhr.statusText}`));
        }
      };
      // onerror receives a single event argument; none of it is needed here.
      xhr.onerror = () => {
        reject(new Error("Network error"));
      };
      xhr.send(file);
    });
  } catch (error) {
    console.log("Error uploading to S3", error.message, error.stack);
    if (onError) onError(error.message);
    Sentry.captureException(error);
    return {
      success: false,
      error: error.message,
      stack: error.stack,
      mediaId,
    };
  }
  // Record the uploaded file as a document row; jobid and takenat are only
  // included when available.
  const documentInsert = await client.mutate({
    mutation: INSERT_NEW_DOCUMENT,
    variables: {
      docInput: [
        {
          ...(jobId ? { jobid: jobId } : {}),
          uploaded_by: uploaded_by,
          key: s3Key,
          type: fileType,
          extension: extension,
          bodyshopid: bodyshop.id,
          size: file.size,
          ...(imageData.creationTime
            ? { takenat: new Date(imageData.creationTime) }
            : {}),
        },
      ],
    },
  });
  if (!documentInsert.errors) {
    if (onSuccess)
      onSuccess({
        uid: documentInsert.data.insert_documents.returning[0].id,
        name: documentInsert.data.insert_documents.returning[0].name,
        status: "done",
        key: documentInsert.data.insert_documents.returning[0].key,
      });
  } else {
    if (onError) onError(JSON.stringify(documentInsert.errors));
    return {
      success: false,
      error: JSON.stringify(documentInsert.errors),
      mediaId,
    };
  }
  return { success: true, mediaId };
};
export const uploadToCloudinary = async (
key,
mediaId,
@@ -72,7 +261,7 @@ export const uploadToCloudinary = async (
});
} catch (error) {
console.log("ERROR GETTING SIGNED URL", error);
Sentry.Native.captureException(error);
Sentry.captureException(error);
return { success: false, error: error };
}
@@ -121,7 +310,7 @@ export const uploadToCloudinary = async (
);
} catch (error) {
console.log("CLOUDINARY error", error.response, cloudinaryUploadResponse);
Sentry.Native.captureException(error);
Sentry.captureException(error);
if (onError) onError(error.message);
return { success: false, error: error };
@@ -188,13 +377,46 @@ export function DetermineFileType(filetype) {
}
/**
 * Format a byte count as a human-readable string ("1.5 KB", "2 MB", ...).
 *
 * NOTE(review): this block contained unresolved diff remnants (old/new pairs
 * of the zero-guard and the return expression); resolved to the "new" side.
 *
 * @param {number} a - Byte count. Falsy, NaN, or negative values yield "0 Bytes".
 * @param {number} [b=2] - Maximum number of decimal places (negative clamps to 0).
 * @returns {string} Formatted size with unit suffix.
 */
export function formatBytes(a, b = 2) {
  if (0 === a || !a || isNaN(a)) return "0 Bytes";
  const c = 0 > b ? 0 : b,
    d = Math.floor(Math.log(a) / Math.log(1024));
  const parsedFloat = parseFloat((a / Math.pow(1024, d)).toFixed(c));
  // Negative or otherwise invalid inputs make the log/pow chain NaN.
  if (isNaN(parsedFloat)) {
    return "0 Bytes";
  }
  return (
    parsedFloat +
    " " +
    ["Bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"][d]
  );
}
/**
 * Replace Latin-1 accented characters (U+00C0–U+00FF) with ASCII equivalents.
 * Characters in that range without a mapping (e.g. \xDF, \xF0) pass through
 * unchanged.
 *
 * @param {string} str - Input string.
 * @returns {string} The string with accented characters transliterated.
 */
function replaceAccents(str) {
  // Fast path: no characters in the Latin-1 accent range at all.
  if (str.search(/[\xC0-\xFF]/g) === -1) {
    return str;
  }
  // Ordered [pattern, replacement] pairs — same mappings as a chain of
  // .replace() calls, expressed as data.
  const substitutions = [
    [/[\xC0-\xC5]/g, "A"],
    [/[\xC6]/g, "AE"],
    [/[\xC7]/g, "C"],
    [/[\xC8-\xCB]/g, "E"],
    [/[\xCC-\xCF]/g, "I"],
    [/[\xD0]/g, "D"],
    [/[\xD1]/g, "N"],
    [/[\xD2-\xD6\xD8]/g, "O"],
    [/[\xD9-\xDC]/g, "U"],
    [/[\xDD]/g, "Y"],
    [/[\xDE]/g, "P"],
    [/[\xE0-\xE5]/g, "a"],
    [/[\xE6]/g, "ae"],
    [/[\xE7]/g, "c"],
    [/[\xE8-\xEB]/g, "e"],
    [/[\xEC-\xEF]/g, "i"],
    [/[\xF1]/g, "n"],
    [/[\xF2-\xF6\xF8]/g, "o"],
    [/[\xF9-\xFC]/g, "u"],
    [/[\xFE]/g, "p"],
    [/[\xFD\xFF]/g, "y"],
  ];
  let result = str;
  for (const [pattern, ascii] of substitutions) {
    result = result.replace(pattern, ascii);
  }
  return result;
}

View File

@@ -2,7 +2,7 @@ import axios from "axios";
import { store } from "../redux/store";
import mime from "mime";
import * as MediaLibrary from "expo-media-library";
import * as Sentry from '@sentry/react-native';
import * as Sentry from "@sentry/react-native";
axios.interceptors.request.use(
function (config) {
@@ -45,8 +45,9 @@ export const handleLocalUpload = async ({
ims_token: bodyshop.localmediatoken,
},
onUploadProgress: (e) => {
if (onProgress)
if (onProgress) {
onProgress({ percent: e.loaded / e.total, loaded: e.loaded });
}
},
};
@@ -95,14 +96,14 @@ export const handleLocalUpload = async ({
});
}
} catch (error) {
Sentry.Native.captureException(error);
Sentry.captureException(error);
console.log("Error uploading documents:", error.message);
onError && onError({ error: error.message });
}
} catch (error) {
console.log("Uncaught error", error);
Sentry.Native.captureException(error);
Sentry.captureException(error);
onError && onError({ error: error.message });
}