Merge branch 'release/2024-11-15' into feature/IO-2920-cash-discounting

Author: Patrick Fic
Date: 2024-11-15 10:58:22 -08:00
51 changed files with 2387 additions and 773 deletions

.gitattributes (new file)

@@ -0,0 +1 @@
* text eol=lf


@@ -0,0 +1,24 @@
#!/bin/bash
# Install required packages
dnf install -y fontconfig freetype
# Move to the /tmp directory for temporary download and extraction
cd /tmp
# Download the Montserrat font zip file
wget https://images.imex.online/fonts/montserrat.zip -O montserrat.zip
# Unzip the downloaded font file
unzip montserrat.zip -d montserrat
# Move the font files to the system fonts directory
mv montserrat/montserrat/*.ttf /usr/share/fonts
# Rebuild the font cache
fc-cache -fv
# Clean up
rm -rf /tmp/montserrat /tmp/montserrat.zip
echo "Montserrat fonts installed and cached successfully."

.vscode/settings.json

@@ -8,5 +8,35 @@
"pattern": "**/IMEX.xml",
"systemId": "logs/IMEX.xsd"
}
],
"cSpell.words": [
"antd",
"appointmentconfirmation",
"appt",
"autohouse",
"autohouseid",
"billlines",
"bodyshop",
"bodyshopid",
"bodyshops",
"CIECA",
"claimscorp",
"claimscorpid",
"Dinero",
"driveable",
"IMEX",
"imexshopid",
"jobid",
"joblines",
"Kaizen",
"labhrs",
"larhrs",
"mixdata",
"ownr",
"promanager",
"shopname",
"smartscheduling",
"timetickets",
"touchtime"
]
}


@@ -1,10 +1,10 @@
import { Card, Table, Tag } from "antd";
import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
import { useTranslation } from "react-i18next";
import React, { useEffect, useState } from "react";
import dayjs from "../../../utils/day";
import DashboardRefreshRequired from "../refresh-required.component";
import axios from "axios";
import React, { useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import dayjs from "../../../utils/day";
import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
import DashboardRefreshRequired from "../refresh-required.component";
const fortyFiveDaysAgo = () => dayjs().subtract(45, "day").toLocaleString();
@@ -46,6 +46,11 @@ export default function JobLifecycleDashboardComponent({ data, bodyshop, ...card
dataIndex: "humanReadable",
key: "humanReadable"
},
{
title: t("job_lifecycle.columns.average_human_readable"),
dataIndex: "averageHumanReadable",
key: "averageHumanReadable"
},
{
title: t("job_lifecycle.columns.status_count"),
key: "statusCount",


@@ -1,4 +1,4 @@
import { DatePicker } from "antd";
import { DatePicker, Space, TimePicker } from "antd";
import PropTypes from "prop-types";
import React, { useCallback, useState } from "react";
import { useTranslation } from "react-i18next";
@@ -20,6 +20,7 @@ const DateTimePicker = ({
onlyFuture,
onlyToday,
isDateOnly = false,
isSeparatedTime = false,
bodyshop,
...restProps
}) => {
@@ -87,24 +88,56 @@ const DateTimePicker = ({
return (
<div onKeyDown={handleKeyDown} id={id} style={{ width: "100%" }}>
<DatePicker
showTime={
isDateOnly
? false
: {
format: "hh:mm a",
minuteStep: 15,
defaultValue: dayjs(dayjs(), "HH:mm:ss")
}
}
format={isDateOnly ? "MM/DD/YYYY" : "MM/DD/YYYY hh:mm a"}
value={value ? dayjs(value) : null}
onChange={handleChange}
placeholder={isDateOnly ? t("general.labels.date") : t("general.labels.datetime")}
onBlur={onBlur || handleBlur}
disabledDate={handleDisabledDate}
{...restProps}
/>
{isSeparatedTime && (
<Space direction="vertical" style={{ width: "100%" }}>
<DatePicker
showTime={false}
format="MM/DD/YYYY"
value={value ? dayjs(value) : null}
onChange={handleChange}
placeholder={t("general.labels.date")}
onBlur={handleBlur}
disabledDate={handleDisabledDate}
isDateOnly={true}
{...restProps}
/>
{value && (
<TimePicker
format="hh:mm a"
minuteStep={15}
defaultOpenValue={dayjs()
.minute(Math.floor(dayjs().minute() / 15) * 15)
.second(0)}
onChange={(value) => {
handleChange(value);
onBlur();
}}
placeholder={t("general.labels.time")}
{...restProps}
/>
)}
</Space>
)}
{!isSeparatedTime && (
<DatePicker
showTime={
isDateOnly
? false
: {
format: "hh:mm a",
minuteStep: 15,
defaultValue: dayjs(dayjs(), "HH:mm:ss")
}
}
format={isDateOnly ? "MM/DD/YYYY" : "MM/DD/YYYY hh:mm a"}
value={value ? dayjs(value) : null}
onChange={handleChange}
placeholder={isDateOnly ? t("general.labels.date") : t("general.labels.datetime")}
onBlur={onBlur || handleBlur}
disabledDate={handleDisabledDate}
{...restProps}
/>
)}
</div>
);
};
@@ -116,7 +149,8 @@ DateTimePicker.propTypes = {
id: PropTypes.string,
onlyFuture: PropTypes.bool,
onlyToday: PropTypes.bool,
isDateOnly: PropTypes.bool
isDateOnly: PropTypes.bool,
isSeparatedTime: PropTypes.bool
};
export default connect(mapStateToProps, null)(DateTimePicker);

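For context, a minimal usage sketch of the new isSeparatedTime mode: when the flag is set, the component renders a date-only picker plus a separate 15-minute-step TimePicker once a date is chosen, instead of the combined date-time popup. The field name and label below are illustrative, not from this diff.

// Illustrative sketch only: "scheduled_start" and the label are assumed names.
import { Form } from "antd";
import React from "react";
import DateTimePicker from "./form-date-time-picker.component";

export function ScheduledStartField() {
  return (
    <Form.Item name="scheduled_start" label="Scheduled Start">
      <DateTimePicker onlyFuture isSeparatedTime />
    </Form.Item>
  );
}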

@@ -118,8 +118,7 @@ export function JobLinesComponent({
...(record.critical ? { boxShadow: " -.5em 0 0 #FFC107" } : {})
}
}),
sortOrder: state.sortedInfo.columnKey === "line_desc" && state.sortedInfo.order,
ellipsis: true
sortOrder: state.sortedInfo.columnKey === "line_desc" && state.sortedInfo.order
},
{
title: t("joblines.fields.oem_partno"),


@@ -45,7 +45,8 @@ export default function JobLineNotePopup({ jobline, disabled }) {
if (editing)
return (
<div>
<Input
<Input.TextArea
autoSize
autoFocus
suffix={loading ? <LoadingSpinner /> : null}
value={note}


@@ -1,10 +1,10 @@
import { useSplitTreatments } from "@splitsoftware/splitio-react";
import { Form, Input, InputNumber, Modal, Select, Switch } from "antd";
import React, { useEffect } from "react";
import { useTranslation } from "react-i18next";
import InputCurrency from "../form-items-formatted/currency-form-item.component";
import LayoutFormRow from "../layout-form-row/layout-form-row.component";
import JoblinesPreset from "../job-lines-preset-button/job-lines-preset-button.component";
import { useSplitTreatments } from "@splitsoftware/splitio-react";
import LayoutFormRow from "../layout-form-row/layout-form-row.component";
import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
@@ -61,7 +61,7 @@ export function JobLinesUpsertModalComponent({ bodyshop, open, jobLine, handleCa
]}
name="line_desc"
>
<Input />
<Input.TextArea autoSize />
</Form.Item>
<JoblinesPreset form={form} />
</LayoutFormRow>


@@ -5,6 +5,7 @@ import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
import { selectJobReadOnly } from "../../redux/application/application.selectors";
import { selectBodyshop } from "../../redux/user/user.selectors";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component.jsx";
import CurrencyInput from "../form-items-formatted/currency-form-item.component";
import FormItemEmail from "../form-items-formatted/email-form-item.component";
import FormItemPhone, { PhoneItemFormatterValidation } from "../form-items-formatted/phone-form-item.component";
@@ -12,7 +13,6 @@ import Car from "../job-damage-visual/job-damage-visual.component";
import JobsDetailChangeEstimator from "../jobs-detail-change-estimator/jobs-detail-change-estimator.component";
import JobsDetailChangeFileHandler from "../jobs-detail-change-filehandler/jobs-detail-change-filehandler.component";
import FormRow from "../layout-form-row/layout-form-row.component";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component.jsx";
const mapStateToProps = createStructuredSelector({
jobRO: selectJobReadOnly,
@@ -185,6 +185,9 @@ export function JobsDetailGeneral({ bodyshop, jobRO, job, form }) {
<Form.Item label={t("jobs.fields.towin")} name="towin" valuePropName="checked">
<Switch disabled={jobRO} />
</Form.Item>
<Form.Item label={t("jobs.fields.tlos_ind")} name="tlos_ind" valuePropName="checked">
<Switch disabled={jobRO} />
</Form.Item>
</FormRow>
</Col>
<Col {...lossColDamage}>


@@ -1,6 +1,5 @@
import { Button, Col, Form, Input, Row, Select, Space, Switch, Typography } from "antd";
import axios from "axios";
import dayjs from "../../utils/day";
import React, { useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -8,13 +7,14 @@ import { createStructuredSelector } from "reselect";
import { calculateScheduleLoad } from "../../redux/application/application.actions";
import { selectBodyshop } from "../../redux/user/user.selectors";
import { DateFormatter } from "../../utils/DateFormatter";
import dayjs from "../../utils/day";
import InstanceRenderManager from "../../utils/instanceRenderMgr";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component";
import EmailInput from "../form-items-formatted/email-form-item.component";
import LayoutFormRow from "../layout-form-row/layout-form-row.component";
import ScheduleDayViewContainer from "../schedule-day-view/schedule-day-view.container";
import ScheduleExistingAppointmentsList from "../schedule-existing-appointments-list/schedule-existing-appointments-list.component";
import "./schedule-job-modal.scss";
import InstanceRenderManager from "../../utils/instanceRenderMgr";
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
@@ -84,7 +84,7 @@ export function ScheduleJobModalComponent({
}
]}
>
<DateTimePicker onBlur={handleDateBlur} onlyFuture />
<DateTimePicker onBlur={handleDateBlur} onlyFuture isSeparatedTime />
</Form.Item>
<Form.Item
name="scheduled_completion"


@@ -1,8 +1,9 @@
import { useEffect, useState, useRef } from "react";
import { useEffect, useRef, useState } from "react";
import SocketIO from "socket.io-client";
import { auth } from "../../firebase/firebase.utils";
import { store } from "../../redux/store";
import { setWssStatus } from "../../redux/application/application.actions";
import { addAlerts, setWssStatus } from "../../redux/application/application.actions";
const useSocket = (bodyshop) => {
const socketRef = useRef(null);
const [clientId, setClientId] = useState(null);
@@ -31,6 +32,14 @@ const useSocket = (bodyshop) => {
socketRef.current = socketInstance;
const handleBodyshopMessage = (message) => {
if (!message || !message?.type) return;
switch (message.type) {
case "alert-update":
store.dispatch(addAlerts(message.payload));
break;
}
if (!import.meta.env.DEV) return;
console.log(`Received message for bodyshop ${bodyshop.id}:`, message);
};
@@ -39,22 +48,22 @@ const useSocket = (bodyshop) => {
console.log("Socket connected:", socketInstance.id);
socketInstance.emit("join-bodyshop-room", bodyshop.id);
setClientId(socketInstance.id);
store.dispatch(setWssStatus("connected"))
store.dispatch(setWssStatus("connected"));
};
const handleReconnect = (attempt) => {
console.log(`Socket reconnected after ${attempt} attempts`);
store.dispatch(setWssStatus("connected"))
store.dispatch(setWssStatus("connected"));
};
const handleConnectionError = (err) => {
console.error("Socket connection error:", err);
store.dispatch(setWssStatus("error"))
store.dispatch(setWssStatus("error"));
};
const handleDisconnect = () => {
console.log("Socket disconnected");
store.dispatch(setWssStatus("disconnected"))
store.dispatch(setWssStatus("disconnected"));
};
socketInstance.on("connect", handleConnect);


@@ -692,6 +692,7 @@ export const GET_JOB_BY_PK = gql`
tax_str_rt
tax_sub_rt
tax_tow_rt
tlos_ind
towin
towing_payable
unit_number


@@ -1,4 +1,4 @@
import { FloatButton, Layout, Spin } from "antd";
import { FloatButton, Layout, notification, Spin } from "antd";
// import preval from "preval.macro";
import React, { lazy, Suspense, useContext, useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
@@ -21,11 +21,12 @@ import ShopSubStatusComponent from "../../components/shop-sub-status/shop-sub-st
import { requestForToken } from "../../firebase/firebase.utils";
import SocketContext from "../../contexts/SocketIO/socketContext.jsx";
import { selectBodyshop, selectInstanceConflict } from "../../redux/user/user.selectors";
import UpdateAlert from "../../components/update-alert/update-alert.component";
import InstanceRenderManager from "../../utils/instanceRenderMgr.js";
import "./manage.page.styles.scss";
import WssStatusDisplayComponent from "../../components/wss-status-display/wss-status-display.component.jsx";
import { selectAlerts } from "../../redux/application/application.selectors.js";
import { addAlerts } from "../../redux/application/application.actions.js";
const JobsPage = lazy(() => import("../jobs/jobs.page"));
@@ -104,16 +105,80 @@ const { Content, Footer } = Layout;
const mapStateToProps = createStructuredSelector({
conflict: selectInstanceConflict,
bodyshop: selectBodyshop
bodyshop: selectBodyshop,
alerts: selectAlerts
});
const mapDispatchToProps = (dispatch) => ({});
const ALERT_FILE_URL = InstanceRenderManager({
imex: "https://images.imex.online/alerts/alerts-imex.json",
rome: "https://images.imex.online/alerts/alerts-rome.json"
});
export function Manage({ conflict, bodyshop }) {
const mapDispatchToProps = (dispatch) => ({
setAlerts: (alerts) => dispatch(addAlerts(alerts))
});
export function Manage({ conflict, bodyshop, alerts, setAlerts }) {
const { t } = useTranslation();
const [chatVisible] = useState(false);
const { socket, clientId } = useContext(SocketContext);
// State to track displayed alerts
const [displayedAlertIds, setDisplayedAlertIds] = useState([]);
// Fetch displayed alerts from localStorage on mount
useEffect(() => {
const displayedAlerts = JSON.parse(localStorage.getItem("displayedAlerts") || "[]");
setDisplayedAlertIds(displayedAlerts);
}, []);
// Fetch alerts from the JSON file and dispatch to Redux store
useEffect(() => {
const fetchAlerts = async () => {
try {
const response = await fetch(ALERT_FILE_URL);
const fetchedAlerts = await response.json();
setAlerts(fetchedAlerts);
} catch (error) {
console.error("Error fetching alerts:", error);
}
};
fetchAlerts();
}, []);
// Use useEffect to watch for new alerts
useEffect(() => {
if (alerts && Object.keys(alerts).length > 0) {
// Convert the alerts object into an array
const alertArray = Object.values(alerts);
// Filter out alerts that have already been dismissed
const newAlerts = alertArray.filter((alert) => !displayedAlertIds.includes(alert.id));
newAlerts.forEach((alert) => {
// Display the notification
notification.open({
key: "notification-alerts-" + alert.id,
message: alert.message,
description: alert.description,
type: alert.type || "info",
duration: 0,
placement: "bottomRight",
closable: true,
onClose: () => {
// When the notification is closed, update displayed alerts state and localStorage
setDisplayedAlertIds((prevIds) => {
const updatedIds = [...prevIds, alert.id];
localStorage.setItem("displayedAlerts", JSON.stringify(updatedIds));
return updatedIds;
});
}
});
});
}
}, [alerts, displayedAlertIds]);
useEffect(() => {
const widgetId = InstanceRenderManager({
imex: "IABVNO4scRKY11XBQkNr",


@@ -67,6 +67,12 @@ export const setUpdateAvailable = (isUpdateAvailable) => ({
type: ApplicationActionTypes.SET_UPDATE_AVAILABLE,
payload: isUpdateAvailable
});
export const addAlerts = (alerts) => ({
type: ApplicationActionTypes.ADD_ALERTS,
payload: alerts
});
export const setWssStatus = (status) => ({
type: ApplicationActionTypes.SET_WSS_STATUS,
payload: status


@@ -15,7 +15,8 @@ const INITIAL_STATE = {
error: null
},
jobReadOnly: false,
partnerVersion: null
partnerVersion: null,
alerts: {}
};
const applicationReducer = (state = INITIAL_STATE, action) => {
@@ -91,6 +92,18 @@ const applicationReducer = (state = INITIAL_STATE, action) => {
case ApplicationActionTypes.SET_WSS_STATUS: {
return { ...state, wssStatus: action.payload };
}
case ApplicationActionTypes.ADD_ALERTS: {
const newAlertsMap = { ...state.alerts };
action.payload.alerts.forEach((alert) => {
newAlertsMap[alert.id] = alert;
});
return {
...state,
alerts: newAlertsMap
};
}
default:
return state;
}

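A quick sketch of how ADD_ALERTS merges payloads, assuming the { alerts: [...] } payload shape the reducer reads above (import paths are illustrative):

// Sketch: successive ADD_ALERTS dispatches merge into the id-keyed map.
import { addAlerts } from "./application.actions";
import { store } from "../store"; // path assumed

store.dispatch(addAlerts({ alerts: [{ id: "a1", message: "Maintenance tonight" }] }));
store.dispatch(addAlerts({ alerts: [{ id: "a2", message: "New release" }] }));
// store.getState().application.alerts => { a1: { id: "a1", ... }, a2: { id: "a2", ... } }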

@@ -23,3 +23,4 @@ export const selectOnline = createSelector([selectApplication], (application) =>
export const selectProblemJobs = createSelector([selectApplication], (application) => application.problemJobs);
export const selectUpdateAvailable = createSelector([selectApplication], (application) => application.updateAvailable);
export const selectWssStatus = createSelector([selectApplication], (application) => application.wssStatus);
export const selectAlerts = createSelector([selectApplication], (application) => application.alerts);


@@ -13,6 +13,7 @@ const ApplicationActionTypes = {
INSERT_AUDIT_TRAIL: "INSERT_AUDIT_TRAIL",
SET_PROBLEM_JOBS: "SET_PROBLEM_JOBS",
SET_UPDATE_AVAILABLE: "SET_UPDATE_AVAILABLE",
SET_WSS_STATUS: "SET_WSS_STATUS"
SET_WSS_STATUS: "SET_WSS_STATUS",
ADD_ALERTS: "ADD_ALERTS"
};
export default ApplicationActionTypes;


@@ -1257,6 +1257,7 @@
"sunday": "Sunday",
"text": "Text",
"thursday": "Thursday",
"time": "Select Time",
"total": "Total",
"totals": "Totals",
"tuesday": "Tuesday",
@@ -1341,6 +1342,8 @@
},
"job_lifecycle": {
"columns": {
"average_human_readable": "Average Human Readable",
"average_value": "Average Value",
"duration": "Duration",
"end": "End",
"human_readable": "Human Readable",
@@ -1874,6 +1877,7 @@
"tax_str_rt": "Storage Tax Rate",
"tax_sub_rt": "Sublet Tax Rate",
"tax_tow_rt": "Towing Tax Rate",
"tlos_ind": "Total Loss Indicator",
"towin": "Tow In",
"towing_payable": "Towing Payable",
"unitnumber": "Unit #",


@@ -1257,6 +1257,7 @@
"sunday": "",
"text": "",
"thursday": "",
"time": "",
"total": "",
"totals": "",
"tuesday": "",
@@ -1341,6 +1342,8 @@
},
"job_lifecycle": {
"columns": {
"average_human_readable": "",
"average_value": "",
"duration": "",
"end": "",
"human_readable": "",
@@ -1874,6 +1877,7 @@
"tax_str_rt": "",
"tax_sub_rt": "",
"tax_tow_rt": "",
"tlos_ind": "",
"towin": "",
"towing_payable": "Remolque a pagar",
"unitnumber": "Unidad #",


@@ -1257,6 +1257,7 @@
"sunday": "",
"text": "",
"thursday": "",
"time": "",
"total": "",
"totals": "",
"tuesday": "",
@@ -1341,6 +1342,8 @@
},
"job_lifecycle": {
"columns": {
"average_human_readable": "",
"average_value": "",
"duration": "",
"end": "",
"human_readable": "",
@@ -1874,6 +1877,7 @@
"tax_str_rt": "",
"tax_sub_rt": "",
"tax_tow_rt": "",
"tlos_ind": "",
"towin": "",
"towing_payable": "Remorquage à payer",
"unitnumber": "Unité #",


@@ -74,7 +74,7 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
environment:
- SERVICES=ses,secretsmanager,cloudwatch,logs
- SERVICES=s3,ses,secretsmanager,cloudwatch,logs
- DEBUG=0
- AWS_ACCESS_KEY_ID=test
- AWS_SECRET_ACCESS_KEY=test
@@ -115,7 +115,8 @@ services:
aws --endpoint-url=http://localstack:4566 ses verify-domain-identity --domain imex.online --region ca-central-1
aws --endpoint-url=http://localstack:4566 ses verify-email-identity --email-address noreply@imex.online --region ca-central-1
aws --endpoint-url=http://localstack:4566 secretsmanager create-secret --name CHATTER_PRIVATE_KEY --secret-string file:///tmp/certs/id_rsa
aws --endpoint-url=http://localstack:4566 logs create-log-group --log-group-name development --region ca-central-1
aws --endpoint-url=http://localstack:4566 logs create-log-group --log-group-name development --region ca-central-1
aws --endpoint-url=http://localstack:4566 s3api create-bucket --bucket imex-large-log --create-bucket-configuration LocationConstraint=ca-central-1
"
# Node App: The Main IMEX API
node-app:
@@ -167,6 +168,28 @@ services:
# volumes:
# - redis-insight-data:/db
# ##Optional Container for SFTP/SSH Server for testing
# ssh-sftp-server:
# image: atmoz/sftp:alpine # Using an image with SFTP support
# container_name: ssh-sftp-server
# hostname: ssh-sftp-server
# networks:
# - redis-cluster-net
# ports:
# - "2222:22" # Expose port 22 for SSH/SFTP (mapped to 2222 on the host)
# volumes:
# - ./certs/id_rsa.pub:/home/user/.ssh/keys/id_rsa.pub:ro # Mount the SSH public key
# - ./upload:/home/user/upload # Mount a local directory for SFTP uploads
# environment:
# - SFTP_USERS=user:password:1000::upload
# command: >
# /bin/sh -c "
# chmod -R 007 /home/user/upload &&
# echo 'Match User user' >> /etc/ssh/sshd_config &&
# sed -i -e 's#ForceCommand internal-sftp#ForceCommand internal-sftp -d /upload#' /etc/ssh/sshd_config &&
# /usr/sbin/sshd -D
# "
networks:
redis-cluster-net:
driver: bridge


@@ -0,0 +1,3 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_timetickets_date ON timetickets (date);


@@ -0,0 +1 @@
CREATE INDEX idx_timetickets_date ON timetickets (date);


@@ -0,0 +1,9 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
-- CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
-- CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
-- CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
-- CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
-- CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
-- CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);


@@ -0,0 +1,7 @@
CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);

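These gin_trgm_ops indexes assume the pg_trgm extension is installed; they let substring searches (ILIKE '%term%') over the owner, claim, make, model, and plate columns use an index scan instead of a sequential scan. A sketch of the kind of lookup they serve, using node-postgres (the client and query text are illustrative, not part of this migration):

// Illustrative query the trigram indexes accelerate (node-postgres assumed).
const { Pool } = require("pg");
const pool = new Pool();

async function searchJobsByOwnerLastName(term) {
  const { rows } = await pool.query(
    "SELECT id, ro_number, ownr_fn, ownr_ln FROM jobs WHERE ownr_ln ILIKE $1",
    [`%${term}%`]
  );
  return rows;
}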

@@ -0,0 +1,3 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);


@@ -0,0 +1 @@
CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);


@@ -0,0 +1,4 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
-- WHERE read = false AND isoutbound = false;


@@ -0,0 +1,2 @@
CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
WHERE read = false AND isoutbound = false;

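Because the index is filtered to read = false AND isoutbound = false, it stays small and can answer the unread-inbound aggregate directly; roughly (node-postgres assumed, query text illustrative):

// Illustrative unread-count query the partial index serves.
async function countUnreadInbound(pool) {
  const { rows } = await pool.query(
    "SELECT count(*) AS unread FROM messages WHERE read = false AND isoutbound = false"
  );
  return Number(rows[0].unread);
}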

@@ -0,0 +1,10 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX jobs_search_gin_ro_number ON jobs USING GIN ((ro_number) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_ownrfn ON jobs USING GIN ((ownr_fn) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_clm_no ON jobs USING GIN ((clm_no) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_plate_no ON jobs USING GIN ((plate_no) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_v_make_desc ON jobs USING GIN ((v_make_desc) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_v_model_desc ON jobs USING GIN ((v_model_desc) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_ownr_ln ON jobs USING GIN ((ownr_ln) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_ownr_co_nm ON jobs USING GIN ((ownr_co_nm) gin_trgm_ops);


@@ -0,0 +1,8 @@
CREATE INDEX jobs_search_gin_ro_number ON jobs USING GIN ((ro_number) gin_trgm_ops);
CREATE INDEX jobs_search_gin_ownrfn ON jobs USING GIN ((ownr_fn) gin_trgm_ops);
CREATE INDEX jobs_search_gin_clm_no ON jobs USING GIN ((clm_no) gin_trgm_ops);
CREATE INDEX jobs_search_gin_plate_no ON jobs USING GIN ((plate_no) gin_trgm_ops);
CREATE INDEX jobs_search_gin_v_make_desc ON jobs USING GIN ((v_make_desc) gin_trgm_ops);
CREATE INDEX jobs_search_gin_v_model_desc ON jobs USING GIN ((v_model_desc) gin_trgm_ops);
CREATE INDEX jobs_search_gin_ownr_ln ON jobs USING GIN ((ownr_ln) gin_trgm_ops);
CREATE INDEX jobs_search_gin_ownr_co_nm ON jobs USING GIN ((ownr_co_nm) gin_trgm_ops);

package-lock.json (generated; diff suppressed because it is too large)


@@ -21,6 +21,7 @@
"dependencies": {
"@aws-sdk/client-cloudwatch-logs": "^3.679.0",
"@aws-sdk/client-elasticache": "^3.675.0",
"@aws-sdk/client-s3": "^3.689.0",
"@aws-sdk/client-secrets-manager": "^3.675.0",
"@aws-sdk/client-ses": "^3.675.0",
"@aws-sdk/credential-provider-node": "^3.675.0",

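The new @aws-sdk/client-s3 dependency pairs with the imex-large-log bucket created in LocalStack above; a minimal sketch of a client pointed at the local endpoint (the endpoint, credentials, and bucket name come from docker-compose.yml; the helper itself is an assumption):

// Sketch: S3 client wired to the LocalStack endpoint for development.
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");

const s3 = new S3Client({
  region: "ca-central-1",
  endpoint: "http://localstack:4566", // dev only
  forcePathStyle: true, // LocalStack needs path-style bucket addressing
  credentials: { accessKeyId: "test", secretAccessKey: "test" }
});

async function putLargeLog(key, body) {
  await s3.send(new PutObjectCommand({ Bucket: "imex-large-log", Key: key, Body: body }));
}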

@@ -21,7 +21,7 @@ const { applyRedisHelpers } = require("./server/utils/redisHelpers");
const { applyIOHelpers } = require("./server/utils/ioHelpers");
const { redisSocketEvents } = require("./server/web-sockets/redisSocketEvents");
const { ElastiCacheClient, DescribeCacheClustersCommand } = require("@aws-sdk/client-elasticache");
const { default: InstanceManager } = require("./server/utils/instanceMgr");
const { InstanceRegion } = require("./server/utils/instanceMgr");
const CLUSTER_RETRY_BASE_DELAY = 100;
const CLUSTER_RETRY_MAX_DELAY = 5000;
@@ -114,10 +114,7 @@ const applyRoutes = ({ app }) => {
*/
const getRedisNodesFromAWS = async () => {
const client = new ElastiCacheClient({
region: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
})
region: InstanceRegion()
});
const params = {


@@ -167,7 +167,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
const Vendor = {
DisplayName: bill.vendor.name
DisplayName: StandardizeName(bill.vendor.name)
};
try {
const result = await oauthClient.makeApiCall({


@@ -219,6 +219,7 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef,
PaymentMethodRef: {
value: paymentMethods[payment.type]
},
PrivateNote: payment.memo.length > 4000 ? payment.memo.substring(0, 4000).trim() : payment.memo.trim(),
PaymentRefNum: payment.transactionid,
...(invoices && invoices.length === 1 && invoices[0]
? {


@@ -10,7 +10,7 @@ function urlBuilder(realmId, object, query = null) {
}
function StandardizeName(str) {
return str.replace(new RegExp(/'/g), "\\'");
return str.replace(new RegExp(/'/g), "\\'").trim();
}
exports.urlBuilder = urlBuilder;

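For example (escape, then trim):

// StandardizeName("  O'Brien Auto  ") => "O\\'Brien Auto"
//  - the apostrophe is escaped for the QBO query string
//  - the added .trim() drops padding that previously produced mismatched vendor names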

@@ -0,0 +1,76 @@
const axios = require("axios");
const _ = require("lodash");
const { default: InstanceMgr } = require("../utils/instanceMgr"); // For deep object comparison
// Constants
const ALERTS_REDIS_KEY = "alerts_data"; // The key under which we'll store alerts in Redis
const GLOBAL_SOCKET_ID = "global"; // Use 'global' as a socketId to store global data
const ALERT_FILE_URL = InstanceMgr({
imex: "https://images.imex.online/alerts/alerts-imex.json",
rome: "https://images.imex.online/alerts/alerts-rome.json"
});
const alertCheck = async (req, res) => {
// Access Redis helper functions
const { ioRedis, logger } = req;
const { getSessionData, setSessionData } = req.sessionUtils;
try {
// Get the JSON Alert file from the server
const response = await axios.get(ALERT_FILE_URL);
const currentAlerts = response.data;
// Retrieve stored alerts from Redis using a global socketId
const storedAlerts = await getSessionData(GLOBAL_SOCKET_ID, ALERTS_REDIS_KEY);
if (!storedAlerts) {
// Alerts not in Redis, store them
await setSessionData(GLOBAL_SOCKET_ID, ALERTS_REDIS_KEY, currentAlerts);
logger.logger.debug("Alerts added to Redis for the first time.");
// Emit to clients
if (ioRedis) {
ioRedis.emit("bodyshop-message", {
type: "alert-update",
payload: currentAlerts
});
logger.logger.debug("Alerts emitted to clients for the first time.");
} else {
logger.log("Socket.IO instance not found. (1)", "error");
}
return res.status(200).send("Alerts added to Redis and emitted to clients.");
} else {
// Alerts are in Redis, compare them
if (!_.isEqual(currentAlerts, storedAlerts)) {
// Alerts are different, update Redis and emit to clients
await setSessionData(GLOBAL_SOCKET_ID, ALERTS_REDIS_KEY, currentAlerts);
logger.logger.debug("Alerts updated in Redis.");
// Emit the new alerts to all connected clients
if (ioRedis) {
ioRedis.emit("bodyshop-message", {
type: "alert-update",
payload: currentAlerts
});
logger.logger.debug("Alerts emitted to clients after update.");
} else {
logger.log("Socket.IO instance not found. (2)", "error");
}
return res.status(200).send("Alerts updated in Redis and emitted to clients.");
} else {
return res.status(200).send("No changes in alerts.");
}
}
} catch (error) {
logger.log("Error in alertCheck:", "error", null, null, {
error: {
message: error.message,
stack: error.stack
}
});
return res.status(500).send("Internal server error.");
}
};
module.exports = { alertCheck };

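A hedged sketch of how this handler might be mounted; the route path and invocation cadence are assumptions, and alertCheck relies on middleware having attached ioRedis, logger, and sessionUtils to the request:

// Sketch only: route path and scheduler are assumptions, not part of this diff.
const express = require("express");
const { alertCheck } = require("./routes/alertCheck"); // path assumed

const app = express();
// Hit periodically (e.g. from a scheduler) to diff the hosted alerts JSON
// against Redis and broadcast "alert-update" to connected clients.
app.get("/internal/alert-check", alertCheck);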

@@ -13,6 +13,7 @@ let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");
const AHDineroFormat = "0.00";
const AhDateFormat = "MMDDYYYY";
@@ -26,170 +27,180 @@ const ftpSetup = {
password: process.env.AUTOHOUSE_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"]
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid']
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid']
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("autohouse-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("autohouse-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("autohouse-end", "DEBUG", "api", null, null);
} catch (error) {
logger.log("autohouse-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const autoHouseObject = {
AutoHouseExport: {
RepairOrder: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
};
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
autoHouseObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autuhouseid: bodyshop.autuhouseid,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
...errors
})
);
try {
//Connect to the FTP and upload all.
const autoHouseObject = {
AutoHouseExport: {
RepairOrder: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
};
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename
});
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
uploadResult
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
//***TODO Change file naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
const ret = builder.create({}, autoHouseObject).end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
...error
//Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
fatal: true,
errors: [error.toString()]
});
} finally {
sftp.end();
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
}
async function uploadViaSFTP(allxmlsToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("autohouse-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("autohouse-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
}
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
}
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -287,8 +298,8 @@ const CreateRepairOrderTag = (job, errorCallback) => {
InsuranceCo: job.ins_co_nm || "",
CompanyName: job.ins_co_nm || "",
Address: job.ins_addr1 || "",
City: job.ins_addr1 || "",
State: job.ins_city || "",
City: job.ins_city || "",
State: job.ins_st || "",
Zip: job.ins_zip || "",
Phone: job.ins_ph1 || "",
Fax: job.ins_fax || "",
@@ -601,10 +612,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
logger.log("autohouse-job-calculate-error", "ERROR", "api", null, {
error
});
logger.log("autohouse-job-calculate-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};

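The Autohouse refactor above (mirrored in the Chatter and ClaimsCorp exports below) follows one pattern: respond 202 immediately, then extract shops in fixed-size batches before uploading and emailing a summary. Stripped to its skeleton (names illustrative):

// Illustrative skeleton of the 202-then-batch pattern used by these exports.
async function runExport(shopsToProcess, batchSize = 10) {
  const batchPromises = [];
  for (let i = 0; i < shopsToProcess.length; i += batchSize) {
    const batch = shopsToProcess.slice(i, i + batchSize);
    batchPromises.push(processBatch(batch)); // per-shop extract; errors collected
  }
  await Promise.all(batchPromises);
  // then: uploadViaSFTP(...) or write files locally when skipUpload is set,
  // followed by a summary email of errors and uploads
}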

@@ -22,135 +22,133 @@ const ftpSetup = {
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allcsvsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("chatter-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid']
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const allcsvsToUpload = [];
const allErrors = [];
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
logger.log("chatter-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CHATTER_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid']
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const chatterObject = jobs.map((j) => {
return {
poc_trigger_code: bodyshops_by_pk.chatterid,
firstname: j.ownr_co_nm ? null : j.ownr_fn,
lastname: j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
transaction_id: j.ro_number,
email: j.ownr_ea,
phone_number: j.ownr_ph1
};
});
const batchSize = 10;
const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" });
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("chatter-shopsToProcess-generated", "DEBUG", "api", null, null);
allcsvsToUpload.push({
count: chatterObject.length,
csv: ret,
filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv`
});
logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname
});
}
}
if (skipUpload) {
for (const csvObj of allcsvsToUpload) {
fs.writeFile(`./logs/${csvObj.filename}`, csvObj.csv);
}
sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.json(allcsvsToUpload);
if (shopsToProcess.length === 0) {
logger.log("chatter-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const sftp = new Client();
sftp.on("error", (errors) => logger.log("chatter-sftp-error", "ERROR", "api", null, { ...errors }));
try {
//Get the private key from AWS Secrets Manager.
ftpSetup.privateKey = await getPrivateKey();
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const csvObj of allcsvsToUpload) {
logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename });
const uploadResult = await sftp.put(Buffer.from(csvObj.xml), `/${csvObj.filename}`);
logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, { uploadResult });
}
} catch (error) {
logger.log("chatter-sftp-error", "ERROR", "api", null, { ...error });
} finally {
sftp.end();
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const csvObj of allcsvsToUpload) {
await fs.promises.writeFile(`./logs/${csvObj.filename}`, csvObj.csv);
}
} else {
await uploadViaSFTP(allcsvsToUpload);
}
})();
batchPromises.push(batchPromise);
}
sendServerEmail({
await Promise.all(batchPromises);
await sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allcsvsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
res.sendStatus(200);
logger.log("chatter-end", "DEBUG", "api", null, null);
} catch (error) {
res.status(200).json(error);
logger.log("chatter-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
try {
logger.log("chatter-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const { jobs, bodyshops_by_pk } = await client.request(queries.CHATTER_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(1, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const chatterObject = jobs.map((j) => {
return {
poc_trigger_code: bodyshops_by_pk.chatterid,
firstname: j.ownr_co_nm ? null : j.ownr_fn,
lastname: j.ownr_co_nm ? j.ownr_co_nm : j.ownr_ln,
transaction_id: j.ro_number,
email: j.ownr_ea,
phone_number: j.ownr_ph1
};
});
const ret = converter.json2csv(chatterObject, { emptyFieldValue: "" });
allcsvsToUpload.push({
count: chatterObject.length,
csv: ret,
filename: `${bodyshop.shopname}_solicitation_${moment().format("YYYYMMDD")}.csv`
});
logger.log("chatter-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("chatter-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname
});
}
}
}
async function getPrivateKey() {
// Connect to AWS Secrets Manager
const client = new SecretsManagerClient({ region: "ca-central-1" });
@@ -160,10 +158,46 @@ async function getPrivateKey() {
try {
const { SecretString, SecretBinary } = await client.send(command);
if (SecretString || SecretBinary) logger.log("chatter-retrieved-private-key", "DEBUG", "api", null, null);
const chatterPrivateKey = SecretString ? JSON.parse(SecretString) : JSON.parse(Buffer.from(SecretBinary, "base64").toString("ascii"));
return chatterPrivateKey.private_key;
const chatterPrivateKey = SecretString ? SecretString : Buffer.from(SecretBinary, "base64").toString("ascii");
return chatterPrivateKey;
} catch (error) {
logger.log("chatter-get-private-key", "ERROR", "api", null, error);
throw err;
logger.log("chatter-get-private-key", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
}
}
async function uploadViaSFTP(allcsvsToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("chatter-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
);
try {
//Get the private key from AWS Secrets Manager.
const privateKey = await getPrivateKey();
//Connect to the FTP and upload all.
await sftp.connect({ ...ftpSetup, privateKey });
for (const csvObj of allcsvsToUpload) {
try {
csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
logger.log("chatter-sftp-upload", "DEBUG", "api", null, {
filename: csvObj.filename,
result: csvObj.result
});
} catch (error) {
logger.log("chatter-sftp-upload-error", "ERROR", "api", null, {
filename: csvObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
}
} catch (error) {
logger.log("chatter-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
}
}


@@ -26,174 +26,184 @@ const ftpSetup = {
password: process.env.CLAIMSCORP_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"]
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("claimscorp-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CLAIMSCORP_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid']
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("claimscorp-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CLAIMSCORP_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid']
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("claimscorp-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("claimscorp-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("claimscorp-end", "DEBUG", "api", null, null);
} catch (error) {
logger.log("claimscorp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
logger.log("claimscorp-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.CLAIMSCORP_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const claimsCorpObject = {
DataFeed: {
ShopInfo: {
ShopID: bodyshops_by_pk.claimscorpid,
ShopName: bodyshops_by_pk.shopname,
RO: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
}
};
if (erroredJobs.length > 0) {
logger.log("claimscorp-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
claimsCorpObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: claimsCorpObject.DataFeed.ShopInfo.RO.length,
xml: ret,
filename: `${bodyshop.claimscorpid}-${moment().format("YYYYMMDDTHHMMss")}.xml`
});
logger.log("claimscorp-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("claimscorp-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
const { jobs, bodyshops_by_pk } = await client.request(queries.CLAIMSCORP_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("claimscorp-sftp-error", "ERROR", "api", null, {
...errors
})
);
try {
//Connect to the FTP and upload all.
const claimsCorpObject = {
DataFeed: {
ShopInfo: {
ShopID: bodyshops_by_pk.claimscorpid,
ShopName: bodyshops_by_pk.shopname,
RO: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
}
};
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
logger.log("claimscorp-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename
});
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("claimscorp-sftp-upload-result", "DEBUG", "api", null, {
uploadResult
if (erroredJobs.length > 0) {
logger.log("claimscorp-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
//***TODO Change file naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
const ret = builder.create({}, claimsCorpObject).end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: claimsCorpObject.DataFeed.ShopInfo.RO.length,
xml: ret,
filename: `${bodyshop.claimscorpid}-${moment().format("YYYYMMDDTHHMMss")}.xml`
});
logger.log("claimscorp-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
logger.log("claimscorp-sftp-error", "ERROR", "api", null, {
...error
//Error at the shop level.
logger.log("claimscorp-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
fatal: true,
errors: [error.toString()]
});
} finally {
sftp.end();
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
}
async function uploadViaSFTP(allxmlsToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("claimscorp-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
);
try {
//Connect to the FTP and upload all.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("claimscorp-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("claimscorp-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
}
} catch (error) {
logger.log("claimscorp-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
}
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -445,10 +455,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
logger.log("claimscorp-job-calculate-error", "ERROR", "api", null, {
error
});
logger.log("claimscorp-job-calculate-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};


@@ -16,8 +16,7 @@ const { sendServerEmail } = require("../email/sendemail");
const DineroFormat = "0,0.00";
const DateFormat = "MM/DD/YYYY";
const repairOpCodes = ["OP4", "OP9", "OP10"];
const replaceOpCodes = ["OP2", "OP5", "OP11", "OP12"];
const kaizenShopsIDs = ["SUMMIT", "STRATHMORE", "SUNRIDGE", "SHAW"];
const ftpSetup = {
host: process.env.KAIZEN_HOST,
@@ -30,173 +29,179 @@ const ftpSetup = {
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
  // Only process if the appropriate token is provided.
  if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
    res.sendStatus(401);
    return;
  }
  // Send immediate response and continue processing.
  res.status(202).json({
    success: true,
    message: "Processing request ...",
    timestamp: new Date().toISOString()
  });
  try {
    logger.log("kaizen-start", "DEBUG", "api", null, null);
    //Query for the list of bodyshop clients.
    const { bodyshops } = await client.request(queries.GET_KAIZEN_SHOPS, { imexshopid: kaizenShopsIDs });
    const specificShopIds = req.body.bodyshopIds; // ['uuid']
    const { start, end, skipUpload } = req.body; //YYYY-MM-DD
    const batchSize = 10;
    const shopsToProcess =
      specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
    logger.log("kaizen-shopsToProcess-generated", "DEBUG", "api", null, null);
    if (shopsToProcess.length === 0) {
      logger.log("kaizen-shopsToProcess-empty", "DEBUG", "api", null, null);
      return;
    }
    const batchPromises = [];
    for (let i = 0; i < shopsToProcess.length; i += batchSize) {
      const batch = shopsToProcess.slice(i, i + batchSize);
      const batchPromise = (async () => {
        await processBatch(batch, start, end);
        if (skipUpload) {
          for (const xmlObj of allxmlsToUpload) {
            fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
          }
        } else {
          await uploadViaSFTP(allxmlsToUpload);
        }
      })();
      batchPromises.push(batchPromise);
    }
    await Promise.all(batchPromises);
    await sendServerEmail({
      subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
      text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
        allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
        null,
        2
      )}`
    });
    logger.log("kaizen-end", "DEBUG", "api", null, null);
  } catch (error) {
    logger.log("kaizen-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
  }
};
async function processBatch(batch, start, end) {
  for (const bodyshop of batch) {
    const erroredJobs = [];
    try {
      logger.log("kaizen-start-shop-extract", "DEBUG", "api", bodyshop.id, {
        shopname: bodyshop.shopname
      });
      const { jobs, bodyshops_by_pk } = await client.request(queries.KAIZEN_QUERY, {
        bodyshopid: bodyshop.id,
        start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
        ...(end && { end: moment(end).endOf("day") })
      });
      const kaizenObject = {
        DataFeed: {
          ShopInfo: {
            ShopName: bodyshops_by_pk.shopname,
            Jobs: jobs.map((j) =>
              CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
                erroredJobs.push({ job: job, error: error.toString() });
              })
            )
          }
        }
      };
      if (erroredJobs.length > 0) {
        logger.log("kaizen-failed-jobs", "ERROR", "api", bodyshop.id, {
          count: erroredJobs.length,
          jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
        });
      }
      //***TODO Change the file naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
      const ret = builder.create({}, kaizenObject).end({ allowEmptyTags: true });
      allxmlsToUpload.push({
        count: kaizenObject.DataFeed.ShopInfo.Jobs.length,
        xml: ret,
        filename: `${bodyshop.shopname}-${moment().format("YYYYMMDDTHHmmss")}.xml`
      });
      logger.log("kaizen-end-shop-extract", "DEBUG", "api", bodyshop.id, {
        shopname: bodyshop.shopname
      });
    } catch (error) {
      //Error at the shop level.
      logger.log("kaizen-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
      allErrors.push({
        bodyshopid: bodyshop.id,
        imexshopid: bodyshop.imexshopid,
        shopname: bodyshop.shopname,
        fatal: true,
        errors: [error.toString()]
      });
    } finally {
      allErrors.push({
        bodyshopid: bodyshop.id,
        imexshopid: bodyshop.imexshopid,
        shopname: bodyshop.shopname,
        errors: erroredJobs.map((ej) => ({
          ro_number: ej.job?.ro_number,
          jobid: ej.job?.id,
          error: ej.error
        }))
      });
    }
  }
}
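// Note (from the code above): batches of up to 10 shops run concurrently via Promise.all,
// while each batch processes its shops one at a time; each batch promise then flushes the
// shared allxmlsToUpload queue, so a file staged by one batch may also be sent by another.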
async function uploadViaSFTP(allxmlsToUpload) {
const sftp = new Client();
sftp.on("error", (errors) =>
logger.log("kaizen-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
);
try {
    //Connect to the SFTP server and upload everything queued.
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
try {
xmlObj.result = await sftp.put(Buffer.from(xmlObj.xml), `${xmlObj.filename}`);
logger.log("kaizen-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
result: xmlObj.result
});
} catch (error) {
logger.log("kaizen-sftp-upload-error", "ERROR", "api", null, {
filename: xmlObj.filename,
error: error.message,
stack: error.stack
});
throw error;
}
}
} catch (error) {
logger.log("kaizen-sftp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
throw error;
} finally {
sftp.end();
}
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -420,10 +425,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
logger.log("kaizen-job-calculate-error", "ERROR", "api", null, {
error
});
logger.log("kaizen-job-calculate-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};


@@ -1,6 +1,6 @@
const { isString, isEmpty } = require("lodash");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { InstanceRegion } = require("../utils/instanceMgr");
const aws = require("@aws-sdk/client-ses");
const nodemailer = require("nodemailer");
const logger = require("../utils/logger");
@@ -10,12 +10,7 @@ const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.e
const sesConfig = {
apiVersion: "latest",
credentials: defaultProvider(),
  region: InstanceRegion()
};
if (isLocal) {


@@ -17,12 +17,10 @@ require("dotenv").config({
const domain = process.env.NODE_ENV ? "secure" : "test";
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
const { InstanceRegion } = require("../utils/instanceMgr");
const client = new SecretsManagerClient({
  region: InstanceRegion()
});
const gqlClient = require("../graphql-client/graphql-client").client;


@@ -78,16 +78,20 @@ const jobLifecycle = async (req, res) => {
Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = finalTotal > 0 ? (value / finalTotal) * 100 : 0;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
const averageValue = _.size(jobIDs) > 0 ? value / jobIDs.length : 0;
const averageHumanReadable = durationToHumanReadable(moment.duration(averageValue));
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
roundedPercentage,
averageValue,
averageHumanReadable
});
});
@@ -100,7 +104,12 @@ const jobLifecycle = async (req, res) => {
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal)),
averageValue: _.size(jobIDs) > 0 ? finalTotal / jobIDs.length : 0,
averageHumanReadable:
_.size(jobIDs) > 0
? durationToHumanReadable(moment.duration(finalTotal / jobIDs.length))
: durationToHumanReadable(moment.duration(0))
}
});
};
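// Worked example (hypothetical numbers): if the jobs in scope spend a combined 6 hours in a
// status, value = 21600000 ms; with two jobs, averageValue = value / jobIDs.length = 10800000 ms,
// and durationToHumanReadable(moment.duration(10800000)) renders roughly "3 hours".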


@@ -12,9 +12,10 @@ const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebas
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { taskAssignedEmail, tasksRemindEmail } = require("../email/tasksEmails");
const { canvastest } = require("../render/canvas-handler");
const { alertCheck } = require("../alerts/alertcheck");
//Test route to ensure Express is responding.
router.get("/test", async function (req, res) {
router.get("/test", eventAuthorizationMiddleware, async function (req, res) {
const commit = require("child_process").execSync("git rev-parse --short HEAD");
// console.log(app.get('trust proxy'));
// console.log("remoteAddress", req.socket.remoteAddress);
@@ -31,6 +32,32 @@ router.get("/test", async function (req, res) {
res.status(200).send(`OK - ${commit}`);
});
router.get("/test-logs", eventAuthorizationMiddleware, (req, res) => {
const { logger } = req;
// Test 1: Log with a message that exceeds the size limit, triggering an upload to S3.
const largeMessage = "A".repeat(256 * 1024 + 1); // Message larger than the log size limit
logger.log(largeMessage, "error", "user123", null, { detail: "large log entry" });
// Test 2: Log with a message that is within the size limit, should log directly using winston.
const smallMessage = "A small log message";
logger.log(smallMessage, "info", "user123", null, { detail: "small log entry" });
// Test 3: Log with the `upload` flag set to `true`, forcing the log to be uploaded to S3.
logger.log(
"This log will be uploaded to S3 regardless of size",
"warning",
"user123",
null,
{ detail: "upload log" },
true
);
// Test 4: Log with a message that doesn't exceed the size limit and doesn't require an upload.
logger.log("Normal log entry", "debug", "user123", { id: 4 }, { detail: "normal log entry" });
return res.status(500).send("Logs tested.");
});
// Search
router.post("/search", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, os.search);
router.post("/opensearch", eventAuthorizationMiddleware, os.handler);
@@ -53,4 +80,7 @@ router.post("/taskHandler", validateFirebaseIdTokenMiddleware, taskHandler.taskH
// Canvas Test
router.post("/canvastest", validateFirebaseIdTokenMiddleware, canvastest);
// Alert Check
router.post("/alertcheck", eventAuthorizationMiddleware, alertCheck);
module.exports = router;


@@ -44,4 +44,10 @@ function InstanceManager({ args, instance, debug, executeFunction, rome, promana
return propToReturn === undefined ? null : propToReturn;
}
exports.InstanceRegion = () =>
InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
});
exports.default = InstanceManager;
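// Usage sketch: InstanceRegion() centralizes the per-instance AWS region lookup that the
// SES, Secrets Manager, CloudWatch, and S3 modules previously inlined:
//
//   const { InstanceRegion } = require("../utils/instanceMgr");
//   const region = InstanceRegion(); // "ca-central-1" on imex, "us-east-2" on rome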


@@ -9,6 +9,9 @@ const winston = require("winston");
const WinstonCloudWatch = require("winston-cloudwatch");
const { isString, isEmpty } = require("lodash");
const { networkInterfaces, hostname } = require("node:os");
const { uploadFileToS3 } = require("./s3");
const { v4 } = require("uuid");
const { InstanceRegion } = require("./instanceMgr");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
@@ -20,6 +23,30 @@ const LOG_LEVELS = {
silly: { level: 6, name: "silly" }
};
const LOG_LENGTH_LIMIT = 256 * 1024; // 256KB
const S3_BUCKET_NAME = InstanceManager({
imex: "imex-large-log",
rome: "rome-large-log"
});
const region = InstanceRegion();
const estimateLogSize = (logEntry) => {
let estimatedSize = 0;
for (const key in logEntry) {
if (logEntry.hasOwnProperty(key)) {
const value = logEntry[key];
if (value === undefined || value === null) {
estimatedSize += key.length; // Only count the key length if value is undefined or null
} else {
estimatedSize += key.length + (typeof value === "string" ? value.length : JSON.stringify(value).length);
}
}
}
return estimatedSize;
};
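// Worked example (hypothetical entry): for { message: "hi", meta: { a: 1 } } the estimate is
// "message".length + "hi".length + "meta".length + JSON.stringify({ a: 1 }).length
// = 7 + 2 + 4 + 7 = 20 characters — a cheap proxy for the serialized log size.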
const normalizeLevel = (level) => (level ? level.toLowerCase() : LOG_LEVELS.debug.name);
const createLogger = () => {
@@ -30,10 +57,7 @@ const createLogger = () => {
const winstonCloudwatchTransportDefaults = {
logGroupName: logGroupName,
awsOptions: {
      region
},
jsonMessage: true
};
@@ -124,15 +148,66 @@ const createLogger = () => {
);
}
const log = (message, type, user, record, meta, upload) => {
const logEntry = {
level: normalizeLevel(type),
message,
user,
record,
hostname: internalHostname,
meta
};
const uploadLogToS3 = (logEntry, message, type, user) => {
const uniqueId = v4();
const dateTimeString = new Date().toISOString().replace(/:/g, "-");
const envName = process.env?.NODE_ENV ? process.env.NODE_ENV : "";
const logStreamName = `${envName}-${internalHostname}-${dateTimeString}-${uniqueId}.json`;
const logString = JSON.stringify(logEntry);
const webPath = isLocal
? `https://${S3_BUCKET_NAME}.s3.localhost.localstack.cloud:4566/${logStreamName}`
: `https://${S3_BUCKET_NAME}.s3.${region}.amazonaws.com/${logStreamName}`;
uploadFileToS3({ bucketName: S3_BUCKET_NAME, key: logStreamName, content: logString })
.then(() => {
log("A log file has been uploaded to S3", "info", "S3", null, {
logStreamName,
webPath,
message: message?.slice(0, 200),
type,
user
});
})
.catch((err) => {
log("Error in S3 Upload", "error", "S3", null, {
logStreamName,
webPath,
message: message?.slice(0, 100),
type,
user,
errorMessage: err?.message?.slice(0, 100)
});
});
};
const checkAndUploadLog = () => {
const estimatedSize = estimateLogSize(logEntry);
if (estimatedSize > LOG_LENGTH_LIMIT * 0.9 || estimatedSize > LOG_LENGTH_LIMIT) {
uploadLogToS3(logEntry, message, type, user);
return true;
}
return false;
};
// Upload log immediately if upload is true, otherwise check the log size.
if (upload) {
uploadLogToS3(logEntry, message, type, user);
return;
}
if (checkAndUploadLog()) return;
winstonLogger.log(logEntry);
};
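// Usage sketch: the sixth argument is the new `upload` flag; passing true forces the entry
// to S3 regardless of its estimated size (mirrors the /test-logs route above; the user and
// record values here are hypothetical):
//
//   logger.log("ws-log-event-json", "info", user.email, recordid, { wsmessage, json }, true);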
return {

server/utils/s3.js (new file)

@@ -0,0 +1,109 @@
const {
S3Client,
PutObjectCommand,
GetObjectCommand,
ListObjectsV2Command,
DeleteObjectCommand,
CopyObjectCommand
} = require("@aws-sdk/client-s3");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { InstanceRegion } = require("./instanceMgr");
const { isString, isEmpty } = require("lodash");
const createS3Client = () => {
const S3Options = {
region: InstanceRegion(),
credentials: defaultProvider()
};
const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);
if (isLocal) {
S3Options.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
S3Options.forcePathStyle = true; // Needed for LocalStack to avoid bucket name as hostname
}
const s3Client = new S3Client(S3Options);
/**
* Uploads a file to the specified S3 bucket and key.
*/
const uploadFileToS3 = async ({ bucketName, key, content, contentType }) => {
const params = {
Bucket: bucketName,
Key: key,
Body: content,
ContentType: contentType ?? "application/json"
};
const command = new PutObjectCommand(params);
return await s3Client.send(command);
};
/**
* Downloads a file from the specified S3 bucket and key.
*/
const downloadFileFromS3 = async ({ bucketName, key }) => {
const params = { Bucket: bucketName, Key: key };
const command = new GetObjectCommand(params);
const data = await s3Client.send(command);
return data.Body;
};
/**
* Lists objects in the specified S3 bucket.
*/
const listFilesInS3Bucket = async (bucketName, prefix = "") => {
const params = { Bucket: bucketName, Prefix: prefix };
const command = new ListObjectsV2Command(params);
const data = await s3Client.send(command);
return data.Contents || [];
};
/**
* Deletes a file from the specified S3 bucket and key.
*/
const deleteFileFromS3 = async ({ bucketName, key }) => {
const params = { Bucket: bucketName, Key: key };
const command = new DeleteObjectCommand(params);
return await s3Client.send(command);
};
/**
* Copies a file within S3 from a source bucket/key to a destination bucket/key.
*/
const copyFileInS3 = async ({ sourceBucket, sourceKey, destinationBucket, destinationKey }) => {
const params = {
CopySource: `/${sourceBucket}/${sourceKey}`,
Bucket: destinationBucket,
Key: destinationKey
};
const command = new CopyObjectCommand(params);
return await s3Client.send(command);
};
/**
* Checks if a file exists in the specified S3 bucket and key.
*/
const fileExistsInS3 = async ({ bucketName, key }) => {
try {
await downloadFileFromS3({ bucketName, key });
return true;
} catch (error) {
if (error.name === "NoSuchKey" || error.name === "NotFound") {
return false;
}
throw error;
}
};
return {
uploadFileToS3,
downloadFileFromS3,
listFilesInS3Bucket,
deleteFileFromS3,
copyFileInS3,
fileExistsInS3,
...s3Client
};
};
module.exports = createS3Client();
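// Usage sketch (bucket and key are illustrative; the logger above uses the imex-large-log /
// rome-large-log buckets, and requires this module as "./s3" from server/utils):
//
//   const { uploadFileToS3, fileExistsInS3 } = require("./s3");
//   await uploadFileToS3({ bucketName: "imex-large-log", key: "probe.json", content: "{}" });
//   const exists = await fileExistsInS3({ bucketName: "imex-large-log", key: "probe.json" });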


@@ -155,10 +155,17 @@ function createJsonEvent(socket, level, message, json) {
message
});
}
logger.log("ws-log-event-json", level, socket.user.email, socket.recordid, {
wsmessage: message,
json
});
logger.log(
"ws-log-event-json",
level,
socket.user.email,
socket.recordid,
{
wsmessage: message,
json
},
true
);
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
@@ -189,7 +196,8 @@ function createXmlEvent(socket, xml, message, isError = false) {
{
wsmessage: message,
xml
},
true
);
if (socket.logEvents && isArray(socket.logEvents)) {

upload/.gitignore (new file)

@@ -0,0 +1,2 @@
*
!.gitignore