Merge branch 'master-AIO' into feature/IO-2921-CARSTAR-Canada-Chatter-Integration

Signed-off-by: Allan Carr <allan.carr@thinkimex.com>
This commit is contained in:
Allan Carr
2024-11-20 10:56:02 -08:00
67 changed files with 13638 additions and 11632 deletions

1
.gitattributes vendored Normal file
View File

@@ -0,0 +1 @@
* text eol=lf

View File

@@ -0,0 +1,24 @@
#!/bin/bash
# Install the Montserrat font family system-wide and rebuild the font cache.
# Must run as root (uses dnf and writes to /usr/share/fonts).
#
# Fail fast: abort on any command error, on unset variables, and on
# failures anywhere in a pipeline — otherwise a failed download would
# still end with the "success" message below.
set -euo pipefail

# Install packages required for font rendering and cache management.
dnf install -y fontconfig freetype

# Use a private temporary directory (avoids collisions with a fixed
# /tmp path) and guarantee cleanup even if a later step fails.
tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT
cd "$tmpdir"

# Download the Montserrat font zip file.
wget https://images.imex.online/fonts/montserrat.zip -O montserrat.zip

# Unzip the downloaded font archive.
unzip montserrat.zip -d montserrat

# Move the font files to the system fonts directory.
# NOTE(review): the archive is assumed to nest fonts one level deep
# (montserrat/montserrat/*.ttf) — with `set -e` the glob failing to
# match now aborts instead of silently "succeeding".
mv montserrat/montserrat/*.ttf /usr/share/fonts

# Rebuild the font cache so the new fonts are immediately visible.
fc-cache -fv

echo "Montserrat fonts installed and cached successfully."

30
.vscode/settings.json vendored
View File

@@ -8,5 +8,35 @@
"pattern": "**/IMEX.xml",
"systemId": "logs/IMEX.xsd"
}
],
"cSpell.words": [
"antd",
"appointmentconfirmation",
"appt",
"autohouse",
"autohouseid",
"billlines",
"bodyshop",
"bodyshopid",
"bodyshops",
"CIECA",
"claimscorp",
"claimscorpid",
"Dinero",
"driveable",
"IMEX",
"imexshopid",
"jobid",
"joblines",
"Kaizen",
"labhrs",
"larhrs",
"mixdata",
"ownr",
"promanager",
"shopname",
"smartscheduling",
"timetickets",
"touchtime"
]
}

View File

@@ -11156,6 +11156,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>imexpay</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>insurancecos</name>
<definition_loaded>false</definition_loaded>
@@ -11198,27 +11219,6 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>intellipay</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>intellipay_cash_discount</name>
<definition_loaded>false</definition_loaded>
@@ -11747,6 +11747,48 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>ttl_adjustment</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>ttl_tax_adjustment</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
</children>
</folder_node>
<folder_node>
@@ -11775,6 +11817,27 @@
</concept_node>
</children>
</folder_node>
<concept_node>
<name>romepay</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>scheduling</name>
<definition_loaded>false</definition_loaded>
@@ -36253,6 +36316,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_cust_payable_cash_discount</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_repairs</name>
<definition_loaded>false</definition_loaded>
@@ -48360,6 +48444,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>tasks_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>tasks_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48402,6 +48507,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_amount_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_amount_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48444,6 +48570,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_hours_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_hours_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48465,6 +48612,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_jobs_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_jobs_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48507,6 +48675,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lab_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lab_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48549,6 +48738,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lar_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lar_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48724,6 +48934,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>tasks_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>tasks_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48766,6 +48997,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_amount_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_amount_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48808,6 +49060,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_hours_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_hours_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48829,6 +49102,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_jobs_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_jobs_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48871,6 +49165,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lab_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lab_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -48913,6 +49228,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lar_in_view</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>total_lar_on_board</name>
<definition_loaded>false</definition_loaded>
@@ -51761,6 +52097,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>production_not_production_status</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>production_over_time</name>
<definition_loaded>false</definition_loaded>
@@ -54225,6 +54582,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>created_by</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>description</name>
<definition_loaded>false</definition_loaded>
@@ -54487,6 +54865,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>related_items</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>remind_at</name>
<definition_loaded>false</definition_loaded>

View File

@@ -1,10 +1,10 @@
import { Card, Table, Tag } from "antd";
import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
import { useTranslation } from "react-i18next";
import React, { useEffect, useState } from "react";
import dayjs from "../../../utils/day";
import DashboardRefreshRequired from "../refresh-required.component";
import axios from "axios";
import React, { useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import dayjs from "../../../utils/day";
import LoadingSkeleton from "../../loading-skeleton/loading-skeleton.component";
import DashboardRefreshRequired from "../refresh-required.component";
const fortyFiveDaysAgo = () => dayjs().subtract(45, "day").toLocaleString();
@@ -46,6 +46,11 @@ export default function JobLifecycleDashboardComponent({ data, bodyshop, ...card
dataIndex: "humanReadable",
key: "humanReadable"
},
{
title: t("job_lifecycle.columns.average_human_readable"),
dataIndex: "averageHumanReadable",
key: "averageHumanReadable"
},
{
title: t("job_lifecycle.columns.status_count"),
key: "statusCount",

View File

@@ -44,7 +44,7 @@ function LogLevelHierarchy(level) {
return "orange";
case "INFO":
return "blue";
case "WARNING":
case "WARN":
return "yellow";
case "ERROR":
return "red";

View File

@@ -1,4 +1,4 @@
import { DatePicker } from "antd";
import { DatePicker, Space, TimePicker } from "antd";
import PropTypes from "prop-types";
import React, { useCallback, useState } from "react";
import { useTranslation } from "react-i18next";
@@ -20,6 +20,7 @@ const DateTimePicker = ({
onlyFuture,
onlyToday,
isDateOnly = false,
isSeparatedTime = false,
bodyshop,
...restProps
}) => {
@@ -29,7 +30,7 @@ const DateTimePicker = ({
const handleChange = useCallback(
(newDate) => {
if (onChange) {
onChange(bodyshop?.timezone ? dayjs(newDate).tz(bodyshop.timezone, true) : newDate);
onChange(bodyshop?.timezone && newDate ? dayjs(newDate).tz(bodyshop.timezone, true) : newDate);
}
setIsManualInput(false);
},
@@ -87,24 +88,57 @@ const DateTimePicker = ({
return (
<div onKeyDown={handleKeyDown} id={id} style={{ width: "100%" }}>
<DatePicker
showTime={
isDateOnly
? false
: {
format: "hh:mm a",
minuteStep: 15,
defaultValue: dayjs(dayjs(), "HH:mm:ss")
}
}
format={isDateOnly ? "MM/DD/YYYY" : "MM/DD/YYYY hh:mm a"}
value={value ? dayjs(value) : null}
onChange={handleChange}
placeholder={isDateOnly ? t("general.labels.date") : t("general.labels.datetime")}
onBlur={onBlur || handleBlur}
disabledDate={handleDisabledDate}
{...restProps}
/>
{isSeparatedTime && (
<Space direction="vertical" style={{ width: "100%" }}>
<DatePicker
showTime={false}
format="MM/DD/YYYY"
value={value ? dayjs(value) : null}
onChange={handleChange}
placeholder={t("general.labels.date")}
onBlur={handleBlur}
disabledDate={handleDisabledDate}
isDateOnly={true}
{...restProps}
/>
{value && (
<TimePicker
format="hh:mm a"
minuteStep={15}
defaultOpenValue={dayjs(value)
.hour(dayjs().hour())
.minute(Math.floor(dayjs().minute() / 15) * 15)
.second(0)}
onChange={(value) => {
handleChange(value);
onBlur();
}}
placeholder={t("general.labels.time")}
{...restProps}
/>
)}
</Space>
)}
{!isSeparatedTime && (
<DatePicker
showTime={
isDateOnly
? false
: {
format: "hh:mm a",
minuteStep: 15,
defaultValue: dayjs(dayjs(), "HH:mm:ss")
}
}
format={isDateOnly ? "MM/DD/YYYY" : "MM/DD/YYYY hh:mm a"}
value={value ? dayjs(value) : null}
onChange={handleChange}
placeholder={isDateOnly ? t("general.labels.date") : t("general.labels.datetime")}
onBlur={onBlur || handleBlur}
disabledDate={handleDisabledDate}
{...restProps}
/>
)}
</div>
);
};
@@ -116,7 +150,8 @@ DateTimePicker.propTypes = {
id: PropTypes.string,
onlyFuture: PropTypes.bool,
onlyToday: PropTypes.bool,
isDateOnly: PropTypes.bool
isDateOnly: PropTypes.bool,
isSeparatedTime: PropTypes.bool
};
export default connect(mapStateToProps, null)(DateTimePicker);

View File

@@ -118,8 +118,7 @@ export function JobLinesComponent({
...(record.critical ? { boxShadow: " -.5em 0 0 #FFC107" } : {})
}
}),
sortOrder: state.sortedInfo.columnKey === "line_desc" && state.sortedInfo.order,
ellipsis: true
sortOrder: state.sortedInfo.columnKey === "line_desc" && state.sortedInfo.order
},
{
title: t("joblines.fields.oem_partno"),

View File

@@ -45,7 +45,8 @@ export default function JobLineNotePopup({ jobline, disabled }) {
if (editing)
return (
<div>
<Input
<Input.TextArea
autoSize
autoFocus
suffix={loading ? <LoadingSpinner /> : null}
value={note}

View File

@@ -1,10 +1,10 @@
import { useSplitTreatments } from "@splitsoftware/splitio-react";
import { Form, Input, InputNumber, Modal, Select, Switch } from "antd";
import React, { useEffect } from "react";
import { useTranslation } from "react-i18next";
import InputCurrency from "../form-items-formatted/currency-form-item.component";
import LayoutFormRow from "../layout-form-row/layout-form-row.component";
import JoblinesPreset from "../job-lines-preset-button/job-lines-preset-button.component";
import { useSplitTreatments } from "@splitsoftware/splitio-react";
import LayoutFormRow from "../layout-form-row/layout-form-row.component";
import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
@@ -61,7 +61,7 @@ export function JobLinesUpsertModalComponent({ bodyshop, open, jobLine, handleCa
]}
name="line_desc"
>
<Input />
<Input.TextArea autoSize />
</Form.Item>
<JoblinesPreset form={form} />
</LayoutFormRow>

View File

@@ -141,14 +141,16 @@ export function JobTotalsTableTotals({ bodyshop, job }) {
key: t("jobs.fields.ded_amt"),
total: job.job_totals.totals.custPayable.deductible
},
...(InstanceRenderManager({
imex: [{
key: t("jobs.fields.federal_tax_payable"),
total: job.job_totals.totals.custPayable.federal_tax
}],
...InstanceRenderManager({
imex: [
{
key: t("jobs.fields.federal_tax_payable"),
total: job.job_totals.totals.custPayable.federal_tax
}
],
rome: [],
promanager: "USE_ROME"
})),
}),
{
key: t("jobs.fields.other_amount_payable"),
total: job.job_totals.totals.custPayable.other_customer_amount
@@ -158,11 +160,32 @@ export function JobTotalsTableTotals({ bodyshop, job }) {
total: job.job_totals.totals.custPayable.dep_taxes
},
{
key: t("jobs.labels.total_cust_payable"),
total: job.job_totals.totals.custPayable.total,
bold: true
},
...(bodyshop.intellipay_config?.enable_cash_discount
? [
{
key: t("jobs.labels.total_cust_payable_cash_discount"),
total: job.job_totals.totals.custPayable.total,
bold: true
},
{
key: t("jobs.labels.total_cust_payable"),
total: Dinero(job.job_totals.totals.custPayable.total)
.add(
Dinero(job.job_totals.totals.custPayable.total).percentage(
bodyshop.intellipay_config?.cash_discount_percentage || 0
)
)
.toJSON(),
bold: true
}
]
: [
{
key: t("jobs.labels.total_cust_payable"),
total: job.job_totals.totals.custPayable.total,
bold: true
}
]),
{
key: t("jobs.labels.net_repairs"),
total: job.job_totals.totals.net_repairs,

View File

@@ -5,6 +5,7 @@ import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
import { selectJobReadOnly } from "../../redux/application/application.selectors";
import { selectBodyshop } from "../../redux/user/user.selectors";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component.jsx";
import CurrencyInput from "../form-items-formatted/currency-form-item.component";
import FormItemEmail from "../form-items-formatted/email-form-item.component";
import FormItemPhone, { PhoneItemFormatterValidation } from "../form-items-formatted/phone-form-item.component";
@@ -12,7 +13,6 @@ import Car from "../job-damage-visual/job-damage-visual.component";
import JobsDetailChangeEstimator from "../jobs-detail-change-estimator/jobs-detail-change-estimator.component";
import JobsDetailChangeFileHandler from "../jobs-detail-change-filehandler/jobs-detail-change-filehandler.component";
import FormRow from "../layout-form-row/layout-form-row.component";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component.jsx";
const mapStateToProps = createStructuredSelector({
jobRO: selectJobReadOnly,
@@ -185,6 +185,9 @@ export function JobsDetailGeneral({ bodyshop, jobRO, job, form }) {
<Form.Item label={t("jobs.fields.towin")} name="towin" valuePropName="checked">
<Switch disabled={jobRO} />
</Form.Item>
<Form.Item label={t("jobs.fields.tlos_ind")} name="tlos_ind" valuePropName="checked">
<Switch disabled={jobRO} />
</Form.Item>
</FormRow>
</Col>
<Col {...lossColDamage}>

View File

@@ -1,6 +1,5 @@
import { Button, Col, Form, Input, Row, Select, Space, Switch, Typography } from "antd";
import axios from "axios";
import dayjs from "../../utils/day";
import React, { useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -8,13 +7,14 @@ import { createStructuredSelector } from "reselect";
import { calculateScheduleLoad } from "../../redux/application/application.actions";
import { selectBodyshop } from "../../redux/user/user.selectors";
import { DateFormatter } from "../../utils/DateFormatter";
import dayjs from "../../utils/day";
import InstanceRenderManager from "../../utils/instanceRenderMgr";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component";
import EmailInput from "../form-items-formatted/email-form-item.component";
import LayoutFormRow from "../layout-form-row/layout-form-row.component";
import ScheduleDayViewContainer from "../schedule-day-view/schedule-day-view.container";
import ScheduleExistingAppointmentsList from "../schedule-existing-appointments-list/schedule-existing-appointments-list.component";
import "./schedule-job-modal.scss";
import InstanceRenderManager from "../../utils/instanceRenderMgr";
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
@@ -84,7 +84,7 @@ export function ScheduleJobModalComponent({
}
]}
>
<DateTimePicker onBlur={handleDateBlur} onlyFuture />
<DateTimePicker onBlur={handleDateBlur} onlyFuture isSeparatedTime />
</Form.Item>
<Form.Item
name="scheduled_completion"

View File

@@ -142,7 +142,7 @@ export function ShopInfoComponent({ bodyshop, form, saveLoading }) {
rome: [
{
key: "intellipay",
label: t("bodyshop.labels.intellipay"),
label: InstanceRenderManager({ rome: t("bodyshop.labels.romepay"), imex: t("bodyshop.labels.imexpay") }),
children: <ShopInfoIntellipay form={form} />
}
],

View File

@@ -676,7 +676,7 @@ export function ShopInfoGeneral({ form, bodyshop }) {
}
]}
>
<Input.TextArea rows={3} />
<Input.TextArea autoSize />
</Form.Item>
<Space wrap>
<DeleteFilled
@@ -737,7 +737,7 @@ export function ShopInfoGeneral({ form, bodyshop }) {
}
]}
>
<Input.TextArea rows={3} />
<Input.TextArea autoSize />
</Form.Item>
<Space wrap>
<DeleteFilled
@@ -1187,7 +1187,7 @@ export function ShopInfoGeneral({ form, bodyshop }) {
key={`${index}line_desc`}
name={[field.name, "line_desc"]}
>
<Input />
<Input.TextArea autoSize />
</Form.Item>
<Form.Item
label={t("joblines.fields.mod_lbr_ty")}
@@ -1330,7 +1330,7 @@ export function ShopInfoGeneral({ form, bodyshop }) {
}
]}
>
<Input />
<Input.TextArea autoSize />
</Form.Item>
<Space wrap>

View File

@@ -39,14 +39,13 @@ export function ShopInfoIntellipay({ bodyshop, form }) {
</Form.Item>
<Form.Item
label={t("bodyshop.fields.intellipay_config.cash_discount_percentage")}
valuePropName="checked"
dependencies={[["intellipay_config", "enable_cash_discount"]]}
name={["intellipay_config", "cash_discount_percentage"]}
rules={[
({ getFieldsValue }) => ({ required: form.getFieldValue(["intellipay_config", "enable_cash_discount"]) })
]}
>
<InputNumber min={0} max={100} precision={1} suffix='%'/>
<InputNumber min={0} max={100} precision={1} suffix="%" />
</Form.Item>
</LayoutFormRow>
</>

View File

@@ -144,7 +144,7 @@ function TaskListComponent({
title: t("tasks.fields.created_by"),
dataIndex: "created_by",
key: "created_by",
width: "10%",
width: "8%",
defaultSortOrder: "descend",
sorter: true,
sortOrder: sortcolumn === "created_by" && sortorder,
@@ -166,65 +166,70 @@ function TaskListComponent({
});
}
if (showRo) {
columns.push({
title: t("tasks.fields.job.ro_number"),
dataIndex: ["job", "ro_number"],
key: "job.ro_number",
width: "8%",
render: (text, record) =>
record.job ? (
<Link to={`/manage/jobs/${record.job.id}?tab=tasks`}>{record.job.ro_number || t("general.labels.na")}</Link>
) : (
t("general.labels.na")
)
});
}
columns.push({
title: t("tasks.fields.related_items"),
key: "related_items",
width: "12%",
render: (text, record) => {
const items = [];
// Job
if (showRo && record.job) {
items.push(
<Link key="job" to={`/manage/jobs/${record.job.id}?tab=tasks`}>
{t("tasks.fields.job.ro_number")}: {record.job.ro_number}
</Link>
);
}
if (showRo && !record.job) {
items.push(`${t("tasks.fields.job.ro_number")}: ${t("general.labels.na")}`);
}
// Jobline
if (record.jobline?.line_desc) {
items.push(
<span key="jobline">
{t("tasks.fields.jobline")}: {record.jobline.line_desc}
</span>
);
}
// Parts Order
if (record.parts_order) {
const { order_number, vendor } = record.parts_order;
const partsOrderText =
order_number && vendor?.name ? `${order_number} - ${vendor.name}` : t("general.labels.na");
items.push(
<Link
key="parts_order"
to={`/manage/jobs/${record.job.id}?partsorderid=${record.parts_order.id}&tab=partssublet`}
>
{t("tasks.fields.parts_order")}: {partsOrderText}
</Link>
);
}
// Bill
if (record.bill) {
const { invoice_number, vendor } = record.bill;
const billText = invoice_number && vendor?.name ? `${invoice_number} - ${vendor.name}` : t("general.labels.na");
items.push(
<Link key="bill" to={`/manage/jobs/${record.job.id}?billid=${record.bill.id}&tab=partssublet`}>
{t("tasks.fields.bill")}: {billText}
</Link>
);
}
return items.length > 0 ? <Space direction="vertical">{items}</Space> : null;
}
});
columns.push(
{
title: t("tasks.fields.jobline"),
dataIndex: ["jobline", "id"],
key: "jobline.id",
width: "8%",
render: (text, record) => record?.jobline?.line_desc || ""
},
{
title: t("tasks.fields.parts_order"),
dataIndex: ["parts_order", "id"],
key: "part_order.id",
width: "8%",
render: (text, record) =>
record.parts_order ? (
<Link to={`/manage/jobs/${record.job.id}?partsorderid=${record.parts_order.id}&tab=partssublet`}>
{record.parts_order.order_number && record.parts_order.vendor && record.parts_order.vendor.name
? `${record.parts_order.order_number} - ${record.parts_order.vendor.name}`
: t("general.labels.na")}
</Link>
) : (
""
)
},
{
title: t("tasks.fields.bill"),
dataIndex: ["bill", "id"],
key: "bill.id",
width: "10%",
render: (text, record) =>
record.bill ? (
<Link to={`/manage/jobs/${record.job.id}?billid=${record.bill.id}&tab=partssublet`}>
{record.bill.invoice_number && record.bill.vendor && record.bill.vendor.name
? `${record.bill.invoice_number} - ${record.bill.vendor.name}`
: t("general.labels.na")}
</Link>
) : (
""
)
},
{
title: t("tasks.fields.title"),
dataIndex: "title",
key: "title",
minWidth: "20%",
sorter: true,
sortOrder: sortcolumn === "title" && sortorder
},
@@ -258,7 +263,7 @@ function TaskListComponent({
{
title: t("tasks.fields.actions"),
key: "toggleCompleted",
width: "5%",
width: "8%",
render: (text, record) => (
<Space direction="horizontal">
<Button

View File

@@ -1,7 +1,7 @@
import { useMutation, useQuery } from "@apollo/client";
import { Button, Form, Modal, notification, Space } from "antd";
import { PageHeader } from "@ant-design/pro-layout";
import dayjs from "../../utils/day";
import { useMutation, useQuery } from "@apollo/client";
import { useSplitTreatments } from "@splitsoftware/splitio-react";
import { Button, Form, Modal, notification, Space } from "antd";
import React, { useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -11,9 +11,9 @@ import { INSERT_NEW_TIME_TICKET, UPDATE_TIME_TICKET } from "../../graphql/timeti
import { toggleModalVisible } from "../../redux/modals/modals.actions";
import { selectTimeTicket } from "../../redux/modals/modals.selectors";
import { selectBodyshop } from "../../redux/user/user.selectors";
import TimeTicketModalComponent from "./time-ticket-modal.component";
import dayjs from "../../utils/day";
import TimeTicketsCommitToggleComponent from "../time-tickets-commit-toggle/time-tickets-commit-toggle.component";
import { useSplitTreatments } from "@splitsoftware/splitio-react";
import TimeTicketModalComponent from "./time-ticket-modal.component";
const mapStateToProps = createStructuredSelector({
timeTicketModal: selectTimeTicket,
@@ -87,7 +87,7 @@ export function TimeTicketModalContainer({ timeTicketModal, toggleModalVisible,
if (enterAgain) {
//Capture the existing information and repopulate it.
const prev = form.getFieldsValue(["date", "employeeid"]);
const prev = form.getFieldsValue(["date", "employeeid", "flat_rate"]);
form.resetFields();

View File

@@ -1,8 +1,9 @@
import { useEffect, useState, useRef } from "react";
import { useEffect, useRef, useState } from "react";
import SocketIO from "socket.io-client";
import { auth } from "../../firebase/firebase.utils";
import { store } from "../../redux/store";
import { setWssStatus } from "../../redux/application/application.actions";
import { addAlerts, setWssStatus } from "../../redux/application/application.actions";
const useSocket = (bodyshop) => {
const socketRef = useRef(null);
const [clientId, setClientId] = useState(null);
@@ -31,6 +32,14 @@ const useSocket = (bodyshop) => {
socketRef.current = socketInstance;
const handleBodyshopMessage = (message) => {
if (!message || !message?.type) return;
switch (message.type) {
case "alert-update":
store.dispatch(addAlerts(message.payload));
break;
}
if (!import.meta.env.DEV) return;
console.log(`Received message for bodyshop ${bodyshop.id}:`, message);
};
@@ -39,22 +48,22 @@ const useSocket = (bodyshop) => {
console.log("Socket connected:", socketInstance.id);
socketInstance.emit("join-bodyshop-room", bodyshop.id);
setClientId(socketInstance.id);
store.dispatch(setWssStatus("connected"))
store.dispatch(setWssStatus("connected"));
};
const handleReconnect = (attempt) => {
console.log(`Socket reconnected after ${attempt} attempts`);
store.dispatch(setWssStatus("connected"))
store.dispatch(setWssStatus("connected"));
};
const handleConnectionError = (err) => {
console.error("Socket connection error:", err);
store.dispatch(setWssStatus("error"))
store.dispatch(setWssStatus("error"));
};
const handleDisconnect = () => {
console.log("Socket disconnected");
store.dispatch(setWssStatus("disconnected"))
store.dispatch(setWssStatus("disconnected"));
};
socketInstance.on("connect", handleConnect);

View File

@@ -81,14 +81,14 @@ export const logImEXEvent = (eventName, additionalParams, stateProp = null) => {
user: (state.user && state.user.currentUser && state.user.currentUser.email) || null,
...additionalParams
};
axios.post("/ioevent", {
useremail: (state.user && state.user.currentUser && state.user.currentUser.email) || null,
bodyshopid: (state.user && state.user.bodyshop && state.user.bodyshop.id) || null,
operationName: eventName,
variables: additionalParams,
dbevent: false,
env: `master-AIO|${import.meta.env.VITE_APP_GIT_SHA_DATE}`
});
// axios.post("/ioevent", {
// useremail: (state.user && state.user.currentUser && state.user.currentUser.email) || null,
// bodyshopid: (state.user && state.user.bodyshop && state.user.bodyshop.id) || null,
// operationName: eventName,
// variables: additionalParams,
// dbevent: false,
// env: `master-AIO|${import.meta.env.VITE_APP_GIT_SHA_DATE}`
// });
// console.log(
// "%c[Analytics]",
// "background-color: green ;font-weight:bold;",

View File

@@ -692,6 +692,7 @@ export const GET_JOB_BY_PK = gql`
tax_str_rt
tax_sub_rt
tax_tow_rt
tlos_ind
towin
towing_payable
unit_number

View File

@@ -71,7 +71,7 @@ export function DmsContainer({ bodyshop, setBreadcrumbs, setSelectedHeader }) {
...logs,
{
timestamp: new Date(),
level: "WARNING",
level: "WARN",
message: "Reconnected to CDK Export Service"
}
];
@@ -125,7 +125,7 @@ export function DmsContainer({ bodyshop, setBreadcrumbs, setSelectedHeader }) {
>
<Select.Option key="DEBUG">DEBUG</Select.Option>
<Select.Option key="INFO">INFO</Select.Option>
<Select.Option key="WARNING">WARNING</Select.Option>
<Select.Option key="WARN">WARN</Select.Option>
<Select.Option key="ERROR">ERROR</Select.Option>
</Select>
<Button onClick={() => setLogs([])}>Clear Logs</Button>

View File

@@ -90,7 +90,7 @@ export function DmsContainer({ bodyshop, setBreadcrumbs, setSelectedHeader, inse
...logs,
{
timestamp: new Date(),
level: "WARNING",
level: "warn",
message: "Reconnected to CDK Export Service"
}
];
@@ -175,7 +175,7 @@ export function DmsContainer({ bodyshop, setBreadcrumbs, setSelectedHeader, inse
>
<Select.Option key="DEBUG">DEBUG</Select.Option>
<Select.Option key="INFO">INFO</Select.Option>
<Select.Option key="WARNING">WARNING</Select.Option>
<Select.Option key="WARN">WARN</Select.Option>
<Select.Option key="ERROR">ERROR</Select.Option>
</Select>
<Button onClick={() => setLogs([])}>Clear Logs</Button>

View File

@@ -1,4 +1,4 @@
import { FloatButton, Layout, Spin } from "antd";
import { FloatButton, Layout, notification, Spin } from "antd";
// import preval from "preval.macro";
import React, { lazy, Suspense, useContext, useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
@@ -21,11 +21,12 @@ import ShopSubStatusComponent from "../../components/shop-sub-status/shop-sub-st
import { requestForToken } from "../../firebase/firebase.utils";
import SocketContext from "../../contexts/SocketIO/socketContext.jsx";
import { selectBodyshop, selectInstanceConflict } from "../../redux/user/user.selectors";
import UpdateAlert from "../../components/update-alert/update-alert.component";
import InstanceRenderManager from "../../utils/instanceRenderMgr.js";
import "./manage.page.styles.scss";
import WssStatusDisplayComponent from "../../components/wss-status-display/wss-status-display.component.jsx";
import { selectAlerts } from "../../redux/application/application.selectors.js";
import { addAlerts } from "../../redux/application/application.actions.js";
const JobsPage = lazy(() => import("../jobs/jobs.page"));
@@ -104,16 +105,80 @@ const { Content, Footer } = Layout;
const mapStateToProps = createStructuredSelector({
conflict: selectInstanceConflict,
bodyshop: selectBodyshop
bodyshop: selectBodyshop,
alerts: selectAlerts
});
const mapDispatchToProps = (dispatch) => ({});
const ALERT_FILE_URL = InstanceRenderManager({
imex: "https://images.imex.online/alerts/alerts-imex.json",
rome: "https://images.imex.online/alerts/alerts-rome.json"
});
export function Manage({ conflict, bodyshop }) {
const mapDispatchToProps = (dispatch) => ({
setAlerts: (alerts) => dispatch(addAlerts(alerts))
});
export function Manage({ conflict, bodyshop, alerts, setAlerts }) {
const { t } = useTranslation();
const [chatVisible] = useState(false);
const { socket, clientId } = useContext(SocketContext);
// State to track displayed alerts
const [displayedAlertIds, setDisplayedAlertIds] = useState([]);
// Fetch displayed alerts from localStorage on mount
useEffect(() => {
const displayedAlerts = JSON.parse(localStorage.getItem("displayedAlerts") || "[]");
setDisplayedAlertIds(displayedAlerts);
}, []);
// Fetch alerts from the JSON file and dispatch to Redux store
useEffect(() => {
const fetchAlerts = async () => {
try {
const response = await fetch(ALERT_FILE_URL);
const fetchedAlerts = await response.json();
setAlerts(fetchedAlerts);
} catch (error) {
console.error("Error fetching alerts:", error);
}
};
fetchAlerts();
}, []);
// Use useEffect to watch for new alerts
useEffect(() => {
if (alerts && Object.keys(alerts).length > 0) {
// Convert the alerts object into an array
const alertArray = Object.values(alerts);
// Filter out alerts that have already been dismissed
const newAlerts = alertArray.filter((alert) => !displayedAlertIds.includes(alert.id));
newAlerts.forEach((alert) => {
// Display the notification
notification.open({
key: "notification-alerts-" + alert.id,
message: alert.message,
description: alert.description,
type: alert.type || "info",
duration: 0,
placement: "bottomRight",
closable: true,
onClose: () => {
// When the notification is closed, update displayed alerts state and localStorage
setDisplayedAlertIds((prevIds) => {
const updatedIds = [...prevIds, alert.id];
localStorage.setItem("displayedAlerts", JSON.stringify(updatedIds));
return updatedIds;
});
}
});
});
}
}, [alerts, displayedAlertIds]);
useEffect(() => {
const widgetId = InstanceRenderManager({
imex: "IABVNO4scRKY11XBQkNr",

View File

@@ -67,6 +67,12 @@ export const setUpdateAvailable = (isUpdateAvailable) => ({
type: ApplicationActionTypes.SET_UPDATE_AVAILABLE,
payload: isUpdateAvailable
});
export const addAlerts = (alerts) => ({
type: ApplicationActionTypes.ADD_ALERTS,
payload: alerts
});
export const setWssStatus = (status) => ({
type: ApplicationActionTypes.SET_WSS_STATUS,
payload: status

View File

@@ -15,7 +15,8 @@ const INITIAL_STATE = {
error: null
},
jobReadOnly: false,
partnerVersion: null
partnerVersion: null,
alerts: {}
};
const applicationReducer = (state = INITIAL_STATE, action) => {
@@ -91,6 +92,18 @@ const applicationReducer = (state = INITIAL_STATE, action) => {
case ApplicationActionTypes.SET_WSS_STATUS: {
return { ...state, wssStatus: action.payload };
}
case ApplicationActionTypes.ADD_ALERTS: {
const newAlertsMap = { ...state.alerts };
action.payload.alerts.forEach((alert) => {
newAlertsMap[alert.id] = alert;
});
return {
...state,
alerts: newAlertsMap
};
}
default:
return state;
}

View File

@@ -23,3 +23,4 @@ export const selectOnline = createSelector([selectApplication], (application) =>
export const selectProblemJobs = createSelector([selectApplication], (application) => application.problemJobs);
export const selectUpdateAvailable = createSelector([selectApplication], (application) => application.updateAvailable);
export const selectWssStatus = createSelector([selectApplication], (application) => application.wssStatus);
export const selectAlerts = createSelector([selectApplication], (application) => application.alerts);

View File

@@ -13,6 +13,7 @@ const ApplicationActionTypes = {
INSERT_AUDIT_TRAIL: "INSERT_AUDIT_TRAIL",
SET_PROBLEM_JOBS: "SET_PROBLEM_JOBS",
SET_UPDATE_AVAILABLE: "SET_UPDATE_AVAILABLE",
SET_WSS_STATUS: "SET_WSS_STATUS"
SET_WSS_STATUS: "SET_WSS_STATUS",
ADD_ALERTS: "ADD_ALERTS"
};
export default ApplicationActionTypes;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -74,7 +74,7 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
environment:
- SERVICES=ses,secretsmanager,cloudwatch,logs
- SERVICES=s3,ses,secretsmanager,cloudwatch,logs
- DEBUG=0
- AWS_ACCESS_KEY_ID=test
- AWS_SECRET_ACCESS_KEY=test
@@ -115,7 +115,8 @@ services:
aws --endpoint-url=http://localstack:4566 ses verify-domain-identity --domain imex.online --region ca-central-1
aws --endpoint-url=http://localstack:4566 ses verify-email-identity --email-address noreply@imex.online --region ca-central-1
aws --endpoint-url=http://localstack:4566 secretsmanager create-secret --name CHATTER_PRIVATE_KEY --secret-string file:///tmp/certs/id_rsa
aws --endpoint-url=http://localstack:4566 logs create-log-group --log-group-name development --region ca-central-1
aws --endpoint-url=http://localstack:4566 logs create-log-group --log-group-name development --region ca-central-1
aws --endpoint-url=http://localstack:4566 s3api create-bucket --bucket imex-large-log --create-bucket-configuration LocationConstraint=ca-central-1
"
# Node App: The Main IMEX API
node-app:

View File

@@ -69,7 +69,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
jobline:
job:
@@ -180,7 +179,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -387,7 +385,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -504,7 +501,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bill:
job:
@@ -671,7 +667,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
_and:
- job:
@@ -1285,7 +1280,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
courtesycar:
bodyshop:
@@ -1526,7 +1520,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -1786,7 +1779,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -1920,7 +1912,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
_or:
- job:
@@ -2105,7 +2096,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
employee_team:
bodyshop:
@@ -2268,7 +2258,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
employee:
bodyshop:
@@ -2449,7 +2438,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -2696,7 +2684,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -2808,7 +2795,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
conversation:
bodyshop:
@@ -3123,7 +3109,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
job:
bodyshop:
@@ -4232,7 +4217,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -4248,41 +4232,41 @@
enable_manual: false
update:
columns:
- clm_no
- v_make_desc
- date_next_contact
- status
- employee_csr
- employee_prep
- clm_total
- suspended
- employee_body
- ro_number
- actual_in
- ownr_co_nm
- v_model_yr
- comment
- job_totals
- v_vin
- ownr_fn
- scheduled_completion
- special_coverage_policy
- v_color
- ca_gst_registrant
- scheduled_delivery
- actual_delivery
- actual_completion
- kanbanparent
- est_ct_fn
- alt_transport
- v_model_desc
- clm_no
- v_make_desc
- date_next_contact
- status
- employee_csr
- actual_in
- v_model_yr
- comment
- job_totals
- ownr_fn
- v_color
- ca_gst_registrant
- employee_refinish
- ownr_ph1
- date_last_contacted
- alt_transport
- inproduction
- est_ct_ln
- production_vars
- category
- v_model_desc
- date_invoiced
- est_co_nm
- ownr_ln
@@ -4295,6 +4279,12 @@
- name: event-secret
value_from_env: EVENT_SECRET
request_transform:
body:
action: transform
template: |-
{
"data": {{$body?.event?.data?.new}}
}
method: POST
query_params: {}
template_engine: Kriti
@@ -4496,7 +4486,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
conversation:
bodyshop:
@@ -4670,7 +4659,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
job:
bodyshop:
@@ -4805,7 +4793,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -5110,7 +5097,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
parts_order:
job:
@@ -5243,7 +5229,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
job:
bodyshop:
@@ -5419,7 +5404,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
job:
bodyshop:
@@ -5559,7 +5543,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -5670,7 +5653,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
_or:
- parentjob_rel:
@@ -5760,7 +5742,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
job:
bodyshop:
@@ -6045,7 +6026,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -6541,7 +6521,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:
@@ -6698,7 +6677,6 @@
delete_permissions:
- role: user
permission:
backend_only: false
filter:
bodyshop:
associations:

View File

@@ -0,0 +1,3 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_timetickets_date ON timetickets (date );

View File

@@ -0,0 +1 @@
CREATE INDEX idx_timetickets_date ON timetickets (date );

View File

@@ -0,0 +1,9 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
-- CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
-- CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
-- CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
-- CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
-- CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
-- CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);

View File

@@ -0,0 +1,7 @@
CREATE INDEX idx_jobs_ownr_fn ON jobs USING gin (ownr_fn gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_ln ON jobs USING gin (ownr_ln gin_trgm_ops);
CREATE INDEX idx_jobs_ownr_co_nm ON jobs USING gin (ownr_co_nm gin_trgm_ops);
CREATE INDEX idx_jobs_clm_no ON jobs USING gin (clm_no gin_trgm_ops);
CREATE INDEX idx_jobs_v_make_desc ON jobs USING gin (v_make_desc gin_trgm_ops);
CREATE INDEX idx_jobs_v_model_desc ON jobs USING gin (v_model_desc gin_trgm_ops);
CREATE INDEX idx_jobs_plate_no ON jobs USING gin (plate_no gin_trgm_ops);

View File

@@ -0,0 +1,3 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);

View File

@@ -0,0 +1 @@
CREATE INDEX idx_exportlog_createdat_desc ON exportlog (created_at desc);

View File

@@ -0,0 +1,4 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
-- WHERE read = false AND isoutbound = false;

View File

@@ -0,0 +1,2 @@
CREATE index idx_messages_unread_agg ON messages (read, isoutbound)
WHERE read = false AND isoutbound = false;

View File

@@ -0,0 +1,10 @@
-- Could not auto-generate a down migration.
-- Please write an appropriate down migration for the SQL below:
-- CREATE INDEX jobs_search_gin_ro_number ON jobs USING GIN ((ro_number) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_ownrfn ON jobs USING GIN ((ownr_fn) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_clm_no ON jobs USING GIN ((clm_no) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_plate_no ON jobs USING GIN ((plate_no) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_v_make_desc ON jobs USING GIN (( v_make_desc) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_v_model_desc ON jobs USING GIN (( v_model_desc) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_ownr_ln ON jobs USING GIN (( ownr_ln) gin_trgm_ops);
-- CREATE INDEX jobs_search_gin_ownr_co_nm ON jobs USING GIN (( ownr_co_nm) gin_trgm_ops);

View File

@@ -0,0 +1,8 @@
CREATE INDEX jobs_search_gin_ro_number ON jobs USING GIN ((ro_number) gin_trgm_ops);
CREATE INDEX jobs_search_gin_ownrfn ON jobs USING GIN ((ownr_fn) gin_trgm_ops);
CREATE INDEX jobs_search_gin_clm_no ON jobs USING GIN ((clm_no) gin_trgm_ops);
CREATE INDEX jobs_search_gin_plate_no ON jobs USING GIN ((plate_no) gin_trgm_ops);
CREATE INDEX jobs_search_gin_v_make_desc ON jobs USING GIN (( v_make_desc) gin_trgm_ops);
CREATE INDEX jobs_search_gin_v_model_desc ON jobs USING GIN (( v_model_desc) gin_trgm_ops);
CREATE INDEX jobs_search_gin_ownr_ln ON jobs USING GIN (( ownr_ln) gin_trgm_ops);
CREATE INDEX jobs_search_gin_ownr_co_nm ON jobs USING GIN (( ownr_co_nm) gin_trgm_ops);

1286
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -21,6 +21,7 @@
"dependencies": {
"@aws-sdk/client-cloudwatch-logs": "^3.679.0",
"@aws-sdk/client-elasticache": "^3.675.0",
"@aws-sdk/client-s3": "^3.689.0",
"@aws-sdk/client-secrets-manager": "^3.675.0",
"@aws-sdk/client-ses": "^3.675.0",
"@aws-sdk/credential-provider-node": "^3.675.0",

View File

@@ -21,7 +21,7 @@ const { applyRedisHelpers } = require("./server/utils/redisHelpers");
const { applyIOHelpers } = require("./server/utils/ioHelpers");
const { redisSocketEvents } = require("./server/web-sockets/redisSocketEvents");
const { ElastiCacheClient, DescribeCacheClustersCommand } = require("@aws-sdk/client-elasticache");
const { default: InstanceManager } = require("./server/utils/instanceMgr");
const { InstanceRegion } = require("./server/utils/instanceMgr");
const CLUSTER_RETRY_BASE_DELAY = 100;
const CLUSTER_RETRY_MAX_DELAY = 5000;
@@ -114,10 +114,7 @@ const applyRoutes = ({ app }) => {
*/
const getRedisNodesFromAWS = async () => {
const client = new ElastiCacheClient({
region: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
})
region: InstanceRegion()
});
const params = {

View File

@@ -611,7 +611,7 @@ async function InsertFailedExportLog(socket, error) {
bodyshopid: socket.JobData.bodyshop.id,
jobid: socket.JobData.id,
successful: false,
message: [error],
message: JSON.stringify(error),
useremail: socket.user.email
}
});

View File

@@ -167,7 +167,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
const Vendor = {
DisplayName: bill.vendor.name
DisplayName: StandardizeName(bill.vendor.name)
};
try {
const result = await oauthClient.makeApiCall({

View File

@@ -219,6 +219,11 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef,
PaymentMethodRef: {
value: paymentMethods[payment.type]
},
PrivateNote: payment.memo
? payment.memo.length > 4000
? payment.memo.substring(0, 4000).trim()
: payment.memo.trim()
: "",
PaymentRefNum: payment.transactionid,
...(invoices && invoices.length === 1 && invoices[0]
? {

View File

@@ -10,7 +10,7 @@ function urlBuilder(realmId, object, query = null) {
}
function StandardizeName(str) {
return str.replace(new RegExp(/'/g), "\\'");
return str.replace(new RegExp(/'/g), "\\'").trim();
}
exports.urlBuilder = urlBuilder;

View File

@@ -0,0 +1,76 @@
const axios = require("axios");
const _ = require("lodash");
const { default: InstanceMgr } = require("../utils/instanceMgr"); // For deep object comparison
// Constants
const ALERTS_REDIS_KEY = "alerts_data"; // The key under which we'll store alerts in Redis
const GLOBAL_SOCKET_ID = "global"; // Use 'global' as a socketId to store global data
const ALERT_FILE_URL = InstanceMgr({
imex: "https://images.imex.online/alerts/alerts-imex.json",
rome: "https://images.imex.online/alerts/alerts-rome.json"
});
const alertCheck = async (req, res) => {
// Access Redis helper functions
const { ioRedis, logger } = req;
const { getSessionData, setSessionData } = req.sessionUtils;
try {
// Get the JSON Alert file from the server
const response = await axios.get(ALERT_FILE_URL);
const currentAlerts = response.data;
// Retrieve stored alerts from Redis using a global socketId
const storedAlerts = await getSessionData(GLOBAL_SOCKET_ID, ALERTS_REDIS_KEY);
if (!storedAlerts) {
// Alerts not in Redis, store them
await setSessionData(GLOBAL_SOCKET_ID, ALERTS_REDIS_KEY, currentAlerts);
logger.logger.debug("Alerts added to Redis for the first time.");
// Emit to clients
if (ioRedis) {
ioRedis.emit("bodyshop-message", {
type: "alert-update",
payload: currentAlerts
});
logger.logger.debug("Alerts emitted to clients for the first time.");
} else {
logger.log("Socket.IO instance not found. (1)", "error");
}
return res.status(200).send("Alerts added to Redis and emitted to clients.");
} else {
// Alerts are in Redis, compare them
if (!_.isEqual(currentAlerts, storedAlerts)) {
// Alerts are different, update Redis and emit to clients
await setSessionData(GLOBAL_SOCKET_ID, ALERTS_REDIS_KEY, currentAlerts);
logger.logger.debug("Alerts updated in Redis.");
// Emit the new alerts to all connected clients
if (ioRedis) {
ioRedis.emit("bodyshop-message", {
type: "alert-update",
payload: currentAlerts
});
logger.logger.debug("Alerts emitted to clients after update.");
} else {
logger.log("Socket.IO instance not found. (2)", "error");
}
return res.status(200).send("Alerts updated in Redis and emitted to clients.");
} else {
return res.status(200).send("No changes in alerts.");
}
}
} catch (error) {
logger.log("Error in alertCheck:", "error", null, null, {
error: {
message: error.message,
stack: error.stack
}
});
return res.status(500).send("Internal server error.");
}
};
module.exports = { alertCheck };

View File

@@ -995,7 +995,7 @@ async function InsertFailedExportLog(socket, error) {
bodyshopid: socket.JobData.bodyshop.id,
jobid: socket.JobData.id,
successful: false,
message: [error],
message: JSON.stringify(error),
useremail: socket.user.email
}
});

View File

@@ -20,7 +20,7 @@ function CheckCdkResponseForError(socket, soapResponse) {
//The response was null, this might be ok, it might not.
CdkBase.createLogEvent(
socket,
"WARNING",
"warn",
`Warning detected in CDK Response - it appears to be null. Stack: ${new Error().stack}`
);
return;

View File

@@ -13,6 +13,7 @@ let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
const { sendServerEmail } = require("../email/sendemail");
const AHDineroFormat = "0.00";
const AhDateFormat = "MMDDYYYY";
@@ -26,170 +27,180 @@ const ftpSetup = {
password: process.env.AUTOHOUSE_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"]
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid]
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("autohouse-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("autohouse-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("autohouse-end", "DEBUG", "api", null, null);
} catch (error) {
logger.log("autohouse-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const autoHouseObject = {
AutoHouseExport: {
RepairOrder: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
};
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
autoHouseObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autuhouseid: bodyshop.autuhouseid,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
const { jobs, bodyshops_by_pk } = await client.request(queries.AUTOHOUSE_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
...errors
})
);
try {
//Connect to the FTP and upload all.
const autoHouseObject = {
AutoHouseExport: {
RepairOrder: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
};
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename
});
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
uploadResult
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
const ret = builder.create({}, autoHouseObject).end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: autoHouseObject.AutoHouseExport.RepairOrder.length,
xml: ret,
filename: `IM_${bodyshop.autohouseid}_${moment().format("DDMMYYYY_HHMMss")}.xml`
});
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
...error
//Error at the shop level.
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
fatal: true,
errors: [error.toString()]
});
} finally {
sftp.end();
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
autohouseid: bodyshop.autohouseid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
sendServerEmail({
subject: `Autohouse Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
}
/**
 * Pushes every generated AutoHouse XML payload over a single SFTP session.
 * Each entry is mutated in place with the SFTP `result`; the first failed
 * transfer is logged and rethrown so the caller's reporting fires.
 * @param {Array<{xml: string, filename: string, result?: any}>} allxmlsToUpload
 */
async function uploadViaSFTP(allxmlsToUpload) {
  const connection = new Client();
  connection.on("error", (errors) =>
    logger.log("autohouse-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
  );
  try {
    // One connection is opened and reused for the entire batch.
    await connection.connect(ftpSetup);
    for (const upload of allxmlsToUpload) {
      try {
        upload.result = await connection.put(Buffer.from(upload.xml), `${upload.filename}`);
        logger.log("autohouse-sftp-upload", "DEBUG", "api", null, { filename: upload.filename, result: upload.result });
      } catch (putError) {
        logger.log("autohouse-sftp-upload-error", "ERROR", "api", null, {
          filename: upload.filename,
          error: putError.message,
          stack: putError.stack
        });
        throw putError; // Abort the batch on the first failed transfer.
      }
    }
  } catch (sftpError) {
    logger.log("autohouse-sftp-error", "ERROR", "api", null, { error: sftpError.message, stack: sftpError.stack });
    throw sftpError;
  } finally {
    connection.end(); // Always release the connection, success or failure.
  }
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -287,8 +298,8 @@ const CreateRepairOrderTag = (job, errorCallback) => {
InsuranceCo: job.ins_co_nm || "",
CompanyName: job.ins_co_nm || "",
Address: job.ins_addr1 || "",
City: job.ins_addr1 || "",
State: job.ins_city || "",
City: job.ins_city || "",
State: job.ins_st || "",
Zip: job.ins_zip || "",
Phone: job.ins_ph1 || "",
Fax: job.ins_fax || "",
@@ -601,10 +612,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
logger.log("autohouse-job-calculate-error", "ERROR", "api", null, {
error
});
logger.log("autohouse-job-calculate-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};

View File

@@ -39,7 +39,11 @@ exports.default = async (req, res) => {
}
// Send immediate response and continue processing.
res.status(200).send();
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
logger.log("chatter-start", "DEBUG", "api", null, null);
@@ -176,9 +180,8 @@ async function uploadViaSFTP(allcsvsToUpload) {
for (const csvObj of allcsvsToUpload) {
try {
logger.log("chatter-sftp-upload", "DEBUG", "api", null, { filename: csvObj.filename });
csvObj.result = await sftp.put(Buffer.from(csvObj.csv), `${csvObj.filename}`);
logger.log("chatter-sftp-upload-result", "DEBUG", "api", null, {
logger.log("chatter-sftp-upload", "DEBUG", "api", null, {
filename: csvObj.filename,
result: csvObj.result
});

View File

@@ -26,174 +26,184 @@ const ftpSetup = {
password: process.env.CLAIMSCORP_PASSWORD,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"]
serverHostKey: ["ssh-rsa", "ssh-dss", "rsa-sha2-256", "rsa-sha2-512", "ecdsa-sha2-nistp256", "ecdsa-sha2-nistp384"]
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("claimscorp-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CLAIMSCORP_SHOPS);
const specificShopIds = req.body.bodyshopIds; // ['uuid]
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("claimscorp-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_CLAIMSCORP_SHOPS); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("claimscorp-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("claimscorp-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("claimscorp-end", "DEBUG", "api", null, null);
} catch (error) {
logger.log("claimscorp-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
logger.log("claimscorp-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.CLAIMSCORP_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const claimsCorpObject = {
DataFeed: {
ShopInfo: {
ShopID: bodyshops_by_pk.claimscorpid,
ShopName: bodyshops_by_pk.shopname,
RO: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
}
};
if (erroredJobs.length > 0) {
logger.log("claimscorp-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
claimsCorpObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: claimsCorpObject.DataFeed.ShopInfo.RO.length,
xml: ret,
filename: `${bodyshop.claimscorpid}-${moment().format("YYYYMMDDTHHMMss")}.xml`
});
logger.log("claimscorp-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("claimscorp-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
const { jobs, bodyshops_by_pk } = await client.request(queries.CLAIMSCORP_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("claimscorp-sftp-error", "ERROR", "api", null, {
...errors
})
);
try {
//Connect to the FTP and upload all.
const claimsCorpObject = {
DataFeed: {
ShopInfo: {
ShopID: bodyshops_by_pk.claimscorpid,
ShopName: bodyshops_by_pk.shopname,
RO: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
}
};
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
logger.log("claimscorp-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename
});
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("claimscorp-sftp-upload-result", "DEBUG", "api", null, {
uploadResult
if (erroredJobs.length > 0) {
logger.log("claimscorp-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
const ret = builder.create({}, claimsCorpObject).end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: claimsCorpObject.DataFeed.ShopInfo.RO.length,
xml: ret,
filename: `${bodyshop.claimscorpid}-${moment().format("YYYYMMDDTHHMMss")}.xml`
});
logger.log("claimscorp-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
logger.log("claimscorp-sftp-error", "ERROR", "api", null, {
...error
//Error at the shop level.
logger.log("claimscorp-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
fatal: true,
errors: [error.toString()]
});
} finally {
sftp.end();
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
claimscorpid: bodyshop.claimscorpid,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
sendServerEmail({
subject: `ClaimsCorp Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
}
/**
 * Uploads every generated ClaimsCorp XML file over a single SFTP session.
 * Attaches the SFTP `result` to each entry in place; the first failure is
 * logged and rethrown so the caller's error reporting fires.
 * @param {Array<{xml: string, filename: string, result?: any}>} allxmlsToUpload
 */
async function uploadViaSFTP(allxmlsToUpload) {
  const session = new Client();
  session.on("error", (errors) =>
    logger.log("claimscorp-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
  );
  try {
    await session.connect(ftpSetup); // Single connection reused for the whole batch.
    for (const entry of allxmlsToUpload) {
      try {
        const result = await session.put(Buffer.from(entry.xml), `${entry.filename}`);
        entry.result = result;
        logger.log("claimscorp-sftp-upload", "DEBUG", "api", null, { filename: entry.filename, result: entry.result });
      } catch (putError) {
        logger.log("claimscorp-sftp-upload-error", "ERROR", "api", null, {
          filename: entry.filename,
          error: putError.message,
          stack: putError.stack
        });
        throw putError; // Stop the batch — remaining files are not attempted.
      }
    }
  } catch (sessionError) {
    logger.log("claimscorp-sftp-error", "ERROR", "api", null, { error: sessionError.message, stack: sessionError.stack });
    throw sessionError;
  } finally {
    session.end(); // Close the connection regardless of outcome.
  }
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -445,10 +455,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
logger.log("claimscorp-job-calculate-error", "ERROR", "api", null, {
error
});
logger.log("claimscorp-job-calculate-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};

View File

@@ -16,8 +16,7 @@ const { sendServerEmail } = require("../email/sendemail");
// Format mask applied when rendering Dinero money amounts in the export.
const DineroFormat = "0,0.00";
// Moment.js date format used for dates written into the feed.
const DateFormat = "MM/DD/YYYY";
// Operation codes treated as "repair" lines — NOTE(review): confirm against the CIECA op-code list.
const repairOpCodes = ["OP4", "OP9", "OP10"];
// Operation codes treated as "replace" lines — NOTE(review): confirm against the CIECA op-code list.
const replaceOpCodes = ["OP2", "OP5", "OP11", "OP12"];
// IMEX shop IDs enrolled in the Kaizen data feed.
const kaizenShopsIDs = ["SUMMIT", "STRATHMORE", "SUNRIDGE", "SHAW"];
const ftpSetup = {
host: process.env.KAIZEN_HOST,
@@ -30,173 +29,179 @@ const ftpSetup = {
}
};
const allxmlsToUpload = [];
const allErrors = [];
exports.default = async (req, res) => {
// Only process if in production environment.
if (process.env.NODE_ENV !== "production") {
res.sendStatus(403);
return;
}
//Query for the List of Bodyshop Clients.
logger.log("kaizen-start", "DEBUG", "api", null, null);
const kaizenShopsIDs = ["SUMMIT", "STRATHMORE", "SUNRIDGE", "SHAW"];
const { bodyshops } = await client.request(queries.GET_KAIZEN_SHOPS, {
imexshopid: kaizenShopsIDs
});
const specificShopIds = req.body.bodyshopIds; // ['uuid]
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
// Only process if the appropriate token is provided.
if (req.headers["x-imex-auth"] !== process.env.AUTOHOUSE_AUTH_TOKEN) {
res.sendStatus(401);
return;
}
const allxmlsToUpload = [];
const allErrors = [];
// Send immediate response and continue processing.
res.status(202).json({
success: true,
message: "Processing request ...",
timestamp: new Date().toISOString()
});
try {
for (const bodyshop of specificShopIds ? bodyshops.filter((b) => specificShopIds.includes(b.id)) : bodyshops) {
logger.log("kaizen-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_KAIZEN_SHOPS, { imexshopid: kaizenShopsIDs }); //Query for the List of Bodyshop Clients.
const specificShopIds = req.body.bodyshopIds; // ['uuid];
const { start, end, skipUpload } = req.body; //YYYY-MM-DD
const batchSize = 10;
const shopsToProcess =
specificShopIds?.length > 0 ? bodyshops.filter((shop) => specificShopIds.includes(shop.id)) : bodyshops;
logger.log("kaizen-shopsToProcess-generated", "DEBUG", "api", null, null);
if (shopsToProcess.length === 0) {
logger.log("kaizen-shopsToProcess-empty", "DEBUG", "api", null, null);
return;
}
const batchPromises = [];
for (let i = 0; i < shopsToProcess.length; i += batchSize) {
const batch = shopsToProcess.slice(i, i + batchSize);
const batchPromise = (async () => {
await processBatch(batch, start, end);
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
} else {
await uploadViaSFTP(allxmlsToUpload);
}
})();
batchPromises.push(batchPromise);
}
await Promise.all(batchPromises);
await sendServerEmail({
subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\nUploaded:\n${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count, result: x.result })),
null,
2
)}`
});
logger.log("kaizen-end", "DEBUG", "api", null, null);
} catch (error) {
logger.log("kaizen-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
}
};
async function processBatch(batch, start, end) {
for (const bodyshop of batch) {
const erroredJobs = [];
try {
logger.log("kaizen-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
const erroredJobs = [];
try {
const { jobs, bodyshops_by_pk } = await client.request(queries.KAIZEN_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
const kaizenObject = {
DataFeed: {
ShopInfo: {
ShopName: bodyshops_by_pk.shopname,
Jobs: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
}
};
if (erroredJobs.length > 0) {
logger.log("kaizen-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
var ret = builder
.create(
{
// version: "1.0",
// encoding: "UTF-8",
//keepNullNodes: true,
},
kaizenObject
)
.end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: kaizenObject.DataFeed.ShopInfo.Jobs.length,
xml: ret,
filename: `${bodyshop.shopname}-${moment().format("YYYYMMDDTHHMMss")}.xml`
});
logger.log("kaizen-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
//Error at the shop level.
logger.log("kaizen-error-shop", "ERROR", "api", bodyshop.id, {
...error
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
fatal: true,
errors: [error.toString()]
});
} finally {
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
}
if (skipUpload) {
for (const xmlObj of allxmlsToUpload) {
fs.writeFileSync(`./logs/${xmlObj.filename}`, xmlObj.xml);
}
res.json(allxmlsToUpload);
sendServerEmail({
subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
const { jobs, bodyshops_by_pk } = await client.request(queries.KAIZEN_QUERY, {
bodyshopid: bodyshop.id,
start: start ? moment(start).startOf("day") : moment().subtract(5, "days").startOf("day"),
...(end && { end: moment(end).endOf("day") })
});
return;
}
let sftp = new Client();
sftp.on("error", (errors) =>
logger.log("kaizen-sftp-error", "ERROR", "api", null, {
...errors
})
);
try {
//Connect to the FTP and upload all.
const kaizenObject = {
DataFeed: {
ShopInfo: {
ShopName: bodyshops_by_pk.shopname,
Jobs: jobs.map((j) =>
CreateRepairOrderTag({ ...j, bodyshop: bodyshops_by_pk }, function ({ job, error }) {
erroredJobs.push({ job: job, error: error.toString() });
})
)
}
}
};
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
logger.log("kaizen-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename
});
const uploadResult = await sftp.put(Buffer.from(xmlObj.xml), `/${xmlObj.filename}`);
logger.log("kaizen-sftp-upload-result", "DEBUG", "api", null, {
uploadResult
if (erroredJobs.length > 0) {
logger.log("kaizen-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
});
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
const ret = builder.create({}, kaizenObject).end({ allowEmptyTags: true });
allxmlsToUpload.push({
count: kaizenObject.DataFeed.ShopInfo.Jobs.length,
xml: ret,
filename: `${bodyshop.shopname}-${moment().format("YYYYMMDDTHHMMss")}.xml`
});
logger.log("kaizen-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname
});
} catch (error) {
logger.log("kaizen-sftp-error", "ERROR", "api", null, {
...error
//Error at the shop level.
logger.log("kaizen-error-shop", "ERROR", "api", bodyshop.id, { error: error.message, stack: error.stack });
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
fatal: true,
errors: [error.toString()]
});
} finally {
sftp.end();
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
shopname: bodyshop.shopname,
errors: erroredJobs.map((ej) => ({
ro_number: ej.job?.ro_number,
jobid: ej.job?.id,
error: ej.error
}))
});
}
sendServerEmail({
subject: `Kaizen Report ${moment().format("MM-DD-YY")}`,
text: `Errors: ${allErrors.map((e) => JSON.stringify(e, null, 2))}
Uploaded: ${JSON.stringify(
allxmlsToUpload.map((x) => ({ filename: x.filename, count: x.count })),
null,
2
)}
`
});
res.sendStatus(200);
} catch (error) {
res.status(200).json(error);
}
};
}
/**
 * Transfers every generated Kaizen XML file to the SFTP endpoint.
 * The SFTP `result` is stored on each entry in place; a failed transfer is
 * logged and rethrown, aborting the rest of the batch.
 * @param {Array<{xml: string, filename: string, result?: any}>} allxmlsToUpload
 */
async function uploadViaSFTP(allxmlsToUpload) {
  const sftpConnection = new Client();
  sftpConnection.on("error", (errors) =>
    logger.log("kaizen-sftp-connection-error", "ERROR", "api", null, { error: errors.message, stack: errors.stack })
  );
  try {
    // Connect once, then push files sequentially over the same connection.
    await sftpConnection.connect(ftpSetup);
    for (const file of allxmlsToUpload) {
      try {
        file.result = await sftpConnection.put(Buffer.from(file.xml), `${file.filename}`);
        logger.log("kaizen-sftp-upload", "DEBUG", "api", null, { filename: file.filename, result: file.result });
      } catch (uploadError) {
        logger.log("kaizen-sftp-upload-error", "ERROR", "api", null, {
          filename: file.filename,
          error: uploadError.message,
          stack: uploadError.stack
        });
        throw uploadError; // First failure aborts the remaining uploads.
      }
    }
  } catch (outerError) {
    logger.log("kaizen-sftp-error", "ERROR", "api", null, { error: outerError.message, stack: outerError.stack });
    throw outerError;
  } finally {
    sftpConnection.end(); // Tear down the connection in every case.
  }
}
const CreateRepairOrderTag = (job, errorCallback) => {
//Level 2
@@ -420,10 +425,7 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
logger.log("kaizen-job-calculate-error", "ERROR", "api", null, {
error
});
logger.log("kaizen-job-calculate-error", "ERROR", "api", null, { error: error.message, stack: error.stack });
errorCallback({ jobid: job.id, ro_number: job.ro_number, error });
}
};

View File

@@ -1,6 +1,6 @@
const { isString, isEmpty } = require("lodash");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { default: InstanceManager } = require("../utils/instanceMgr");
const { InstanceRegion } = require("../utils/instanceMgr");
const aws = require("@aws-sdk/client-ses");
const nodemailer = require("nodemailer");
const logger = require("../utils/logger");
@@ -10,12 +10,7 @@ const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.e
const sesConfig = {
apiVersion: "latest",
credentials: defaultProvider(),
region: isLocal
? "ca-central-1"
: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
})
region: InstanceRegion()
};
if (isLocal) {

View File

@@ -17,12 +17,10 @@ require("dotenv").config({
const domain = process.env.NODE_ENV ? "secure" : "test";
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
const { InstanceRegion } = require("../utils/instanceMgr");
const client = new SecretsManagerClient({
region: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
})
region: InstanceRegion()
});
const gqlClient = require("../graphql-client/graphql-client").client;

View File

@@ -78,16 +78,20 @@ const jobLifecycle = async (req, res) => {
Object.keys(flatGroupedAllDurations).forEach((status) => {
const value = flatGroupedAllDurations[status].reduce((acc, curr) => acc + curr.value, 0);
const humanReadable = durationToHumanReadable(moment.duration(value));
const percentage = (value / finalTotal) * 100;
const percentage = finalTotal > 0 ? (value / finalTotal) * 100 : 0;
const color = getLifecycleStatusColor(status);
const roundedPercentage = `${Math.round(percentage)}%`;
const averageValue = _.size(jobIDs) > 0 ? value / jobIDs.length : 0;
const averageHumanReadable = durationToHumanReadable(moment.duration(averageValue));
finalSummations.push({
status,
value,
humanReadable,
percentage,
color,
roundedPercentage
roundedPercentage,
averageValue,
averageHumanReadable
});
});
@@ -100,7 +104,12 @@ const jobLifecycle = async (req, res) => {
totalStatuses: finalSummations.length,
total: finalTotal,
statusCounts: finalStatusCounts,
humanReadable: durationToHumanReadable(moment.duration(finalTotal))
humanReadable: durationToHumanReadable(moment.duration(finalTotal)),
averageValue: _.size(jobIDs) > 0 ? finalTotal / jobIDs.length : 0,
averageHumanReadable:
_.size(jobIDs) > 0
? durationToHumanReadable(moment.duration(finalTotal / jobIDs.length))
: durationToHumanReadable(moment.duration(0))
}
});
};

View File

@@ -2,8 +2,16 @@ const { isObject } = require("lodash");
const jobUpdated = async (req, res) => {
const { ioRedis, logger, ioHelpers } = req;
// Old Way
if (req?.body?.event?.data?.new || isObject(req?.body?.event?.data?.new)) {
const updatedJob = req.body.event.data.new;
const bodyshopID = updatedJob.shopid;
ioRedis.to(ioHelpers.getBodyshopRoom(bodyshopID)).emit("production-job-updated", updatedJob);
return res.json({ message: "Job updated and event emitted" });
}
if (!req?.body?.event?.data?.new || !isObject(req?.body?.event?.data?.new)) {
// New way
if (!req?.body?.data || !isObject(req.body.data)) {
logger.log("job-update-error", "ERROR", req.user?.email, null, {
message: `Malformed Job Update request sent from Hasura`,
body: req?.body
@@ -15,12 +23,14 @@ const jobUpdated = async (req, res) => {
});
}
logger.log("job-update", "DEBUG", req.user?.email, null, {
message: `Job updated event received from Hasura`,
jobid: req?.body?.event?.data?.new?.id
});
// Uncomment for further testing
// You can also test this using SocketIOAdmin
// logger.log("job-update", "DEBUG", req.user?.email, null, {
// message: `Job updated event received from Hasura`,
// jobid: req?.body?.event?.data?.new?.id
// });
const updatedJob = req.body.event.data.new;
const updatedJob = req.body.data;
const bodyshopID = updatedJob.shopid;
// Emit the job-updated event only to the room corresponding to the bodyshop

View File

@@ -59,7 +59,7 @@ exports.mixdataUpload = async (req, res) => {
res.status(500).json(error);
logger.log("job-mixdata-upload-error", "ERROR", null, null, {
error: error.message,
...error
stack: error.stack
});
}
};

View File

@@ -12,9 +12,10 @@ const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebas
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { taskAssignedEmail, tasksRemindEmail } = require("../email/tasksEmails");
const { canvastest } = require("../render/canvas-handler");
const { alertCheck } = require("../alerts/alertcheck");
//Test route to ensure Express is responding.
router.get("/test", async function (req, res) {
router.get("/test", eventAuthorizationMiddleware, async function (req, res) {
const commit = require("child_process").execSync("git rev-parse --short HEAD");
// console.log(app.get('trust proxy'));
// console.log("remoteAddress", req.socket.remoteAddress);
@@ -31,6 +32,32 @@ router.get("/test", async function (req, res) {
res.status(200).send(`OK - ${commit}`);
});
// Exercises the enhanced logger's size-based S3 offload paths. Auth-gated so it
// cannot be hit anonymously.
router.get("/test-logs", eventAuthorizationMiddleware, (req, res) => {
  const { logger } = req;
  // Test 1: Log with a message that exceeds the size limit, triggering an upload to S3.
  const largeMessage = "A".repeat(256 * 1024 + 1); // Message larger than the log size limit
  logger.log(largeMessage, "error", "user123", null, { detail: "large log entry" });
  // Test 2: Log with a message that is within the size limit, should log directly using winston.
  const smallMessage = "A small log message";
  logger.log(smallMessage, "info", "user123", null, { detail: "small log entry" });
  // Test 3: Log with the `upload` flag set to `true`, forcing the log to be uploaded to S3.
  logger.log(
    "This log will be uploaded to S3 regardless of size",
    "warning",
    "user123",
    null,
    { detail: "upload log" },
    true
  );
  // Test 4: Log with a message that doesn't exceed the size limit and doesn't require an upload.
  logger.log("Normal log entry", "debug", "user123", { id: 4 }, { detail: "normal log entry" });
  // NOTE(review): responds 500 even though the message reads like success —
  // confirm this is intentional (e.g. to exercise error-path logging/alerting).
  return res.status(500).send("Logs tested.");
});
// Search
router.post("/search", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, os.search);
router.post("/opensearch", eventAuthorizationMiddleware, os.handler);
@@ -53,4 +80,7 @@ router.post("/taskHandler", validateFirebaseIdTokenMiddleware, taskHandler.taskH
// Canvas Test
router.post("/canvastest", validateFirebaseIdTokenMiddleware, canvastest);
// Alert Check
router.post("/alertcheck", eventAuthorizationMiddleware, alertCheck);
module.exports = router;

View File

@@ -44,4 +44,10 @@ function InstanceManager({ args, instance, debug, executeFunction, rome, promana
return propToReturn === undefined ? null : propToReturn;
}
// Resolve the AWS region for the current deployment instance via InstanceManager:
// the "imex" instance runs in ca-central-1, "rome" in us-east-2.
exports.InstanceRegion = () =>
  InstanceManager({
    imex: "ca-central-1",
    rome: "us-east-2"
  });
exports.default = InstanceManager;

View File

@@ -9,6 +9,45 @@ const winston = require("winston");
const WinstonCloudWatch = require("winston-cloudwatch");
const { isString, isEmpty } = require("lodash");
const { networkInterfaces, hostname } = require("node:os");
const { uploadFileToS3 } = require("./s3");
const { v4 } = require("uuid");
const { InstanceRegion } = require("./instanceMgr");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
warn: { level: 1, name: "warn" },
info: { level: 2, name: "info" },
http: { level: 3, name: "http" },
verbose: { level: 4, name: "verbose" },
debug: { level: 5, name: "debug" },
silly: { level: 6, name: "silly" }
};
const LOG_LENGTH_LIMIT = 256 * 1024; // 256KB
const S3_BUCKET_NAME = InstanceManager({
imex: "imex-large-log",
rome: "rome-large-log"
});
const region = InstanceRegion();
/**
 * Estimates the serialized character size of a log entry, used to decide
 * whether the entry must be offloaded to S3 instead of logged directly.
 * Counts each own enumerable key's length plus its value's string/JSON length.
 * @param {Object} logEntry - Flat log object (level, message, user, meta, ...).
 * @returns {number} Approximate character count of the entry.
 */
const estimateLogSize = (logEntry) => {
  let estimatedSize = 0;
  // Object.entries covers own enumerable string keys — same set the original
  // for...in + hasOwnProperty guard visited, without calling hasOwnProperty directly.
  for (const [key, value] of Object.entries(logEntry)) {
    if (value === undefined || value === null) {
      estimatedSize += key.length; // Only count the key length when there is no value.
    } else if (typeof value === "string") {
      estimatedSize += key.length + value.length;
    } else {
      // JSON.stringify returns undefined for functions/symbols; count those as 0
      // instead of throwing on `.length` of undefined.
      const serialized = JSON.stringify(value);
      estimatedSize += key.length + (serialized === undefined ? 0 : serialized.length);
    }
  }
  return estimatedSize;
};
const normalizeLevel = (level) => (level ? level.toLowerCase() : LOG_LEVELS.debug.name);
const createLogger = () => {
try {
@@ -18,10 +57,7 @@ const createLogger = () => {
const winstonCloudwatchTransportDefaults = {
logGroupName: logGroupName,
awsOptions: {
region: InstanceManager({
imex: "ca-central-1",
rome: "us-east-2"
})
region
},
jsonMessage: true
};
@@ -112,15 +148,66 @@ const createLogger = () => {
);
}
const log = (message, type, user, record, meta) => {
winstonLogger.log({
level: type.toLowerCase(),
const log = (message, type, user, record, meta, upload) => {
const logEntry = {
level: normalizeLevel(type),
message,
user,
record,
hostname: internalHostname,
meta
});
};
// Serializes an oversized log entry to JSON and ships it to the large-log S3
// bucket under a unique, timestamped key. Fire-and-forget: the upload promise
// is not awaited; success/failure is reported via a follow-up (truncated) log line.
const uploadLogToS3 = (logEntry, message, type, user) => {
  const uniqueId = v4();
  // Colons are invalid/awkward in S3 keys and URLs, so strip them from the ISO timestamp.
  const dateTimeString = new Date().toISOString().replace(/:/g, "-");
  const envName = process.env?.NODE_ENV ? process.env.NODE_ENV : "";
  const logStreamName = `${envName}-${internalHostname}-${dateTimeString}-${uniqueId}.json`;
  const logString = JSON.stringify(logEntry);
  // Human-clickable URL for the uploaded object: LocalStack endpoint in dev,
  // virtual-hosted S3 URL in the deployed region otherwise.
  const webPath = isLocal
    ? `https://${S3_BUCKET_NAME}.s3.localhost.localstack.cloud:4566/${logStreamName}`
    : `https://${S3_BUCKET_NAME}.s3.${region}.amazonaws.com/${logStreamName}`;
  uploadFileToS3({ bucketName: S3_BUCKET_NAME, key: logStreamName, content: logString })
    .then(() => {
      // Recursive call into log() with a small payload — only a 200-char message
      // preview, so this follow-up entry cannot itself exceed the size limit.
      log("A log file has been uploaded to S3", "info", "S3", null, {
        logStreamName,
        webPath,
        message: message?.slice(0, 200),
        type,
        user
      });
    })
    .catch((err) => {
      // Upload failures are logged (truncated) rather than thrown, since there
      // is no caller awaiting this promise.
      log("Error in S3 Upload", "error", "S3", null, {
        logStreamName,
        webPath,
        message: message?.slice(0, 100),
        type,
        user,
        errorMessage: err?.message?.slice(0, 100)
      });
    });
};
// Diverts the current logEntry to S3 when its estimated size approaches the
// log size limit. Returns true when the entry was offloaded (caller skips winston).
const checkAndUploadLog = () => {
  const estimatedSize = estimateLogSize(logEntry);
  // Trigger at 90% of the limit. The original also tested `> LOG_LENGTH_LIMIT`,
  // but that disjunct is implied by this one, so it was removed.
  if (estimatedSize > LOG_LENGTH_LIMIT * 0.9) {
    uploadLogToS3(logEntry, message, type, user);
    return true;
  }
  return false;
};
// Upload log immediately if upload is true, otherwise check the log size.
if (upload) {
uploadLogToS3(logEntry, message, type, user);
return;
}
if (checkAndUploadLog()) return;
winstonLogger.log(logEntry);
};
return {
@@ -131,7 +218,8 @@ const createLogger = () => {
console.error("Error setting up enhanced Logger, defaulting to console.: " + e?.message || "");
return {
log: console.log,
logger: console.log
logger: console.log,
LOG_LEVELS
};
}
};

109
server/utils/s3.js Normal file
View File

@@ -0,0 +1,109 @@
const {
  S3Client,
  PutObjectCommand,
  GetObjectCommand,
  HeadObjectCommand,
  ListObjectsV2Command,
  DeleteObjectCommand,
  CopyObjectCommand
} = require("@aws-sdk/client-s3");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { InstanceRegion } = require("./instanceMgr");
const { isString, isEmpty } = require("lodash");
/**
 * Builds the module-level S3 helper, bound to the current instance's region.
 * When LOCALSTACK_HOSTNAME is set, targets a local LocalStack endpoint instead of AWS.
 */
const createS3Client = () => {
  const S3Options = {
    region: InstanceRegion(),
    credentials: defaultProvider()
  };
  const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);
  if (isLocal) {
    S3Options.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
    S3Options.forcePathStyle = true; // Needed for LocalStack to avoid bucket name as hostname
  }
  const s3Client = new S3Client(S3Options);
  /**
   * Uploads a file to the specified S3 bucket and key.
   * @param {{bucketName: string, key: string, content: any, contentType?: string}} params
   * @returns {Promise<Object>} PutObject response. ContentType defaults to JSON.
   */
  const uploadFileToS3 = async ({ bucketName, key, content, contentType }) => {
    const params = {
      Bucket: bucketName,
      Key: key,
      Body: content,
      ContentType: contentType ?? "application/json"
    };
    const command = new PutObjectCommand(params);
    return await s3Client.send(command);
  };
  /**
   * Downloads a file from the specified S3 bucket and key.
   * @returns {Promise<Object>} The response Body stream.
   */
  const downloadFileFromS3 = async ({ bucketName, key }) => {
    const params = { Bucket: bucketName, Key: key };
    const command = new GetObjectCommand(params);
    const data = await s3Client.send(command);
    return data.Body;
  };
  /**
   * Lists objects in the specified S3 bucket, optionally filtered by key prefix.
   * @returns {Promise<Array>} Object summaries; empty array when the bucket has none.
   */
  const listFilesInS3Bucket = async (bucketName, prefix = "") => {
    const params = { Bucket: bucketName, Prefix: prefix };
    const command = new ListObjectsV2Command(params);
    const data = await s3Client.send(command);
    return data.Contents || [];
  };
  /**
   * Deletes a file from the specified S3 bucket and key.
   */
  const deleteFileFromS3 = async ({ bucketName, key }) => {
    const params = { Bucket: bucketName, Key: key };
    const command = new DeleteObjectCommand(params);
    return await s3Client.send(command);
  };
  /**
   * Copies a file within S3 from a source bucket/key to a destination bucket/key.
   */
  const copyFileInS3 = async ({ sourceBucket, sourceKey, destinationBucket, destinationKey }) => {
    const params = {
      CopySource: `/${sourceBucket}/${sourceKey}`,
      Bucket: destinationBucket,
      Key: destinationKey
    };
    const command = new CopyObjectCommand(params);
    return await s3Client.send(command);
  };
  /**
   * Checks if a file exists in the specified S3 bucket and key.
   * Uses a HEAD request so the object body is never downloaded just to test existence
   * (the original implementation fetched the full object).
   * @returns {Promise<boolean>}
   */
  const fileExistsInS3 = async ({ bucketName, key }) => {
    try {
      await s3Client.send(new HeadObjectCommand({ Bucket: bucketName, Key: key }));
      return true;
    } catch (error) {
      // HEAD on a missing key surfaces as "NotFound"; keep "NoSuchKey" for parity
      // with GET-based checks.
      if (error.name === "NoSuchKey" || error.name === "NotFound") {
        return false;
      }
      throw error;
    }
  };
  return {
    uploadFileToS3,
    downloadFileFromS3,
    listFilesInS3Bucket,
    deleteFileFromS3,
    copyFileInS3,
    fileExistsInS3,
    // NOTE(review): spreading a class instance copies only its own enumerable
    // properties — prototype methods such as .send are NOT included. Kept for
    // backward compatibility; use `client` below for the full client.
    ...s3Client,
    client: s3Client
  };
};
module.exports = createS3Client();

View File

@@ -155,10 +155,17 @@ function createJsonEvent(socket, level, message, json) {
message
});
}
logger.log("ws-log-event-json", level, socket.user.email, socket.recordid, {
wsmessage: message,
json
});
logger.log(
"ws-log-event-json",
level,
socket.user.email,
socket.recordid,
{
wsmessage: message,
json
},
true
);
if (socket.logEvents && isArray(socket.logEvents)) {
socket.logEvents.push({
@@ -189,7 +196,8 @@ function createXmlEvent(socket, xml, message, isError = false) {
{
wsmessage: message,
xml
}
},
true
);
if (socket.logEvents && isArray(socket.logEvents)) {
@@ -212,7 +220,7 @@ function LogLevelHierarchy(level) {
return 4;
case "INFO":
return 3;
case "WARNING":
case "WARN":
return 2;
case "ERROR":
return 1;