diff --git a/.platform/nginx/conf.d/proxy.conf b/.platform/nginx/conf.d/proxy.conf index ae3fb47f6..2dc60b344 100644 --- a/.platform/nginx/conf.d/proxy.conf +++ b/.platform/nginx/conf.d/proxy.conf @@ -1 +1,2 @@ -client_max_body_size 50M; \ No newline at end of file +client_max_body_size 50M; +client_body_buffer_size 5M; diff --git a/client/src/App/App.container.jsx b/client/src/App/App.container.jsx index e4df37c61..d6f3a53fd 100644 --- a/client/src/App/App.container.jsx +++ b/client/src/App/App.container.jsx @@ -2,8 +2,6 @@ import { ApolloProvider } from "@apollo/client"; import { SplitFactoryProvider, SplitSdk } from "@splitsoftware/splitio-react"; import { ConfigProvider } from "antd"; import enLocale from "antd/es/locale/en_US"; -import dayjs from "../utils/day"; -import "dayjs/locale/en"; import React from "react"; import { useTranslation } from "react-i18next"; import GlobalLoadingBar from "../components/global-loading-bar/global-loading-bar.component"; @@ -19,8 +17,6 @@ if (import.meta.env.DEV) { Userpilot.initialize("NX-69145f08"); } -dayjs.locale("en"); - const config = { core: { authorizationKey: import.meta.env.VITE_APP_SPLIT_API, diff --git a/client/src/components/chat-popup/chat-popup.component.jsx b/client/src/components/chat-popup/chat-popup.component.jsx index c608f8b11..d557ad91d 100644 --- a/client/src/components/chat-popup/chat-popup.component.jsx +++ b/client/src/components/chat-popup/chat-popup.component.jsx @@ -45,7 +45,7 @@ export function ChatPopupComponent({ chatVisible, selectedConversation, toggleCh if (fcmToken) { setpollInterval(0); } else { - setpollInterval(60000); + setpollInterval(90000); } }, [fcmToken]); diff --git a/client/src/components/dms-log-events/dms-log-events.component.jsx b/client/src/components/dms-log-events/dms-log-events.component.jsx index 037664900..b73e5dbe7 100644 --- a/client/src/components/dms-log-events/dms-log-events.component.jsx +++ b/client/src/components/dms-log-events/dms-log-events.component.jsx @@ 
-40,13 +40,11 @@ export function DmsLogEvents({ socket, logs, bodyshop }) { function LogLevelHierarchy(level) { switch (level) { - case "TRACE": - return "pink"; case "DEBUG": return "orange"; case "INFO": return "blue"; - case "WARNING": + case "WARN": return "yellow"; case "ERROR": return "red"; diff --git a/client/src/components/eula/eula.component.jsx b/client/src/components/eula/eula.component.jsx index 8d2227e02..399a52b40 100644 --- a/client/src/components/eula/eula.component.jsx +++ b/client/src/components/eula/eula.component.jsx @@ -8,7 +8,7 @@ import { INSERT_EULA_ACCEPTANCE } from "../../graphql/user.queries"; import { useMutation } from "@apollo/client"; import { acceptEula } from "../../redux/user/user.actions"; import { useTranslation } from "react-i18next"; -import day from "../../utils/day"; +import dayjs from "../../utils/day"; import "./eula.styles.scss"; import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component.jsx"; @@ -208,7 +208,7 @@ const EulaFormComponent = ({ form, handleChange, onFinish, t }) => ( { required: true, validator: (_, value) => { - if (day(value).isSame(day(), "day")) { + if (dayjs(value).isSame(dayjs(), "day")) { return Promise.resolve(); } return Promise.reject(new Error(t("eula.messages.date_accepted"))); diff --git a/client/src/components/form-date-time-picker/form-date-time-picker.component.jsx b/client/src/components/form-date-time-picker/form-date-time-picker.component.jsx index e34ae14f7..c9bbc0dd2 100644 --- a/client/src/components/form-date-time-picker/form-date-time-picker.component.jsx +++ b/client/src/components/form-date-time-picker/form-date-time-picker.component.jsx @@ -2,21 +2,38 @@ import { DatePicker } from "antd"; import PropTypes from "prop-types"; import React, { useCallback, useState } from "react"; import { useTranslation } from "react-i18next"; +import { connect } from "react-redux"; +import { createStructuredSelector } from "reselect"; +import { selectBodyshop } from 
"../../redux/user/user.selectors.js"; import dayjs from "../../utils/day"; import { fuzzyMatchDate } from "./formats.js"; -const DateTimePicker = ({ value, onChange, onBlur, id, onlyFuture, onlyToday, isDateOnly = false, ...restProps }) => { +const mapStateToProps = createStructuredSelector({ + bodyshop: selectBodyshop +}); + +const DateTimePicker = ({ + value, + onChange, + onBlur, + id, + onlyFuture, + onlyToday, + isDateOnly = false, + bodyshop, + ...restProps +}) => { const [isManualInput, setIsManualInput] = useState(false); const { t } = useTranslation(); const handleChange = useCallback( (newDate) => { if (onChange) { - onChange(newDate || null); + onChange(bodyshop?.timezone && newDate ? dayjs(newDate).tz(bodyshop.timezone, true) : newDate); } setIsManualInput(false); }, - [onChange] + [onChange, bodyshop?.timezone] ); const handleBlur = useCallback( @@ -102,4 +119,4 @@ DateTimePicker.propTypes = { isDateOnly: PropTypes.bool }; -export default React.memo(DateTimePicker); +export default connect(mapStateToProps, null)(DateTimePicker); diff --git a/client/src/components/header/header.component.jsx b/client/src/components/header/header.component.jsx index c00aa33e5..341e19ba0 100644 --- a/client/src/components/header/header.component.jsx +++ b/client/src/components/header/header.component.jsx @@ -116,18 +116,15 @@ function Header({ const { t } = useTranslation(); - const deleteBetaCookie = () => { - const cookieExists = document.cookie.split("; ").some((row) => row.startsWith(`betaSwitchImex=`)); - if (cookieExists) { - const domain = window.location.hostname.split(".").slice(-2).join("."); - document.cookie = `betaSwitchImex=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=/; domain=.${domain}`; - console.log(`betaSwitchImex cookie deleted`); - } else { - console.log(`betaSwitchImex cookie does not exist`); - } - }; - - deleteBetaCookie(); + // const deleteBetaCookie = () => { + // const cookieExists = document.cookie.split("; ").some((row) => 
row.startsWith(`betaSwitchImex=`)); + // if (cookieExists) { + // const domain = window.location.hostname.split(".").slice(-2).join("."); + // document.cookie = `betaSwitchImex=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=/; domain=.${domain}`; + // } + // }; + // + // deleteBetaCookie(); const accountingChildren = []; diff --git a/client/src/components/job-lifecycle/job-lifecycle.component.jsx b/client/src/components/job-lifecycle/job-lifecycle.component.jsx index 794524c2a..e648ad4d5 100644 --- a/client/src/components/job-lifecycle/job-lifecycle.component.jsx +++ b/client/src/components/job-lifecycle/job-lifecycle.component.jsx @@ -1,5 +1,5 @@ import React, { useCallback, useEffect, useState } from "react"; -import day from "../../utils/day"; +import dayjs from "../../utils/day"; import axios from "axios"; import { Badge, Card, Space, Table, Tag } from "antd"; import { gql, useQuery } from "@apollo/client"; @@ -72,7 +72,7 @@ export function JobLifecycleComponent({ job, statuses, ...rest }) { dataIndex: "start", key: "start", render: (text) => DateTimeFormatterFunction(text), - sorter: (a, b) => day(a.start).unix() - day(b.start).unix() + sorter: (a, b) => dayjs(a.start).unix() - dayjs(b.start).unix() }, { title: t("job_lifecycle.columns.relative_start"), @@ -90,7 +90,7 @@ export function JobLifecycleComponent({ job, statuses, ...rest }) { } return isEmpty(a.end) ? 1 : -1; } - return day(a.end).unix() - day(b.end).unix(); + return dayjs(a.end).unix() - dayjs(b.end).unix(); }, render: (text) => (isEmpty(text) ? 
t("job_lifecycle.content.not_available") : DateTimeFormatterFunction(text)) }, diff --git a/client/src/components/job-line-dispatch-button/job-line-dispatch-button.component.jsx b/client/src/components/job-line-dispatch-button/job-line-dispatch-button.component.jsx index 8e570ab2d..ce0b6e4fe 100644 --- a/client/src/components/job-line-dispatch-button/job-line-dispatch-button.component.jsx +++ b/client/src/components/job-line-dispatch-button/job-line-dispatch-button.component.jsx @@ -2,7 +2,7 @@ import React, { useState } from "react"; import { useMutation } from "@apollo/client"; import { Button, Form, notification, Popover, Select, Space } from "antd"; -import day from "../../utils/day"; +import dayjs from "../../utils/day"; import { useTranslation } from "react-i18next"; import { connect } from "react-redux"; import { createStructuredSelector } from "reselect"; @@ -48,7 +48,7 @@ export function JobLineDispatchButton({ const result = await dispatchLines({ variables: { partsDispatch: { - dispatched_at: day(), + dispatched_at: dayjs(), employeeid: values.employeeid, jobid: job.id, dispatched_by: currentUser.email, @@ -138,7 +138,11 @@ export function JobLineDispatchButton({ return ( - diff --git a/client/src/components/parts-dispatch-expander/parts-dispatch-expander.component.jsx b/client/src/components/parts-dispatch-expander/parts-dispatch-expander.component.jsx index 477240357..2a84527cc 100644 --- a/client/src/components/parts-dispatch-expander/parts-dispatch-expander.component.jsx +++ b/client/src/components/parts-dispatch-expander/parts-dispatch-expander.component.jsx @@ -1,6 +1,6 @@ import { useMutation } from "@apollo/client"; import { Button, Card, Col, notification, Row, Table } from "antd"; -import day from "../../utils/day"; +import dayjs from "../../utils/day"; import React from "react"; import { useTranslation } from "react-i18next"; import { UPDATE_PARTS_DISPATCH_LINE } from "../../graphql/parts-dispatch.queries"; @@ -11,7 +11,7 @@ export default 
function PartsDispatchExpander({ dispatch, job }) { const [updateDispatchLine] = useMutation(UPDATE_PARTS_DISPATCH_LINE); const handleAccept = async ({ partsDispatchLineId }) => { - const accepted_at = day(); + const accepted_at = dayjs(); const result = await updateDispatchLine({ variables: { id: partsDispatchLineId, line: { accepted_at } }, optimisticResponse: { diff --git a/client/src/components/schedule-calendar-wrapper/localizer.js b/client/src/components/schedule-calendar-wrapper/localizer.js index e91016416..9935252b4 100644 --- a/client/src/components/schedule-calendar-wrapper/localizer.js +++ b/client/src/components/schedule-calendar-wrapper/localizer.js @@ -120,14 +120,6 @@ var formats = { }; const localizer = (dayjsLib) => { - // load dayjs plugins - dayjsLib.extend(isBetween); - dayjsLib.extend(isSameOrAfter); - dayjsLib.extend(isSameOrBefore); - dayjsLib.extend(localeData); - dayjsLib.extend(localizedFormat); - dayjsLib.extend(minMax); - dayjsLib.extend(utc); var locale = function locale(dj, c) { return c ? dj.locale(c) : dj; }; @@ -136,7 +128,8 @@ const localizer = (dayjsLib) => { // then use the timezone aware version //TODO This was the issue entirely... - // var dayjs = dayjsLib.tz ? dayjsLib.tz : dayjsLib; + // var dayjs = dayjsLib.tz ? 
dayjsLib.tz : dayjsLib; + var dayjs = dayjsLib; function getTimezoneOffset(date) { diff --git a/client/src/components/schedule-calendar-wrapper/scheduler-calendar-wrapper.component.jsx b/client/src/components/schedule-calendar-wrapper/scheduler-calendar-wrapper.component.jsx index fb866d89d..4d6f8fe78 100644 --- a/client/src/components/schedule-calendar-wrapper/scheduler-calendar-wrapper.component.jsx +++ b/client/src/components/schedule-calendar-wrapper/scheduler-calendar-wrapper.component.jsx @@ -20,6 +20,7 @@ const mapStateToProps = createStructuredSelector({ bodyshop: selectBodyshop, problemJobs: selectProblemJobs }); + const localizer = local(dayjs); export function ScheduleCalendarWrapperComponent({ diff --git a/client/src/components/scoreboard-display/scoreboard-display.component.jsx b/client/src/components/scoreboard-display/scoreboard-display.component.jsx index c74cecdc0..5a4e6fe72 100644 --- a/client/src/components/scoreboard-display/scoreboard-display.component.jsx +++ b/client/src/components/scoreboard-display/scoreboard-display.component.jsx @@ -4,12 +4,12 @@ import ScoreboardChart from "../scoreboard-chart/scoreboard-chart.component"; import ScoreboardLastDays from "../scoreboard-last-days/scoreboard-last-days.component"; import ScoreboardTargetsTable from "../scoreboard-targets-table/scoreboard-targets-table.component"; +import { useApolloClient, useQuery } from "@apollo/client"; import { connect } from "react-redux"; import { createStructuredSelector } from "reselect"; +import { GET_BLOCKED_DAYS, QUERY_SCOREBOARD } from "../../graphql/scoreboard.queries"; import { selectBodyshop } from "../../redux/user/user.selectors"; import dayjs from "../../utils/day"; -import { useApolloClient, useQuery } from "@apollo/client"; -import { GET_BLOCKED_DAYS, QUERY_SCOREBOARD } from "../../graphql/scoreboard.queries"; const mapStateToProps = createStructuredSelector({ //currentUser: selectCurrentUser @@ -26,7 +26,7 @@ export function ScoreboardDisplayComponent({ 
bodyshop }) { start: dayjs().startOf("month"), end: dayjs().endOf("month") }, - pollInterval: 60000 + pollInterval: 60000*5 }); const { data } = scoreboardSubscription; diff --git a/client/src/components/scoreboard-timetickets-stats/scoreboard-timetickets.component.jsx b/client/src/components/scoreboard-timetickets-stats/scoreboard-timetickets.component.jsx index 489467740..07e77d479 100644 --- a/client/src/components/scoreboard-timetickets-stats/scoreboard-timetickets.component.jsx +++ b/client/src/components/scoreboard-timetickets-stats/scoreboard-timetickets.component.jsx @@ -1,13 +1,13 @@ import { useQuery } from "@apollo/client"; import { Col, Row } from "antd"; import _ from "lodash"; -import dayjs from "../../utils/day"; import React, { useMemo } from "react"; import { useTranslation } from "react-i18next"; import { connect } from "react-redux"; import { createStructuredSelector } from "reselect"; import { QUERY_TIME_TICKETS_IN_RANGE_SB } from "../../graphql/timetickets.queries"; import { selectBodyshop } from "../../redux/user/user.selectors"; +import dayjs from "../../utils/day"; import AlertComponent from "../alert/alert.component"; import LoadingSpinner from "../loading-spinner/loading-spinner.component"; import * as Utils from "../scoreboard-targets-table/scoreboard-targets-table.util"; @@ -86,7 +86,7 @@ export function ScoreboardTimeTicketsStats({ bodyshop }) { }, fetchPolicy: "network-only", nextFetchPolicy: "network-only", - pollInterval: 60000, + pollInterval: 60000*5, skip: !fixedPeriods }); diff --git a/client/src/components/scoreboard-timetickets/scoreboard-timetickets.component.jsx b/client/src/components/scoreboard-timetickets/scoreboard-timetickets.component.jsx index 2587cb74e..fb970be5d 100644 --- a/client/src/components/scoreboard-timetickets/scoreboard-timetickets.component.jsx +++ b/client/src/components/scoreboard-timetickets/scoreboard-timetickets.component.jsx @@ -1,11 +1,11 @@ import { useQuery } from "@apollo/client"; import { Col, 
Row } from "antd"; import _ from "lodash"; -import dayjs from "../../utils/day"; import queryString from "query-string"; import React, { useMemo } from "react"; import { useLocation } from "react-router-dom"; import { QUERY_TIME_TICKETS_IN_RANGE_SB } from "../../graphql/timetickets.queries"; +import dayjs from "../../utils/day"; import AlertComponent from "../alert/alert.component"; import LoadingSpinner from "../loading-spinner/loading-spinner.component"; import * as Utils from "../scoreboard-targets-table/scoreboard-targets-table.util"; @@ -68,7 +68,7 @@ export default function ScoreboardTimeTickets() { }, fetchPolicy: "network-only", nextFetchPolicy: "network-only", - pollInterval: 60000, + pollInterval: 60000*5, skip: !fixedPeriods }); diff --git a/client/src/components/task-list/task-list.component.jsx b/client/src/components/task-list/task-list.component.jsx index 0c01ef6a8..ba7d71b17 100644 --- a/client/src/components/task-list/task-list.component.jsx +++ b/client/src/components/task-list/task-list.component.jsx @@ -144,7 +144,7 @@ function TaskListComponent({ title: t("tasks.fields.created_by"), dataIndex: "created_by", key: "created_by", - width: "10%", + width: "8%", defaultSortOrder: "descend", sorter: true, sortOrder: sortcolumn === "created_by" && sortorder, @@ -166,65 +166,70 @@ function TaskListComponent({ }); } - if (showRo) { - columns.push({ - title: t("tasks.fields.job.ro_number"), - dataIndex: ["job", "ro_number"], - key: "job.ro_number", - width: "8%", - render: (text, record) => - record.job ? 
( - {record.job.ro_number || t("general.labels.na")} - ) : ( - t("general.labels.na") - ) - }); - } + columns.push({ + title: t("tasks.fields.related_items"), + key: "related_items", + width: "12%", + render: (text, record) => { + const items = []; + + // Job + if (showRo && record.job) { + items.push( + + {t("tasks.fields.job.ro_number")}: {record.job.ro_number} + + ); + } + if (showRo && !record.job) { + items.push(`${t("tasks.fields.job.ro_number")}: ${t("general.labels.na")}`); + } + + // Jobline + if (record.jobline?.line_desc) { + items.push( + + {t("tasks.fields.jobline")}: {record.jobline.line_desc} + + ); + } + + // Parts Order + if (record.parts_order) { + const { order_number, vendor } = record.parts_order; + const partsOrderText = + order_number && vendor?.name ? `${order_number} - ${vendor.name}` : t("general.labels.na"); + items.push( + + {t("tasks.fields.parts_order")}: {partsOrderText} + + ); + } + + // Bill + if (record.bill) { + const { invoice_number, vendor } = record.bill; + const billText = invoice_number && vendor?.name ? `${invoice_number} - ${vendor.name}` : t("general.labels.na"); + items.push( + + {t("tasks.fields.bill")}: {billText} + + ); + } + + return items.length > 0 ? {items} : null; + } + }); columns.push( - { - title: t("tasks.fields.jobline"), - dataIndex: ["jobline", "id"], - key: "jobline.id", - width: "8%", - render: (text, record) => record?.jobline?.line_desc || "" - }, - { - title: t("tasks.fields.parts_order"), - dataIndex: ["parts_order", "id"], - key: "part_order.id", - width: "8%", - render: (text, record) => - record.parts_order ? ( - - {record.parts_order.order_number && record.parts_order.vendor && record.parts_order.vendor.name - ? `${record.parts_order.order_number} - ${record.parts_order.vendor.name}` - : t("general.labels.na")} - - ) : ( - "" - ) - }, - { - title: t("tasks.fields.bill"), - dataIndex: ["bill", "id"], - key: "bill.id", - width: "10%", - render: (text, record) => - record.bill ? 
( - - {record.bill.invoice_number && record.bill.vendor && record.bill.vendor.name - ? `${record.bill.invoice_number} - ${record.bill.vendor.name}` - : t("general.labels.na")} - - ) : ( - "" - ) - }, { title: t("tasks.fields.title"), dataIndex: "title", key: "title", + minWidth: "20%", sorter: true, sortOrder: sortcolumn === "title" && sortorder }, @@ -258,7 +263,7 @@ function TaskListComponent({ { title: t("tasks.fields.actions"), key: "toggleCompleted", - width: "5%", + width: "8%", render: (text, record) => ( diff --git a/client/src/pages/dms/dms.container.jsx b/client/src/pages/dms/dms.container.jsx index 9cbc3ea50..3a8ac77b0 100644 --- a/client/src/pages/dms/dms.container.jsx +++ b/client/src/pages/dms/dms.container.jsx @@ -90,7 +90,7 @@ export function DmsContainer({ bodyshop, setBreadcrumbs, setSelectedHeader, inse ...logs, { timestamp: new Date(), - level: "WARNING", + level: "warn", message: "Reconnected to CDK Export Service" } ]; @@ -173,10 +173,9 @@ export function DmsContainer({ bodyshop, setBreadcrumbs, setSelectedHeader, inse socket.emit("set-log-level", value); }} > - TRACE DEBUG INFO - WARNING + WARN ERROR diff --git a/client/src/redux/user/user.sagas.js b/client/src/redux/user/user.sagas.js index f565ea544..423b40283 100644 --- a/client/src/redux/user/user.sagas.js +++ b/client/src/redux/user/user.sagas.js @@ -28,7 +28,7 @@ import { } from "../../firebase/firebase.utils"; import { QUERY_EULA } from "../../graphql/bodyshop.queries"; import client from "../../utils/GraphQLClient"; -import day from "../../utils/day"; +import dayjs from "../../utils/day"; import InstanceRenderManager from "../../utils/instanceRenderMgr"; import { checkInstanceId, @@ -96,7 +96,7 @@ export function* isUserAuthenticated() { const eulaQuery = yield client.query({ query: QUERY_EULA, variables: { - now: day() + now: dayjs() } }); @@ -314,8 +314,7 @@ export function* SetAuthLevelFromShopDetails({ payload }) { try { const userEmail = yield select((state) => 
state.user.currentUser.email); try { - console.log("Setting shop timezone."); - day.tz.setDefault(payload.timezone); + dayjs.tz.setDefault(payload.timezone); } catch (error) { console.log(error); } diff --git a/client/src/translations/en_us/common.json b/client/src/translations/en_us/common.json index 18b7b4e32..38f13e032 100644 --- a/client/src/translations/en_us/common.json +++ b/client/src/translations/en_us/common.json @@ -3204,6 +3204,7 @@ "medium": "Medium" }, "priority": "Priority", + "related_items": "Related Items", "remind_at": "Remind At", "title": "Title" }, diff --git a/client/src/translations/es/common.json b/client/src/translations/es/common.json index 51b4bd8c0..ed7760e9d 100644 --- a/client/src/translations/es/common.json +++ b/client/src/translations/es/common.json @@ -3204,6 +3204,7 @@ "medium": "" }, "priority": "", + "related_items": "", "remind_at": "", "title": "" }, diff --git a/client/src/translations/fr/common.json b/client/src/translations/fr/common.json index 0560af00a..4a64bcc55 100644 --- a/client/src/translations/fr/common.json +++ b/client/src/translations/fr/common.json @@ -3204,6 +3204,7 @@ "medium": "" }, "priority": "", + "related_items": "", "remind_at": "", "title": "" }, diff --git a/client/src/utils/day.js b/client/src/utils/day.js index 3fb824b11..3194cfb31 100644 --- a/client/src/utils/day.js +++ b/client/src/utils/day.js @@ -1,5 +1,6 @@ import dayjs from "dayjs"; +import "dayjs/locale/en"; import dayjsBusinessDays from "dayjs-business-days2"; import isSameOrAfter from "dayjs/plugin/isSameOrAfter"; import updateLocale from "dayjs/plugin/updateLocale"; @@ -64,4 +65,6 @@ dayjs.extend(minMax); dayjs.extend(isBetween); dayjs.extend(dayjsBusinessDays); +dayjs.locale("en"); + export default dayjs; diff --git a/hasura/metadata/tables.yaml b/hasura/metadata/tables.yaml index 70be46ad0..240b218a4 100644 --- a/hasura/metadata/tables.yaml +++ b/hasura/metadata/tables.yaml @@ -69,7 +69,6 @@ delete_permissions: - role: user permission: 
- backend_only: false filter: jobline: job: @@ -180,7 +179,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -387,7 +385,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -504,7 +501,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bill: job: @@ -671,7 +667,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: _and: - job: @@ -1285,7 +1280,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: courtesycar: bodyshop: @@ -1526,7 +1520,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -1786,7 +1779,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -1920,7 +1912,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: _or: - job: @@ -2105,7 +2096,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: employee_team: bodyshop: @@ -2268,7 +2258,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: employee: bodyshop: @@ -2449,7 +2438,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -2696,7 +2684,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -2808,7 +2795,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: conversation: bodyshop: @@ -3123,7 +3109,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: job: bodyshop: @@ -4232,7 +4217,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -4248,41 +4232,41 @@ enable_manual: false update: columns: - - clm_no - - v_make_desc - - date_next_contact - - status - - employee_csr - employee_prep - clm_total - suspended - employee_body - 
ro_number - - actual_in - ownr_co_nm - - v_model_yr - - comment - - job_totals - v_vin - - ownr_fn - scheduled_completion - special_coverage_policy - - v_color - - ca_gst_registrant - scheduled_delivery - actual_delivery - actual_completion - kanbanparent - est_ct_fn + - alt_transport + - v_model_desc + - clm_no + - v_make_desc + - date_next_contact + - status + - employee_csr + - actual_in + - v_model_yr + - comment + - job_totals + - ownr_fn + - v_color + - ca_gst_registrant - employee_refinish - ownr_ph1 - date_last_contacted - - alt_transport - inproduction - est_ct_ln - production_vars - category - - v_model_desc - date_invoiced - est_co_nm - ownr_ln @@ -4295,6 +4279,12 @@ - name: event-secret value_from_env: EVENT_SECRET request_transform: + body: + action: transform + template: |- + { + "data": {{$body?.event?.data?.new}} + } method: POST query_params: {} template_engine: Kriti @@ -4496,7 +4486,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: conversation: bodyshop: @@ -4670,7 +4659,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: job: bodyshop: @@ -4805,7 +4793,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -5110,7 +5097,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: parts_order: job: @@ -5243,7 +5229,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: job: bodyshop: @@ -5419,7 +5404,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: job: bodyshop: @@ -5559,7 +5543,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -5670,7 +5653,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: _or: - parentjob_rel: @@ -5760,7 +5742,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: job: bodyshop: @@ -6045,7 +6026,6 @@ delete_permissions: - role: user 
permission: - backend_only: false filter: bodyshop: associations: @@ -6541,7 +6521,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: @@ -6698,7 +6677,6 @@ delete_permissions: - role: user permission: - backend_only: false filter: bodyshop: associations: diff --git a/server.js b/server.js index 8acba572a..a92ab7241 100644 --- a/server.js +++ b/server.js @@ -153,18 +153,13 @@ const connectToRedisCluster = async () => { } else { // Use the Dockerized Redis cluster in development if (isEmpty(process.env?.REDIS_URL) || !isString(process.env?.REDIS_URL)) { - logger.log(`[${process.env.NODE_ENV}] No or Malformed REDIS_URL present.`, "ERROR", "redis", "api"); + logger.log(`No or Malformed REDIS_URL present.`, "ERROR", "redis", "api"); process.exit(1); } try { redisServers = JSON.parse(process.env.REDIS_URL); } catch (error) { - logger.log( - `[${process.env.NODE_ENV}] Failed to parse REDIS_URL: ${error.message}. Exiting...`, - "ERROR", - "redis", - "api" - ); + logger.log(`Failed to parse REDIS_URL: ${error.message}. Exiting...`, "ERROR", "redis", "api"); process.exit(1); } } @@ -172,12 +167,7 @@ const connectToRedisCluster = async () => { const clusterRetryStrategy = (times) => { const delay = Math.min(CLUSTER_RETRY_BASE_DELAY + times * 50, CLUSTER_RETRY_MAX_DELAY) + Math.random() * CLUSTER_RETRY_JITTER; - logger.log( - `[${process.env.NODE_ENV}] Redis cluster not yet ready. Retrying in ${delay.toFixed(2)}ms`, - "WARN", - "redis", - "api" - ); + logger.log(`Redis cluster not yet ready. 
Retrying in ${delay.toFixed(2)}ms`, "WARN", "redis", "api"); return delay; }; @@ -194,12 +184,12 @@ const connectToRedisCluster = async () => { return new Promise((resolve, reject) => { redisCluster.on("ready", () => { - logger.log(`[${process.env.NODE_ENV}] Redis cluster connection established.`, "INFO", "redis", "api"); + logger.log(`Redis cluster connection established.`, "INFO", "redis", "api"); resolve(redisCluster); }); redisCluster.on("error", (err) => { - logger.log(`[${process.env.NODE_ENV}] Redis cluster connection failed: ${err.message}`, "ERROR", "redis", "api"); + logger.log(`Redis cluster connection failed: ${err.message}`, "ERROR", "redis", "api"); reject(err); }); }); @@ -215,7 +205,7 @@ const applySocketIO = async ({ server, app }) => { // Handle errors redisCluster.on("error", (err) => { - logger.log(`[${process.env.NODE_ENV}] Redis ERROR`, "ERROR", "redis", "api"); + logger.log(`Redis ERROR`, "ERROR", "redis", "api"); }); const pubClient = redisCluster; @@ -249,7 +239,7 @@ const applySocketIO = async ({ server, app }) => { }); if (isString(process.env.REDIS_ADMIN_PASS) && !isEmpty(process.env.REDIS_ADMIN_PASS)) { - logger.log(`[${process.env.NODE_ENV}] Initializing Redis Admin UI....`, "INFO", "redis", "api"); + logger.log(`Initializing Redis Admin UI....`, "INFO", "redis", "api"); instrument(ioRedis, { auth: { type: "basic", @@ -312,9 +302,9 @@ const main = async () => { try { await server.listen(port); - logger.log(`[${process.env.NODE_ENV}] Server started on port ${port}`, "INFO", "api"); + logger.log(`Server started on port ${port}`, "INFO", "api"); } catch (error) { - logger.log(`[${process.env.NODE_ENV}] Server failed to start on port ${port}`, "ERROR", "api", error); + logger.log(`Server failed to start on port ${port}`, "ERROR", "api", error); } }; diff --git a/server/accounting/pbs/pbs-ap-allocations.js b/server/accounting/pbs/pbs-ap-allocations.js index 269e22c4f..a969e82c8 100644 --- a/server/accounting/pbs/pbs-ap-allocations.js +++ 
b/server/accounting/pbs/pbs-ap-allocations.js @@ -26,7 +26,7 @@ axios.interceptors.request.use((x) => { } | ${JSON.stringify(x.data)} | ${JSON.stringify(headers)}`; //console.log(printable); - CdkBase.createJsonEvent(socket, "TRACE", `Raw Request: ${printable}`, x.data); + CdkBase.createJsonEvent(socket, "SILLY", `Raw Request: ${printable}`, x.data); return x; }); @@ -36,7 +36,7 @@ axios.interceptors.response.use((x) => { const printable = `${new Date()} | Response: ${x.status} | ${JSON.stringify(x.data)}`; //console.log(printable); - CdkBase.createJsonEvent(socket, "TRACE", `Raw Response: ${printable}`, x.data); + CdkBase.createJsonEvent(socket, "SILLY", `Raw Response: ${printable}`, x.data); return x; }); @@ -181,7 +181,7 @@ async function QueryBillData(socket, billids) { const result = await client .setHeaders({ Authorization: `Bearer ${socket.handshake.auth.token}` }) .request(queries.GET_PBS_AP_ALLOCATIONS, { billids: billids }); - CdkBase.createLogEvent(socket, "TRACE", `Bill data query result ${JSON.stringify(result, null, 2)}`); + CdkBase.createLogEvent(socket, "SILLY", `Bill data query result ${JSON.stringify(result, null, 2)}`); return result; } diff --git a/server/accounting/pbs/pbs-job-export.js b/server/accounting/pbs/pbs-job-export.js index 34db63a17..c38560293 100644 --- a/server/accounting/pbs/pbs-job-export.js +++ b/server/accounting/pbs/pbs-job-export.js @@ -28,7 +28,7 @@ axios.interceptors.request.use((x) => { } | ${JSON.stringify(x.data)} | ${JSON.stringify(headers)}`; //console.log(printable); - CdkBase.createJsonEvent(socket, "TRACE", `Raw Request: ${printable}`, x.data); + CdkBase.createJsonEvent(socket, "SILLY", `Raw Request: ${printable}`, x.data); return x; }); @@ -38,7 +38,7 @@ axios.interceptors.response.use((x) => { const printable = `${new Date()} | Response: ${x.status} | ${JSON.stringify(x.data)}`; //console.log(printable); - CdkBase.createJsonEvent(socket, "TRACE", `Raw Response: ${printable}`, x.data); + 
CdkBase.createJsonEvent(socket, "SILLY", `Raw Response: ${printable}`, x.data); return x; }); @@ -118,7 +118,7 @@ async function CheckForErrors(socket, response) { CdkBase.createLogEvent(socket, "DEBUG", `Successful response from DMS. ${response.Message || ""}`); } else { CdkBase.createLogEvent(socket, "ERROR", `Error received from DMS: ${response.Message}`); - CdkBase.createLogEvent(socket, "TRACE", `Error received from DMS: ${JSON.stringify(response)}`); + CdkBase.createLogEvent(socket, "SILLY", `Error received from DMS: ${JSON.stringify(response)}`); } } @@ -130,7 +130,7 @@ async function QueryJobData(socket, jobid) { const result = await client .setHeaders({ Authorization: `Bearer ${socket.handshake.auth.token}` }) .request(queries.QUERY_JOBS_FOR_PBS_EXPORT, { id: jobid }); - CdkBase.createLogEvent(socket, "TRACE", `Job data query result ${JSON.stringify(result, null, 2)}`); + CdkBase.createLogEvent(socket, "SILLY", `Job data query result ${JSON.stringify(result, null, 2)}`); return result.jobs_by_pk; } @@ -611,7 +611,7 @@ async function InsertFailedExportLog(socket, error) { bodyshopid: socket.JobData.bodyshop.id, jobid: socket.JobData.id, successful: false, - message: [error], + message: JSON.stringify(error), useremail: socket.user.email } }); diff --git a/server/accounting/qb-receivables-lines.js b/server/accounting/qb-receivables-lines.js index 8476ce6d4..043dc8e89 100644 --- a/server/accounting/qb-receivables-lines.js +++ b/server/accounting/qb-receivables-lines.js @@ -63,7 +63,7 @@ exports.default = function ({ bodyshop, jobs_by_pk, qbo = false, items, taxCodes ); if (!account) { - logger.log("qbxml-receivables-no-account", "warn", null, jobline.id, null); + logger.log("qbxml-receivables-no-account", "warn", null, jobline.id); throw new Error( `A matching account does not exist for the part allocation. 
Center: ${jobline.profitcenter_part}` ); diff --git a/server/accounting/qbo/qbo-payables.js b/server/accounting/qbo/qbo-payables.js index 9b5cf2558..196520de0 100644 --- a/server/accounting/qbo/qbo-payables.js +++ b/server/accounting/qbo/qbo-payables.js @@ -45,7 +45,7 @@ exports.default = async (req, res) => { const BearerToken = req.BearerToken; const client = req.userGraphQLClient; - logger.log("qbo-payable-create", "DEBUG", req.user.email, billsToQuery); + logger.log("qbo-payable-create", "DEBUG", req.user.email, null, { billsToQuery }); const result = await client .setHeaders({ Authorization: BearerToken }) @@ -91,6 +91,12 @@ exports.default = async (req, res) => { ret.push({ billid: bill.id, success: true }); } catch (error) { + logger.log("qbo-payable-create-error", "ERROR", req.user.email, null, { + error: + (error && error.authResponse && error.authResponse.body) || + error.response?.data?.Fault?.Error.map((e) => e.Detail).join(", ") || + (error && error.message) + }); ret.push({ billid: bill.id, success: false, @@ -122,7 +128,7 @@ exports.default = async (req, res) => { res.status(200).json(ret); } catch (error) { //console.log(error); - logger.log("qbo-payable-create-error", "ERROR", req.user.email, { + logger.log("qbo-payable-create-error", "ERROR", req.user.email, null, { error: error.message, stack: error.stack }); diff --git a/server/accounting/qbo/qbo-payments.js b/server/accounting/qbo/qbo-payments.js index b5d884f02..6426bfa4f 100644 --- a/server/accounting/qbo/qbo-payments.js +++ b/server/accounting/qbo/qbo-payments.js @@ -49,7 +49,7 @@ exports.default = async (req, res) => { const BearerToken = req.BearerToken; const client = req.userGraphQLClient; - logger.log("qbo-payment-create", "DEBUG", req.user.email, paymentsToQuery); + logger.log("qbo-payment-create", "DEBUG", req.user.email, null, { paymentsToQuery }); const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PAYMENTS_FOR_EXPORT, { payments: 
paymentsToQuery @@ -152,7 +152,7 @@ exports.default = async (req, res) => { ret.push({ paymentid: payment.id, success: true }); } catch (error) { - logger.log("qbo-payment-create-error", "ERROR", req.user.email, { + logger.log("qbo-payment-create-error", "ERROR", req.user.email, null, { error: (error && error.authResponse && error.authResponse.body) || (error && error.message) }); //Add the export log error. @@ -183,7 +183,7 @@ exports.default = async (req, res) => { res.status(200).json(ret); } catch (error) { //console.log(error); - logger.log("qbo-payment-create-error", "ERROR", req.user.email, { + logger.log("qbo-payment-create-error", "ERROR", req.user.email, null, { error: error.message, stack: error.stack }); diff --git a/server/accounting/qbxml/qbxml-payables.js b/server/accounting/qbxml/qbxml-payables.js index 9ffe4513f..ef588e0fb 100644 --- a/server/accounting/qbxml/qbxml-payables.js +++ b/server/accounting/qbxml/qbxml-payables.js @@ -42,9 +42,10 @@ exports.default = async (req, res) => { //For each invoice. 
res.status(200).json(QbXmlToExecute); } catch (error) { - logger.log("qbxml-payable-error", "ERROR", req.user.email, req.body.billsToQuery, { - error: error.message, - stack: error.stack + logger.log("qbxml-payable-error", "ERROR", req?.user?.email, null, { + billsToQuery: req?.body?.billsToQuery, + error: error?.message, + stack: error?.stack }); res.status(400).send(JSON.stringify(error)); } diff --git a/server/accounting/qbxml/qbxml-payments.js b/server/accounting/qbxml/qbxml-payments.js index cdc08d2e4..68e7b8778 100644 --- a/server/accounting/qbxml/qbxml-payments.js +++ b/server/accounting/qbxml/qbxml-payments.js @@ -21,7 +21,9 @@ exports.default = async (req, res) => { const client = req.userGraphQLClient; try { - logger.log("qbxml-payments-create", "DEBUG", req.user.email, req.body.paymentsToQuery, null); + logger.log("qbxml-payments-create", "DEBUG", req?.user?.email, null, { + paymentsToQuery: req.body?.paymentsToQuery + }); const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PAYMENTS_FOR_EXPORT, { payments: paymentsToQuery @@ -62,7 +64,8 @@ exports.default = async (req, res) => { res.status(200).json(QbXmlToExecute); } catch (error) { - logger.log("qbxml-payments-error", "error", req.user.email, req.body.paymentsToQuery, { + logger.log("qbxml-payments-error", "error", req?.user?.email, null, { + paymentsToQuery: req.body?.paymentsToQuery, error: error.message, stack: error.stack }); diff --git a/server/accounting/qbxml/qbxml-receivables.js b/server/accounting/qbxml/qbxml-receivables.js index 0e8a4f20d..c4353704e 100644 --- a/server/accounting/qbxml/qbxml-receivables.js +++ b/server/accounting/qbxml/qbxml-receivables.js @@ -23,7 +23,9 @@ exports.default = async (req, res) => { const client = req.userGraphQLClient; try { - logger.log("qbxml-receivables-create", "DEBUG", req.user.email, req.body.jobIds, null); + logger.log("qbxml-receivables-create", "DEBUG", req?.user?.email, null, { + jobIds: req?.body?.jobIds + }); 
const result = await client .setHeaders({ Authorization: BearerToken }) @@ -74,7 +76,8 @@ exports.default = async (req, res) => { res.status(200).json(QbXmlToExecute); } catch (error) { - logger.log("qbxml-receivables-error", "error", req.user.email, req.body.jobIds, { + logger.log("qbxml-receivables-error", "error", req?.user?.email, null, { + jobIds: req.body?.jobIds, error: error.message, stack: error.stack }); diff --git a/server/cdk/cdk-calculate-allocations.js b/server/cdk/cdk-calculate-allocations.js index afe9549ea..f78c48b91 100644 --- a/server/cdk/cdk-calculate-allocations.js +++ b/server/cdk/cdk-calculate-allocations.js @@ -42,7 +42,7 @@ async function QueryJobData(connectionData, token, jobid) { CdkBase.createLogEvent(connectionData, "DEBUG", `Querying job data for id ${jobid}`); const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {}); const result = await client.setHeaders({ Authorization: token }).request(queries.GET_CDK_ALLOCATIONS, { id: jobid }); - CdkBase.createLogEvent(connectionData, "TRACE", `Job data query result ${JSON.stringify(result, null, 2)}`); + CdkBase.createLogEvent(connectionData, "SILLY", `Job data query result ${JSON.stringify(result, null, 2)}`); return result.jobs_by_pk; } @@ -373,13 +373,19 @@ function calculateAllocations(connectionData, job) { }); //profile level adjustments for labor and materials Object.keys(job.job_totals.rates).forEach((key) => { - if (job.job_totals.rates[key] && job.job_totals.rates[key].adjustment && Dinero(job.job_totals.rates[key].adjustment).isZero() === false) { + if ( + job.job_totals.rates[key] && + job.job_totals.rates[key].adjustment && + Dinero(job.job_totals.rates[key].adjustment).isZero() === false + ) { const accountName = selectedDmsAllocationConfig.profits[key.toUpperCase()]; const otherAccount = bodyshop.md_responsibility_centers.profits.find((c) => c.name === accountName); if (otherAccount) { if (!profitCenterHash[accountName]) profitCenterHash[accountName] = Dinero(); - 
profitCenterHash[accountName] = profitCenterHash[accountName].add(Dinero(job.job_totals.rates[key].adjustments)); + profitCenterHash[accountName] = profitCenterHash[accountName].add( + Dinero(job.job_totals.rates[key].adjustments) + ); } else { CdkBase.createLogEvent( connectionData, diff --git a/server/cdk/cdk-get-makes.js b/server/cdk/cdk-get-makes.js index 06ccc53e2..90accf64e 100644 --- a/server/cdk/cdk-get-makes.js +++ b/server/cdk/cdk-get-makes.js @@ -39,8 +39,8 @@ exports.default = async function ReloadCdkMakes(req, res) { const deleteResult = await client .setHeaders({ Authorization: BearerToken }) .request(queries.DELETE_ALL_DMS_VEHICLES, {}); - console.log("🚀 ~ file: cdk-get-makes.js ~ line 53 ~ deleteResult", deleteResult); + // logger.logger.debug("🚀 ~ file: cdk-get-makes.js ~ line 53 ~ deleteResult", { deleteResult }); //Insert the new ones. const insertResult = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_DMS_VEHICLES, { @@ -59,6 +59,7 @@ exports.default = async function ReloadCdkMakes(req, res) { cdk_dealerid, count: newList.length }); + res.sendStatus(200); } catch (error) { logger.log("cdk-replace-makes-models-error", "ERROR", req.user.email, null, { diff --git a/server/cdk/cdk-job-export.js b/server/cdk/cdk-job-export.js index 222a2b485..90001fa28 100644 --- a/server/cdk/cdk-job-export.js +++ b/server/cdk/cdk-job-export.js @@ -151,7 +151,7 @@ async function QueryJobData(socket, jobid) { const result = await client .setHeaders({ Authorization: `Bearer ${socket.handshake.auth.token}` }) .request(queries.QUERY_JOBS_FOR_CDK_EXPORT, { id: jobid }); - CdkBase.createLogEvent(socket, "TRACE", `Job data query result ${JSON.stringify(result, null, 2)}`); + CdkBase.createLogEvent(socket, "SILLY", `Job data query result ${JSON.stringify(result, null, 2)}`); return result.jobs_by_pk; } @@ -171,7 +171,7 @@ async function CalculateDmsVid(socket, JobData) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", 
`soapClientVehicleInsertUpdate.getVehIdsAsync Result ${JSON.stringify(result, null, 2)}` ); CheckCdkResponseForError(socket, soapResponseVehicleInsertUpdate); @@ -214,7 +214,7 @@ async function QueryDmsVehicleById(socket, JobData, DMSVid) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientVehicleInsertUpdate.readAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientVehicleInsertUpdate.readAsync response.`); @@ -246,7 +246,7 @@ async function QueryDmsCustomerById(socket, JobData, CustomerId) { CdkBase.createXmlEvent(socket, rawResponse, `soapClientCustomerInsertUpdate.readAsync response.`); CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientCustomerInsertUpdate.readAsync Result ${JSON.stringify(result, null, 2)}` ); CheckCdkResponseForError(socket, soapResponseCustomerInsertUpdate); @@ -295,7 +295,7 @@ async function QueryDmsCustomerByName(socket, JobData) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientCustomerSearch.executeSearchBulkAsync Result ${JSON.stringify(result, null, 2)}` ); CheckCdkResponseForError(socket, soapResponseCustomerSearch); @@ -337,7 +337,7 @@ async function GenerateDmsCustomerNumber(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientCustomerInsertUpdate.getCustomerNumberAsync Result ${JSON.stringify(result, null, 2)}` ); CheckCdkResponseForError(socket, soapResponseCustomerInsertUpdate); @@ -425,7 +425,7 @@ async function InsertDmsCustomer(socket, newCustomerNumber) { CdkBase.createXmlEvent(socket, rawResponse, `soapClientCustomerInsertUpdate.insertAsync response.`); CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientCustomerInsertUpdate.insertAsync Result ${JSON.stringify(result, null, 2)}` ); CheckCdkResponseForError(socket, soapResponseCustomerInsertUpdate); @@ -505,7 +505,7 @@ async function InsertDmsVehicle(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", 
`soapClientVehicleInsertUpdate.insertAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientVehicleInsertUpdate.insertAsync response.`); @@ -611,7 +611,7 @@ async function UpdateDmsVehicle(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "DEBUG", `soapClientVehicleInsertUpdate.updateAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientVehicleInsertUpdate.updateAsync response.`); @@ -650,7 +650,7 @@ async function InsertServiceVehicleHistory(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientServiceHistoryInsert.serviceHistoryHeaderInsert Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientServiceHistoryInsert.serviceHistoryHeaderInsert response.`); @@ -690,7 +690,7 @@ async function InsertDmsStartWip(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientAccountingGLInsertUpdate.doStartWIPAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientAccountingGLInsertUpdate.doStartWIPAsync response.`); @@ -721,7 +721,7 @@ async function InsertDmsBatchWip(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientAccountingGLInsertUpdate.doTransBatchWIPAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientAccountingGLInsertUpdate.doTransBatchWIPAsync response.`); @@ -885,7 +885,7 @@ async function PostDmsBatchWip(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientAccountingGLInsertUpdate.doPostBatchWIPAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientAccountingGLInsertUpdate.doPostBatchWIPAsync response.`); @@ -914,7 +914,7 @@ async function QueryDmsErrWip(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "DEBUG", `soapClientAccountingGLInsertUpdate.doErrWIPAsync 
Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientAccountingGLInsertUpdate.doErrWIPAsync response.`); @@ -945,7 +945,7 @@ async function DeleteDmsWip(socket) { CdkBase.createLogEvent( socket, - "TRACE", + "SILLY", `soapClientAccountingGLInsertUpdate.doPostBatchWIPAsync Result ${JSON.stringify(result, null, 2)}` ); CdkBase.createXmlEvent(socket, rawResponse, `soapClientAccountingGLInsertUpdate.doPostBatchWIPAsync response.`); @@ -995,7 +995,7 @@ async function InsertFailedExportLog(socket, error) { bodyshopid: socket.JobData.bodyshop.id, jobid: socket.JobData.id, successful: false, - message: [error], + message: JSON.stringify(error), useremail: socket.user.email } }); diff --git a/server/cdk/cdk-wsdl.js b/server/cdk/cdk-wsdl.js index 032d780c2..8926e98ae 100644 --- a/server/cdk/cdk-wsdl.js +++ b/server/cdk/cdk-wsdl.js @@ -20,7 +20,7 @@ function CheckCdkResponseForError(socket, soapResponse) { //The response was null, this might be ok, it might not. CdkBase.createLogEvent( socket, - "WARNING", + "warn", `Warning detected in CDK Response - it appears to be null. Stack: ${new Error().stack}` ); return; diff --git a/server/data/chatter.js b/server/data/chatter.js index 24173a872..e610f9791 100644 --- a/server/data/chatter.js +++ b/server/data/chatter.js @@ -160,7 +160,8 @@ async function getPrivateKey() { try { const { SecretString, SecretBinary } = await client.send(command); if (SecretString || SecretBinary) logger.log("chatter-retrieved-private-key", "DEBUG", "api", null, null); - return SecretString || Buffer.from(SecretBinary, "base64").toString("ascii"); + const chatterPrivateKey = SecretString ? 
JSON.parse(SecretString) : JSON.parse(Buffer.from(SecretBinary, "base64").toString("ascii")); + return chatterPrivateKey.private_key; } catch (error) { logger.log("chatter-get-private-key", "ERROR", "api", null, error); throw err; diff --git a/server/email/mailer.js b/server/email/mailer.js index 6503e1f8c..654f56cb3 100644 --- a/server/email/mailer.js +++ b/server/email/mailer.js @@ -3,6 +3,7 @@ const { defaultProvider } = require("@aws-sdk/credential-provider-node"); const { default: InstanceManager } = require("../utils/instanceMgr"); const aws = require("@aws-sdk/client-ses"); const nodemailer = require("nodemailer"); +const logger = require("../utils/logger"); const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME); @@ -19,7 +20,7 @@ const sesConfig = { if (isLocal) { sesConfig.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`; - console.log(`SES Mailer set to LocalStack end point: ${sesConfig.endpoint}`); + logger.logger.debug(`SES Mailer set to LocalStack end point: ${sesConfig.endpoint}`); } const ses = new aws.SES(sesConfig); diff --git a/server/email/sendemail.js b/server/email/sendemail.js index 97e8f74ad..9d103ef81 100644 --- a/server/email/sendemail.js +++ b/server/email/sendemail.js @@ -21,7 +21,7 @@ const getImage = async (imageUrl) => { // Log the email in the database const logEmail = async (req, email) => { try { - const insertresult = await client.request(queries.INSERT_EMAIL_AUDIT, { + await client.request(queries.INSERT_EMAIL_AUDIT, { email: { to: email.to, cc: email.cc, @@ -34,13 +34,13 @@ const logEmail = async (req, email) => { status: "Sent" } }); - console.log(insertresult); } catch (error) { - logger.log("email-log-error", "error", req.user.email, null, { + logger.log("email-log-error", "error", req?.user?.email, null, { from: `${req.body.from.name} <${req.body.from.address}>`, - to: req.body.to, - cc: req.body.cc, - subject: req.body.subject + to: req?.body?.to, + cc: 
req?.body?.cc, + subject: req?.body?.subject, + email // info, }); } @@ -70,12 +70,11 @@ const sendServerEmail = async ({ subject, text }) => { } }, (err, info) => { - console.log(err || info); + logger.log("server-email-failure", err ? "error" : "debug", null, null, { message: err || info }); } ); } catch (error) { - console.log(error); - logger.log("server-email-failure", "error", null, null, error); + logger.log("server-email-failure", "error", null, null, { error }); } }; @@ -88,8 +87,7 @@ const sendProManagerWelcomeEmail = async ({ to, subject, html }) => { html }); } catch (error) { - console.log(error); - logger.log("server-email-failure", "error", null, null, error); + logger.log("server-email-failure", "error", null, null, { error }); } }; @@ -108,12 +106,12 @@ const sendTaskEmail = async ({ to, subject, type = "text", html, text, attachmen attachments: attachments || null }, (err, info) => { - console.log(err || info); + // (message, type, user, record, meta + logger.log("server-email", err ? 
"error" : "debug", null, null, { message: err || info }); } ); } catch (error) { - console.log(error); - logger.log("server-email-failure", "error", null, null, error); + logger.log("server-email-failure", "error", null, null, { error }); } }; @@ -184,9 +182,8 @@ const sendEmail = async (req, res) => { } }, (err, info) => { - console.log(err || info); if (info) { - logger.log("send-email-success", "DEBUG", req.user.email, null, { + logger.log("send-email-success", "DEBUG", req?.user?.email, null, { from: `${req.body.from.name} <${req.body.from.address}>`, replyTo: req.body.ReplyTo.Email, to: req.body.to, @@ -205,7 +202,7 @@ const sendEmail = async (req, res) => { success: true //response: info }); } else { - logger.log("send-email-failure", "ERROR", req.user.email, null, { + logger.log("send-email-failure", "ERROR", req?.user?.email, null, { from: `${req.body.from.name} <${req.body.from.address}>`, replyTo: req.body.ReplyTo.Email, to: req.body.to, @@ -290,7 +287,9 @@ ${body.bounce?.bouncedRecipients.map( ` }, (err, info) => { - console.log("***", err || info); + logger.log("sns-error", err ? "error" : "debug", "api", null, { + message: err ? 
JSON.stringify(err) : info + }); } ); } diff --git a/server/email/tasksEmails.js b/server/email/tasksEmails.js index ab25343e0..c05185b6b 100644 --- a/server/email/tasksEmails.js +++ b/server/email/tasksEmails.js @@ -18,10 +18,10 @@ const tasksEmailQueue = taskEmailQueue(); const tasksEmailQueueCleanup = async () => { try { // Example async operation - console.log("Performing Tasks Email Reminder process cleanup..."); + // console.log("Performing Tasks Email Reminder process cleanup..."); await new Promise((resolve) => tasksEmailQueue.destroy(() => resolve())); } catch (err) { - console.error("Tasks Email Reminder process cleanup failed:", err); + // console.error("Tasks Email Reminder process cleanup failed:", err); } }; diff --git a/server/email/tasksEmailsQueue.js b/server/email/tasksEmailsQueue.js index dc004abc1..c1828b858 100644 --- a/server/email/tasksEmailsQueue.js +++ b/server/email/tasksEmailsQueue.js @@ -10,8 +10,9 @@ const logger = require("../utils/logger"); const taskEmailQueue = () => new Queue( (taskIds, cb) => { - console.log("Processing reminds for taskIds: ", taskIds.join(", ")); - + logger.log("Processing reminds for taskIds: ", "silly", null, null, { + taskIds: taskIds?.join(", ") + }); // Set the remind_at_sent to the current time. const now = moment().toISOString(); diff --git a/server/firebase/firebase-handler.js b/server/firebase/firebase-handler.js index ec20114f9..e24c86174 100644 --- a/server/firebase/firebase-handler.js +++ b/server/firebase/firebase-handler.js @@ -269,10 +269,14 @@ const sendNotification = async (req, res) => { }) .then((response) => { // Response is a message ID string. 
- console.log("Successfully sent message:", response); + logger.log("Successfully sent message:", "debug", req?.user?.email, null, { + response + }); }) .catch((error) => { - console.log("Error sending message:", error); + logger.log("Error sending message:", "error", req?.user?.email, null, { + error + }); }); res.sendStatus(200); diff --git a/server/ioevent/ioevent.js b/server/ioevent/ioevent.js index 5d4223f73..6ca37f27e 100644 --- a/server/ioevent/ioevent.js +++ b/server/ioevent/ioevent.js @@ -35,7 +35,7 @@ exports.default = async (req, res) => { res.sendStatus(200); } catch (error) { - logger.log("ioevent-error", "trace", user, null, { + logger.log("ioevent-error", "silly", user, null, { operationname: operationName, time, dbevent, diff --git a/server/job/job-costing.js b/server/job/job-costing.js index d55af58a5..6c107ce8d 100644 --- a/server/job/job-costing.js +++ b/server/job/job-costing.js @@ -19,7 +19,8 @@ async function JobCosting(req, res) { const BearerToken = req.BearerToken; const client = req.userGraphQLClient; - logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null); + //Uncomment for further testing + // logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null); try { const resp = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_JOB_COSTING_DETAILS, { @@ -46,7 +47,10 @@ async function JobCostingMulti(req, res) { const BearerToken = req.BearerToken; const client = req.userGraphQLClient; - logger.log("job-costing-multi-start", "DEBUG", req.user.email, jobids, null); + //Uncomment for further testing + // logger.log("job-costing-multi-start", "DEBUG", req?.user?.email, null, { + // jobids + // }); try { const resp = await client @@ -244,7 +248,8 @@ async function JobCostingMulti(req, res) { data: ret }); } catch (error) { - logger.log("job-costing-multi-error", "ERROR", req.user.email, [jobids], { + logger.log("job-costing-multi-error", "ERROR", req?.user?.email, null, { + jobids, message: 
error.message, stack: error.stack }); @@ -282,7 +287,13 @@ function GenerateCostingData(job) { if (val.mod_lbr_ty) { const laborProfitCenter = val.profitcenter_labor || defaultProfits[val.mod_lbr_ty] || "Unknown"; - if (laborProfitCenter === "Unknown") console.log("Unknown type", val.line_desc, val.mod_lbr_ty); + //Uncomment for further testing + // if (laborProfitCenter === "Unknown") { + // logger.log("job-costing unknown type", "debug", null, null, { + // line_desc: val.line_desc, + // mod_lbr_ty: val.mod_lbr_ty + // }); + // } const rateName = `rate_${(val.mod_lbr_ty || "").toLowerCase()}`; @@ -349,10 +360,22 @@ function GenerateCostingData(job) { if (val.part_type && val.part_type !== "PAE" && val.part_type !== "PAS" && val.part_type !== "PASL") { const partsProfitCenter = val.profitcenter_part || defaultProfits[val.part_type] || "Unknown"; - if (partsProfitCenter === "Unknown") console.log("Unknown type", val.line_desc, val.part_type); + //Uncomment for further testing + // if (partsProfitCenter === "Unknown" || !partsProfitCenter) { + // logger.log( + // partsProfitCenter === "Unknown" + // ? "job-costing unknown type" + // : "Unknown cost/profit center mapping for parts.", + // "debug", + // null, + // null, + // { + // line_desc: val.line_desc, + // part_type: val.part_type + // } + // ); + // } - if (!partsProfitCenter) - console.log("Unknown cost/profit center mapping for parts.", val.line_desc, val.part_type); let partsAmount = Dinero({ amount: val.act_price_before_ppc ? 
Math.round(val.act_price_before_ppc * 100) @@ -409,10 +432,22 @@ function GenerateCostingData(job) { if (val.part_type && val.part_type !== "PAE" && (val.part_type === "PAS" || val.part_type === "PASL")) { const partsProfitCenter = val.profitcenter_part || defaultProfits[val.part_type] || "Unknown"; - if (partsProfitCenter === "Unknown") console.log("Unknown type", val.line_desc, val.part_type); + //Uncomment for further testing + // if (partsProfitCenter === "Unknown" || !partsProfitCenter) { + // logger.log( + // partsProfitCenter === "Unknown" + // ? "job-costing unknown type" + // : "job-costing Unknown cost/profit center mapping for sublet", + // "debug", + // null, + // null, + // { + // line_desc: val.line_desc, + // part_type: val.part_type + // } + // ); + // } - if (!partsProfitCenter) - console.log("Unknown cost/profit center mapping for sublet.", val.line_desc, val.part_type); const partsAmount = Dinero({ amount: Math.round((val.act_price || 0) * 100) }) @@ -443,9 +478,14 @@ function GenerateCostingData(job) { //If so, use it, otherwise try to use the same from the auto-allocate logic in IO app jobs-close-auto-allocate. 
const partsProfitCenter = val.profitcenter_part || getAdditionalCostCenter(val, defaultProfits) || "Unknown"; - if (partsProfitCenter === "Unknown") { - console.log("Unknown type", val.line_desc, val.part_type); - } + //Uncomment for further testing + // if (partsProfitCenter === "Unknown") { + // logger.log("job-costing unknown type", "debug", null, null, { + // line_desc: val.line_desc, + // part_type: val.part_type + // }); + // } + const partsAmount = Dinero({ amount: Math.round((val.act_price || 0) * 100) }) diff --git a/server/job/job-totals-USA.js b/server/job/job-totals-USA.js index d65ed80ef..f673b5728 100644 --- a/server/job/job-totals-USA.js +++ b/server/job/job-totals-USA.js @@ -26,7 +26,7 @@ exports.totalsSsu = async function (req, res) { const BearerToken = req.BearerToken; const client = req.userGraphQLClient; - logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, id, null); + logger.log("job-totals-ssu-USA", "DEBUG", req?.user?.email, id); try { const job = await client.setHeaders({ Authorization: BearerToken }).request(queries.GET_JOB_BY_PK, { @@ -47,7 +47,7 @@ exports.totalsSsu = async function (req, res) { res.status(200).send(); } catch (error) { - logger.log("job-totals-ssu-USA-error", "ERROR", req.user.email, id, { + logger.log("job-totals-ssu-USA-error", "ERROR", req?.user?.email, id, { jobid: id, error }); @@ -123,12 +123,10 @@ async function Totals(req, res) { const logger = req.logger; const client = req.userGraphQLClient; - logger.log("job-totals-USA", "DEBUG", req.user.email, job.id, { + logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, job.id, { jobid: job.id }); - logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, id, null); - await AutoAddAtsIfRequired({ job, client }); try { @@ -1000,7 +998,9 @@ function CalculateTaxesTotals(job, otherTotals) { } } } catch (error) { - console.error("Key with issue", key); + logger.log("job-totals-USA Key with issue", "error", null, null, { + key + }); } }); @@ -1065,7 +1065,9 @@ 
function CalculateTaxesTotals(job, otherTotals) { totalTaxByTier[taxTierKey] = totalTaxByTier[taxTierKey].add(taxAmountToAdd); } } catch (error) { - console.error("PFP Calculation error", error); + logger.log("job-totals-USA - PFP Calculation Error", "error", null, null, { + error + }); } }); diff --git a/server/job/job-updated.js b/server/job/job-updated.js index f218f83bc..89637a6f8 100644 --- a/server/job/job-updated.js +++ b/server/job/job-updated.js @@ -2,8 +2,16 @@ const { isObject } = require("lodash"); const jobUpdated = async (req, res) => { const { ioRedis, logger, ioHelpers } = req; + // Old Way + if (req?.body?.event?.data?.new || isObject(req?.body?.event?.data?.new)) { + const updatedJob = req.body.event.data.new; + const bodyshopID = updatedJob.shopid; + ioRedis.to(ioHelpers.getBodyshopRoom(bodyshopID)).emit("production-job-updated", updatedJob); + return res.json({ message: "Job updated and event emitted" }); + } - if (!req?.body?.event?.data?.new || !isObject(req?.body?.event?.data?.new)) { + // New way + if (!req?.body?.data || !isObject(req.body.data)) { logger.log("job-update-error", "ERROR", req.user?.email, null, { message: `Malformed Job Update request sent from Hasura`, body: req?.body @@ -15,12 +23,14 @@ const jobUpdated = async (req, res) => { }); } - logger.log("job-update", "INFO", req.user?.email, null, { - message: `Job updated event received from Hasura`, - jobid: req?.body?.event?.data?.new?.id - }); + // Uncomment for further testing + // You can also test this using SocketIOAdmin + // logger.log("job-update", "DEBUG", req.user?.email, null, { + // message: `Job updated event received from Hasura`, + // jobid: req?.body?.event?.data?.new?.id + // }); - const updatedJob = req.body.event.data.new; + const updatedJob = req.body.data; const bodyshopID = updatedJob.shopid; // Emit the job-updated event only to the room corresponding to the bodyshop diff --git a/server/middleware/validateFirebaseIdTokenMiddleware.js 
b/server/middleware/validateFirebaseIdTokenMiddleware.js index d76cc8019..83ab38eaf 100644 --- a/server/middleware/validateFirebaseIdTokenMiddleware.js +++ b/server/middleware/validateFirebaseIdTokenMiddleware.js @@ -16,7 +16,11 @@ const validateFirebaseIdTokenMiddleware = async (req, res, next) => { (!req.headers.authorization || !req.headers.authorization.startsWith("Bearer ")) && !(req.cookies && req.cookies.__session) ) { - console.error("Unauthorized attempt. No authorization provided."); + logger.log("api-authorization-call", "warn", req?.user?.email, null, { + type: "unauthorized", + path: req.path, + body: req.body + }); return res.status(403).send("Unauthorized"); } @@ -32,10 +36,10 @@ const validateFirebaseIdTokenMiddleware = async (req, res, next) => { idToken = req.cookies.__session; } else { // No cookie - console.error("Unauthorized attempt. No cookie provided."); - logger.log("api-unauthorized-call", "WARN", null, null, { - req, - type: "no-cookie" + logger.log("api-unauthorized-call", "warn", null, null, { + type: "unauthorized", + path: req.path, + body: req.body }); return res.status(403).send("Unauthorized"); @@ -47,11 +51,11 @@ const validateFirebaseIdTokenMiddleware = async (req, res, next) => { req.user = decodedIdToken; next(); } catch (error) { - logger.log("api-unauthorized-call", "WARN", null, null, { + logger.log("api-unauthorized-call", "warn", null, null, { path: req.path, body: req.body, - type: "unauthroized", + type: "unauthorized", ...error }); diff --git a/server/mixdata/mixdata.js b/server/mixdata/mixdata.js index 0dca3705b..919726efd 100644 --- a/server/mixdata/mixdata.js +++ b/server/mixdata/mixdata.js @@ -9,11 +9,9 @@ require("dotenv").config({ }); exports.mixdataUpload = async (req, res) => { - const { bodyshopid } = req.body; - const client = req.userGraphQLClient; - logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null); + logger.log("job-mixdata-upload", "DEBUG", req?.user?.email, null, null); try { for 
(const element of req.files) { @@ -23,7 +21,7 @@ exports.mixdataUpload = async (req, res) => { explicitArray: false }); - logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest); + logger.log("job-mixdata-parse", "DEBUG", req?.user?.email, null, { inboundRequest }); const ScaleType = DetermineScaleType(inboundRequest); const RoNumbersFromInboundRequest = GetListOfRos(inboundRequest, ScaleType); @@ -61,7 +59,7 @@ exports.mixdataUpload = async (req, res) => { res.status(500).json(error); logger.log("job-mixdata-upload-error", "ERROR", null, null, { error: error.message, - ...error + stack: error.stack }); } }; @@ -70,7 +68,7 @@ function DetermineScaleType(inboundRequest) { const ret = { type: "", verson: 0 }; //PPG Mix Data - if (inboundRequest.PPG && inboundRequest.PPG.Header.Protocol.Name === "PPG") { + if (inboundRequest?.PPG?.Header?.Protocol?.Name === "PPG") { return { type: inboundRequest.PPG.Header.Protocol.Name, company: "PPG", @@ -80,13 +78,13 @@ function DetermineScaleType(inboundRequest) { } function GetListOfRos(inboundRequest, ScaleType) { - if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") { + if (ScaleType?.company === "PPG" && ScaleType?.version === "1.3.0") { return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map((r) => r.RONumber); } } function GenerateMixDataArray(inboundRequest, ScaleType, jobHash) { - if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") { + if (ScaleType?.company === "PPG" && ScaleType?.version === "1.3.0") { return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map((r) => { return { jobid: jobHash[r.RONumber]?.jobid, diff --git a/server/payroll/pay-all.js b/server/payroll/pay-all.js index 9288e256a..03c6fbee8 100644 --- a/server/payroll/pay-all.js +++ b/server/payroll/pay-all.js @@ -45,12 +45,12 @@ exports.payall = async function (req, res) { const path = diffParser(diff); if (diff.op === "add") { - console.log(Object.keys(diff.val)); + // 
console.log(Object.keys(diff.val)); if (typeof diff.val === "object" && Object.keys(diff.val).length > 1) { //Multiple values to add. Object.keys(diff.val).forEach((key) => { - console.log("Hours", diff.val[key][Object.keys(diff.val[key])[0]]); - console.log("Rate", Object.keys(diff.val[key])[0]); + // console.log("Hours", diff.val[key][Object.keys(diff.val[key])[0]]); + // console.log("Rate", Object.keys(diff.val[key])[0]); ticketsToInsert.push({ task_name: "Pay All", jobid: job.id, diff --git a/server/scheduling/scheduling-job.js b/server/scheduling/scheduling-job.js index a005a8d72..11b881f70 100644 --- a/server/scheduling/scheduling-job.js +++ b/server/scheduling/scheduling-job.js @@ -49,7 +49,7 @@ exports.job = async (req, res) => { if (bucketId) { load.productionTotal[bucketId].count = load.productionTotal[bucketId].count + 1; } else { - console.log("Uh oh, this job doesn't fit in a bucket!", item); + // console.log("Uh oh, this job doesn't fit in a bucket!", item); } }); @@ -254,7 +254,7 @@ const CalculateLoad = (currentLoad, buckets, jobsIn, jobsOut) => { if (bucketId) { newLoad[bucketId].count = newLoad[bucketId].count + 1; } else { - console.log("[Util Arr Job]Uh oh, this job doesn't fit in a bucket!", job); + // console.log("[Util Arr Job]Uh oh, this job doesn't fit in a bucket!", job); } }); @@ -263,10 +263,10 @@ const CalculateLoad = (currentLoad, buckets, jobsIn, jobsOut) => { if (bucketId) { newLoad[bucketId].count = newLoad[bucketId].count - 1; if (newLoad[bucketId].count < 0) { - console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job); + // console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job); } } else { - console.log("[Util Out Job]Uh oh, this job doesn't fit in a bucket!", job); + // console.log("[Util Out Job]Uh oh, this job doesn't fit in a bucket!", job); } }); diff --git a/server/stripe/payment.js b/server/stripe/payment.js index 3f79c78d0..7b7bc1dc8 100644 --- a/server/stripe/payment.js +++ b/server/stripe/payment.js @@ 
-39,7 +39,7 @@ const processor = async (req, res) => { } }); } catch (error) { - console.log("error", error); + // console.log("error", error); res.status(400).send(error); } }; diff --git a/server/tasks/tasks.js b/server/tasks/tasks.js index d59d834eb..2ae944e60 100644 --- a/server/tasks/tasks.js +++ b/server/tasks/tasks.js @@ -6,10 +6,11 @@ const client = require("../graphql-client/graphql-client").client; const emailer = require("../email/sendemail"); const moment = require("moment-timezone"); const converter = require("json-2-csv"); +const logger = require("../utils/logger"); exports.taskHandler = async (req, res) => { try { - const { bodyshopid, query, variables, text, to, subject, timezone } = req.body; + const { query, variables, text, to, subject, timezone } = req.body; //Check the variables to see if they are an object. Object.keys(variables).forEach((key) => { @@ -32,8 +33,10 @@ exports.taskHandler = async (req, res) => { text, attachments: [{ filename: "query.csv", content: csv }] }) - .catch((err) => { - console.error("Errors sending CSV Email."); + .catch((error) => { + logger.log("Tasks - Error sending CSV EMAIL", "error", req?.user?.email, null, { + error + }); }); return res.status(200).send(csv); diff --git a/server/utils/logger.js b/server/utils/logger.js index 3a1742432..bfb0c0cda 100644 --- a/server/utils/logger.js +++ b/server/utils/logger.js @@ -8,6 +8,19 @@ const InstanceManager = require("../utils/instanceMgr").default; const winston = require("winston"); const WinstonCloudWatch = require("winston-cloudwatch"); const { isString, isEmpty } = require("lodash"); +const { networkInterfaces, hostname } = require("node:os"); + +const LOG_LEVELS = { + error: { level: 0, name: "error" }, + warn: { level: 1, name: "warn" }, + info: { level: 2, name: "info" }, + http: { level: 3, name: "http" }, + verbose: { level: 4, name: "verbose" }, + debug: { level: 5, name: "debug" }, + silly: { level: 6, name: "silly" } +}; + +const normalizeLevel = (level) => 
(level ? level.toLowerCase() : LOG_LEVELS.debug.name); const createLogger = () => { try { @@ -27,9 +40,6 @@ const createLogger = () => { if (isLocal) { winstonCloudwatchTransportDefaults.awsOptions.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`; - console.log( - `Winston Transports set to LocalStack end point: ${winstonCloudwatchTransportDefaults.awsOptions.endpoint}` - ); } const levelFilter = (levels) => { @@ -42,6 +52,22 @@ const createLogger = () => { })(); }; + const getHostNameOrIP = () => { + // Try to get the hostname first + const hostName = hostname(); + if (hostName) return hostName; + + const interfaces = networkInterfaces(); + for (const name of Object.keys(interfaces)) { + for (const iface of interfaces[name]) { + if (iface.family === "IPv4" && !iface.internal) { + return iface.address; + } + } + } + + return "127.0.0.1"; + }; const createProductionTransport = (level, logStreamName, filters) => { return new WinstonCloudWatch({ level, @@ -51,17 +77,26 @@ const createLogger = () => { }); }; + const internalHostname = process.env.HOSTNAME || getHostNameOrIP(); + const getDevelopmentTransports = () => [ new winston.transports.Console({ level: "silly", format: winston.format.combine( winston.format.colorize(), winston.format.timestamp(), - winston.format.printf(({ level, message, timestamp, user, record, object }) => { - return `${timestamp} [${level}]: ${message} ${ - user ? `| user: ${JSON.stringify(user)}` : "" - } ${record ? `| record: ${JSON.stringify(record)}` : ""} ${ - object ? `| object: ${JSON.stringify(object, null, 2)}` : "" + winston.format.printf(({ level, message, timestamp, user, record, meta }) => { + const hostnameColor = `\x1b[34m${internalHostname}\x1b[0m`; // Blue + const timestampColor = `\x1b[36m${timestamp}\x1b[0m`; // Cyan + const labelColor = "\x1b[33m"; // Yellow + const separatorColor = "\x1b[35m|\x1b[0m"; // Magenta for separators + + return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${ + user ? 
`${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : "" + } ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${ + meta + ? `\n${separatorColor} ${labelColor}meta:\x1b[0m ${JSON.stringify(meta, null, 2)} ${separatorColor}` + : "" }`; }) ) @@ -83,12 +118,19 @@ const createLogger = () => { : [...getDevelopmentTransports(), ...getProductionTransports()] }); + if (isLocal) { + winstonLogger.debug( + `CloudWatch set to LocalStack end point: ${winstonCloudwatchTransportDefaults.awsOptions.endpoint}` + ); + } + const log = (message, type, user, record, meta) => { winstonLogger.log({ - level: type.toLowerCase(), + level: normalizeLevel(type), message, user, record, + hostname: internalHostname, meta }); }; @@ -101,7 +143,8 @@ const createLogger = () => { console.error("Error setting up enhanced Logger, defaulting to console.: " + e?.message || ""); return { log: console.log, - logger: console.log + logger: console.log, + LOG_LEVELS }; } }; diff --git a/server/web-sockets/redisSocketEvents.js b/server/web-sockets/redisSocketEvents.js index a19953311..7ae8cd8c4 100644 --- a/server/web-sockets/redisSocketEvents.js +++ b/server/web-sockets/redisSocketEvents.js @@ -8,7 +8,6 @@ const redisSocketEvents = ({ }) => { // Logging helper functions const createLogEvent = (socket, level, message) => { - //console.log(`[IOREDIS LOG EVENT] - ${socket?.user?.email} - ${socket.id} - ${message}`); logger.log("ioredis-log-event", level, socket?.user?.email, null, { wsmessage: message }); }; @@ -33,7 +32,6 @@ const redisSocketEvents = ({ next(new Error("Authentication error - no authorization token.")); } } catch (error) { - //console.log("Uncaught connection error:::", error); logger.log("websocket-connection-error", "error", null, null, { ...error }); @@ -43,7 +41,8 @@ const redisSocketEvents = ({ // Register Socket Events const registerSocketEvents = (socket) => { - createLogEvent(socket, "DEBUG", `Registering RedisIO Socket 
Events.`); + // Uncomment for further testing + // createLogEvent(socket, "debug", `Registering RedisIO Socket Events.`); // Token Update Events const registerUpdateEvents = (socket) => { @@ -56,18 +55,19 @@ const redisSocketEvents = ({ // If We ever want to persist user Data across workers // await setSessionData(socket.id, "user", user); - createLogEvent(socket, "INFO", "Token updated successfully"); + // Uncomment for further testing + // createLogEvent(socket, "debug", "Token updated successfully"); socket.emit("token-updated", { success: true }); } catch (error) { if (error.code === "auth/id-token-expired") { - createLogEvent(socket, "WARNING", "Stale token received, waiting for new token"); + createLogEvent(socket, "warn", "Stale token received, waiting for new token"); socket.emit("token-updated", { success: false, error: "Stale token." }); } else { - createLogEvent(socket, "ERROR", `Token update failed: ${error.message}`); + createLogEvent(socket, "error", `Token update failed: ${error.message}`); socket.emit("token-updated", { success: false, error: error.message }); // For any other errors, optionally disconnect the socket socket.disconnect(); @@ -82,9 +82,9 @@ const redisSocketEvents = ({ try { const room = getBodyshopRoom(bodyshopUUID); socket.join(room); - createLogEvent(socket, "DEBUG", `Client joined bodyshop room: ${room}`); + // createLogEvent(socket, "debug", `Client joined bodyshop room: ${room}`); } catch (error) { - createLogEvent(socket, "ERROR", `Error joining room: ${error}`); + createLogEvent(socket, "error", `Error joining room: ${error}`); } }; @@ -92,9 +92,9 @@ const redisSocketEvents = ({ try { const room = getBodyshopRoom(bodyshopUUID); socket.leave(room); - createLogEvent(socket, "DEBUG", `Client left bodyshop room: ${room}`); + createLogEvent(socket, "debug", `Client left bodyshop room: ${room}`); } catch (error) { - createLogEvent(socket, "ERROR", `Error joining room: ${error}`); + createLogEvent(socket, "error", `Error joining 
room: ${error}`); } }; @@ -102,9 +102,10 @@ const redisSocketEvents = ({ try { const room = getBodyshopRoom(bodyshopUUID); io.to(room).emit("bodyshop-message", message); - createLogEvent(socket, "DEBUG", `Broadcast message to bodyshop ${room}`); + // We do not need this as these can be debugged live + // createLogEvent(socket, "debug", `Broadcast message to bodyshop ${room}`); } catch (error) { - createLogEvent(socket, "ERROR", `Error getting room: ${error}`); + createLogEvent(socket, "error", `Error getting room: ${error}`); } }; @@ -115,7 +116,9 @@ const redisSocketEvents = ({ // Disconnect Events const registerDisconnectEvents = (socket) => { const disconnect = () => { - createLogEvent(socket, "DEBUG", `User disconnected.`); + // Uncomment for further testing + // createLogEvent(socket, "debug", `User disconnected.`); + const rooms = Array.from(socket.rooms).filter((room) => room !== socket.id); for (const room of rooms) { socket.leave(room); diff --git a/server/web-sockets/web-socket.js b/server/web-sockets/web-socket.js index 3140c2e87..c5e5012c8 100644 --- a/server/web-sockets/web-socket.js +++ b/server/web-sockets/web-socket.js @@ -41,7 +41,7 @@ io.use(function (socket, next) { }); io.on("connection", (socket) => { - socket.log_level = "TRACE"; + socket.log_level = "DEBUG"; createLogEvent(socket, "DEBUG", `Connected and Authenticated.`); socket.on("set-log-level", (level) => { @@ -75,7 +75,7 @@ io.on("connection", (socket) => { socket.on("cdk-calculate-allocations", async (jobid, callback) => { const allocations = await CdkCalculateAllocations(socket, jobid); createLogEvent(socket, "DEBUG", `Allocations calculated.`); - createLogEvent(socket, "TRACE", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`); + createLogEvent(socket, "SILLY", `Allocations calculated. 
${JSON.stringify(allocations, null, 2)}`); callback(allocations); }); @@ -85,7 +85,7 @@ io.on("connection", (socket) => { socket.on("pbs-calculate-allocations", async (jobid, callback) => { const allocations = await CdkCalculateAllocations(socket, jobid); createLogEvent(socket, "DEBUG", `Allocations calculated.`); - createLogEvent(socket, "TRACE", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`); + createLogEvent(socket, "SILLY", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`); callback(allocations); }); @@ -103,7 +103,7 @@ io.on("connection", (socket) => { socket.on("pbs-calculate-allocations-ap", async (billids, callback) => { const allocations = await PbsCalculateAllocationsAp(socket, billids); createLogEvent(socket, "DEBUG", `AP Allocations calculated.`); - createLogEvent(socket, "TRACE", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`); + createLogEvent(socket, "DEBUG", `Allocations calculated. ${JSON.stringify(allocations, null, 2)}`); socket.apAllocations = allocations; callback(allocations); }); @@ -122,7 +122,7 @@ io.on("connection", (socket) => { function createLogEvent(socket, level, message) { if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy(level)) { - // console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`); + // console.log(`[WS LOG EVENT] ${level} - ${new Date()} - ${socket.user.email} - ${socket.id} - ${message}`); socket.emit("log-event", { timestamp: new Date(), level, @@ -173,17 +173,17 @@ function createJsonEvent(socket, level, message, json) { } function createXmlEvent(socket, xml, message, isError = false) { - if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy("TRACE")) { + if (LogLevelHierarchy(socket.log_level) >= LogLevelHierarchy("SILLY")) { socket.emit("log-event", { timestamp: new Date(), - level: isError ? "ERROR" : "TRACE", + level: isError ? 
"ERROR" : "SILLY", message: `${message}: ${xml}` }); } logger.log( isError ? "ws-log-event-xml-error" : "ws-log-event-xml", - isError ? "ERROR" : "TRACE", + isError ? "ERROR" : "SILLY", socket.user.email, socket.recordid, { @@ -195,7 +195,7 @@ function createXmlEvent(socket, xml, message, isError = false) { if (socket.logEvents && isArray(socket.logEvents)) { socket.logEvents.push({ timestamp: new Date(), - level: isError ? "ERROR" : "TRACE", + level: isError ? "ERROR" : "SILLY", message, xml }); @@ -206,13 +206,13 @@ function LogLevelHierarchy(level) { switch (level) { case "XML": return 5; - case "TRACE": + case "SILLY": return 5; case "DEBUG": return 4; case "INFO": return 3; - case "WARNING": + case "WARN": return 2; case "ERROR": return 1;