Merged in release/2024-11-01 (pull request #1866)

Release/2024-11-01 into master-AIO - IO-2979 - Misc Logger related fixes
This commit is contained in:
Dave Richer
2024-10-29 20:10:02 +00:00
37 changed files with 238 additions and 151 deletions

View File

@@ -2,8 +2,6 @@ import { ApolloProvider } from "@apollo/client";
import { SplitFactoryProvider, SplitSdk } from "@splitsoftware/splitio-react";
import { ConfigProvider } from "antd";
import enLocale from "antd/es/locale/en_US";
import dayjs from "../utils/day";
import "dayjs/locale/en";
import React from "react";
import { useTranslation } from "react-i18next";
import GlobalLoadingBar from "../components/global-loading-bar/global-loading-bar.component";
@@ -19,8 +17,6 @@ if (import.meta.env.DEV) {
Userpilot.initialize("NX-69145f08");
}
dayjs.locale("en");
const config = {
core: {
authorizationKey: import.meta.env.VITE_APP_SPLIT_API,

View File

@@ -8,7 +8,7 @@ import { INSERT_EULA_ACCEPTANCE } from "../../graphql/user.queries";
import { useMutation } from "@apollo/client";
import { acceptEula } from "../../redux/user/user.actions";
import { useTranslation } from "react-i18next";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import "./eula.styles.scss";
import DateTimePicker from "../form-date-time-picker/form-date-time-picker.component.jsx";
@@ -208,7 +208,7 @@ const EulaFormComponent = ({ form, handleChange, onFinish, t }) => (
{
required: true,
validator: (_, value) => {
if (day(value).isSame(day(), "day")) {
if (dayjs(value).isSame(dayjs(), "day")) {
return Promise.resolve();
}
return Promise.reject(new Error(t("eula.messages.date_accepted")));

View File

@@ -4,19 +4,37 @@ import React, { useCallback, useState } from "react";
import { useTranslation } from "react-i18next";
import dayjs from "../../utils/day";
import { fuzzyMatchDate } from "./formats.js";
import { createStructuredSelector } from "reselect";
import { selectBodyshop } from "../../redux/user/user.selectors.js";
import { connect } from "react-redux";
const DateTimePicker = ({ value, onChange, onBlur, id, onlyFuture, onlyToday, isDateOnly = false, ...restProps }) => {
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
});
const DateTimePicker = ({
value,
onChange,
onBlur,
id,
onlyFuture,
onlyToday,
isDateOnly = false,
bodyshop,
...restProps
}) => {
const [isManualInput, setIsManualInput] = useState(false);
const { t } = useTranslation();
const handleChange = useCallback(
(newDate) => {
if (!newDate) return;
if (onChange) {
onChange(newDate || null);
onChange(bodyshop?.timezone ? dayjs(newDate).tz(bodyshop.timezone, true) : newDate);
}
setIsManualInput(false);
},
[onChange]
[onChange, bodyshop?.timezone]
);
const handleBlur = useCallback(
@@ -102,4 +120,4 @@ DateTimePicker.propTypes = {
isDateOnly: PropTypes.bool
};
export default React.memo(DateTimePicker);
export default connect(mapStateToProps, null)(DateTimePicker);

View File

@@ -116,18 +116,15 @@ function Header({
const { t } = useTranslation();
const deleteBetaCookie = () => {
const cookieExists = document.cookie.split("; ").some((row) => row.startsWith(`betaSwitchImex=`));
if (cookieExists) {
const domain = window.location.hostname.split(".").slice(-2).join(".");
document.cookie = `betaSwitchImex=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=/; domain=.${domain}`;
console.log(`betaSwitchImex cookie deleted`);
} else {
console.log(`betaSwitchImex cookie does not exist`);
}
};
deleteBetaCookie();
// const deleteBetaCookie = () => {
// const cookieExists = document.cookie.split("; ").some((row) => row.startsWith(`betaSwitchImex=`));
// if (cookieExists) {
// const domain = window.location.hostname.split(".").slice(-2).join(".");
// document.cookie = `betaSwitchImex=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=/; domain=.${domain}`;
// }
// };
//
// deleteBetaCookie();
const accountingChildren = [];

View File

@@ -1,5 +1,5 @@
import React, { useCallback, useEffect, useState } from "react";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import axios from "axios";
import { Badge, Card, Space, Table, Tag } from "antd";
import { gql, useQuery } from "@apollo/client";
@@ -72,7 +72,7 @@ export function JobLifecycleComponent({ job, statuses, ...rest }) {
dataIndex: "start",
key: "start",
render: (text) => DateTimeFormatterFunction(text),
sorter: (a, b) => day(a.start).unix() - day(b.start).unix()
sorter: (a, b) => dayjs(a.start).unix() - dayjs(b.start).unix()
},
{
title: t("job_lifecycle.columns.relative_start"),
@@ -90,7 +90,7 @@ export function JobLifecycleComponent({ job, statuses, ...rest }) {
}
return isEmpty(a.end) ? 1 : -1;
}
return day(a.end).unix() - day(b.end).unix();
return dayjs(a.end).unix() - dayjs(b.end).unix();
},
render: (text) => (isEmpty(text) ? t("job_lifecycle.content.not_available") : DateTimeFormatterFunction(text))
},

View File

@@ -2,7 +2,7 @@ import React, { useState } from "react";
import { useMutation } from "@apollo/client";
import { Button, Form, notification, Popover, Select, Space } from "antd";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
import { createStructuredSelector } from "reselect";
@@ -48,7 +48,7 @@ export function JobLineDispatchButton({
const result = await dispatchLines({
variables: {
partsDispatch: {
dispatched_at: day(),
dispatched_at: dayjs(),
employeeid: values.employeeid,
jobid: job.id,
dispatched_by: currentUser.email,
@@ -138,7 +138,11 @@ export function JobLineDispatchButton({
return (
<Popover open={visible} content={popMenu}>
<Button disabled={selectedLines.length === 0 || jobRO || disabled} loading={loading} onClick={() => setVisible(true)}>
<Button
disabled={selectedLines.length === 0 || jobRO || disabled}
loading={loading}
onClick={() => setVisible(true)}
>
{t("joblines.actions.dispatchparts", { count: selectedLines.length })}
</Button>
</Popover>

View File

@@ -1,6 +1,6 @@
import { useMutation } from "@apollo/client";
import { Button, Card, Col, notification, Row, Table } from "antd";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import React from "react";
import { useTranslation } from "react-i18next";
import { UPDATE_PARTS_DISPATCH_LINE } from "../../graphql/parts-dispatch.queries";
@@ -11,7 +11,7 @@ export default function PartsDispatchExpander({ dispatch, job }) {
const [updateDispatchLine] = useMutation(UPDATE_PARTS_DISPATCH_LINE);
const handleAccept = async ({ partsDispatchLineId }) => {
const accepted_at = day();
const accepted_at = dayjs();
const result = await updateDispatchLine({
variables: { id: partsDispatchLineId, line: { accepted_at } },
optimisticResponse: {

View File

@@ -120,14 +120,6 @@ var formats = {
};
const localizer = (dayjsLib) => {
// load dayjs plugins
dayjsLib.extend(isBetween);
dayjsLib.extend(isSameOrAfter);
dayjsLib.extend(isSameOrBefore);
dayjsLib.extend(localeData);
dayjsLib.extend(localizedFormat);
dayjsLib.extend(minMax);
dayjsLib.extend(utc);
var locale = function locale(dj, c) {
return c ? dj.locale(c) : dj;
};
@@ -136,7 +128,8 @@ const localizer = (dayjsLib) => {
// then use the timezone aware version
//TODO This was the issue entirely...
// var dayjs = dayjsLib.tz ? dayjsLib.tz : dayjsLib;
// var dayjs = dayjsLib.tz ? dayjsLib.tz : dayjsLib;
var dayjs = dayjsLib;
function getTimezoneOffset(date) {

View File

@@ -20,6 +20,7 @@ const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop,
problemJobs: selectProblemJobs
});
const localizer = local(dayjs);
export function ScheduleCalendarWrapperComponent({

View File

@@ -1,6 +1,6 @@
import { useMutation } from "@apollo/client";
import { Button, notification } from "antd";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import React, { useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -29,7 +29,7 @@ export function TimeTicketsCommit({ bodyshop, currentUser, timeticket, disabled,
? { commited_by: null, committed_at: null }
: {
commited_by: currentUser.email,
committed_at: day()
committed_at: dayjs()
};
const result = await updateTimeTicket({

View File

@@ -1,6 +1,6 @@
import { useMutation } from "@apollo/client";
import { Button, notification } from "antd";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import React, { useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -34,7 +34,7 @@ export function TimeTicketsCommit({
timeticketIds: timetickets.map((ticket) => ticket.id),
timeticket: {
commited_by: currentUser.email,
committed_at: day()
committed_at: dayjs()
}
},
update(cache) {
@@ -47,7 +47,7 @@ export function TimeTicketsCommit({
return {
...ticket,
commited_by: currentUser.email,
committed_at: day()
committed_at: dayjs()
};
}
return ticket;

View File

@@ -1,7 +1,7 @@
import { useApolloClient } from "@apollo/client";
import { Button, notification } from "antd";
import _ from "lodash";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import React, { useState } from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -49,7 +49,7 @@ export function TtApproveButton({
})),
approvalIds: selectedTickets,
approvalUpdate: {
approved_at: day(),
approved_at: dayjs(),
approved_by: currentUser.email
}
}

View File

@@ -28,7 +28,7 @@ import {
} from "../../firebase/firebase.utils";
import { QUERY_EULA } from "../../graphql/bodyshop.queries";
import client from "../../utils/GraphQLClient";
import day from "../../utils/day";
import dayjs from "../../utils/day";
import InstanceRenderManager from "../../utils/instanceRenderMgr";
import {
checkInstanceId,
@@ -96,7 +96,7 @@ export function* isUserAuthenticated() {
const eulaQuery = yield client.query({
query: QUERY_EULA,
variables: {
now: day()
now: dayjs()
}
});
@@ -314,8 +314,7 @@ export function* SetAuthLevelFromShopDetails({ payload }) {
try {
const userEmail = yield select((state) => state.user.currentUser.email);
try {
console.log("Setting shop timezone.");
day.tz.setDefault(payload.timezone);
dayjs.tz.setDefault(payload.timezone);
} catch (error) {
console.log(error);
}

View File

@@ -1,5 +1,6 @@
import dayjs from "dayjs";
import "dayjs/locale/en";
import dayjsBusinessDays from "dayjs-business-days2";
import isSameOrAfter from "dayjs/plugin/isSameOrAfter";
import updateLocale from "dayjs/plugin/updateLocale";
@@ -64,4 +65,6 @@ dayjs.extend(minMax);
dayjs.extend(isBetween);
dayjs.extend(dayjsBusinessDays);
dayjs.locale("en");
export default dayjs;

View File

@@ -63,7 +63,7 @@ exports.default = function ({ bodyshop, jobs_by_pk, qbo = false, items, taxCodes
);
if (!account) {
logger.log("qbxml-receivables-no-account", "warn", null, jobline.id, null);
logger.log("qbxml-receivables-no-account", "warn", null, jobline.id);
throw new Error(
`A matching account does not exist for the part allocation. Center: ${jobline.profitcenter_part}`
);

View File

@@ -45,7 +45,7 @@ exports.default = async (req, res) => {
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payable-create", "DEBUG", req.user.email, billsToQuery);
logger.log("qbo-payable-create", "DEBUG", req.user.email, null, { billsToQuery });
const result = await client
.setHeaders({ Authorization: BearerToken })
@@ -91,6 +91,12 @@ exports.default = async (req, res) => {
ret.push({ billid: bill.id, success: true });
} catch (error) {
logger.log("qbo-paybles-create-error", "ERROR", req.user.email, null, {
error:
(error && error.authResponse && error.authResponse.body) ||
error.response?.data?.Fault?.Error.map((e) => e.Detail).join(", ") ||
(error && error.message)
});
ret.push({
billid: bill.id,
success: false,
@@ -122,7 +128,7 @@ exports.default = async (req, res) => {
res.status(200).json(ret);
} catch (error) {
//console.log(error);
logger.log("qbo-payable-create-error", "ERROR", req.user.email, {
logger.log("qbo-payable-create-error", "ERROR", req.user.email, null, {
error: error.message,
stack: error.stack
});

View File

@@ -49,7 +49,7 @@ exports.default = async (req, res) => {
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("qbo-payment-create", "DEBUG", req.user.email, paymentsToQuery);
logger.log("qbo-payment-create", "DEBUG", req.user.email, null, { paymentsToQuery });
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
payments: paymentsToQuery
@@ -152,7 +152,7 @@ exports.default = async (req, res) => {
ret.push({ paymentid: payment.id, success: true });
} catch (error) {
logger.log("qbo-payment-create-error", "ERROR", req.user.email, {
logger.log("qbo-payment-create-error", "ERROR", req.user.email, null, {
error: (error && error.authResponse && error.authResponse.body) || (error && error.message)
});
//Add the export log error.
@@ -183,7 +183,7 @@ exports.default = async (req, res) => {
res.status(200).json(ret);
} catch (error) {
//console.log(error);
logger.log("qbo-payment-create-error", "ERROR", req.user.email, {
logger.log("qbo-payment-create-error", "ERROR", req.user.email, null, {
error: error.message,
stack: error.stack
});

View File

@@ -42,9 +42,10 @@ exports.default = async (req, res) => {
//For each invoice.
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log("qbxml-payable-error", "ERROR", req.user.email, req.body.billsToQuery, {
error: error.message,
stack: error.stack
logger.log("qbxml-payable-error", "ERROR", req?.user?.email, null, {
billsToQuery: req?.body?.billsToQuery,
error: error?.message,
stack: error?.stack
});
res.status(400).send(JSON.stringify(error));
}

View File

@@ -21,7 +21,9 @@ exports.default = async (req, res) => {
const client = req.userGraphQLClient;
try {
logger.log("qbxml-payments-create", "DEBUG", req.user.email, req.body.paymentsToQuery, null);
logger.log("qbxml-payments-create", "DEBUG", req?.user?.email, null, {
paymentsToQuery: req.body?.paymentsToQuery
});
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
payments: paymentsToQuery
@@ -62,7 +64,8 @@ exports.default = async (req, res) => {
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log("qbxml-payments-error", "error", req.user.email, req.body.paymentsToQuery, {
logger.log("qbxml-payments-error", "error", req?.user?.email, null, {
paymentsToQuery: req.body?.paymentsToQuery,
error: error.message,
stack: error.stack
});

View File

@@ -23,7 +23,9 @@ exports.default = async (req, res) => {
const client = req.userGraphQLClient;
try {
logger.log("qbxml-receivables-create", "DEBUG", req.user.email, req.body.jobIds, null);
logger.log("qbxml-receivables-create", "DEBUG", req?.user?.email, null, {
jobIds: req?.body?.jobIds
});
const result = await client
.setHeaders({ Authorization: BearerToken })
@@ -74,7 +76,8 @@ exports.default = async (req, res) => {
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log("qbxml-receivables-error", "error", req.user.email, req.body.jobIds, {
logger.log("qbxml-receivables-error", "error", req?.user?.email, null, {
jobIds: req.body?.jobIds,
error: error.message,
stack: error.stack
});

View File

@@ -39,8 +39,8 @@ exports.default = async function ReloadCdkMakes(req, res) {
const deleteResult = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.DELETE_ALL_DMS_VEHICLES, {});
console.log("🚀 ~ file: cdk-get-makes.js ~ line 53 ~ deleteResult", deleteResult);
// logger.logger.debug("🚀 ~ file: cdk-get-makes.js ~ line 53 ~ deleteResult", { deleteResult });
//Insert the new ones.
const insertResult = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_DMS_VEHICLES, {
@@ -59,6 +59,7 @@ exports.default = async function ReloadCdkMakes(req, res) {
cdk_dealerid,
count: newList.length
});
res.sendStatus(200);
} catch (error) {
logger.log("cdk-replace-makes-models-error", "ERROR", req.user.email, null, {

View File

@@ -3,6 +3,7 @@ const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { default: InstanceManager } = require("../utils/instanceMgr");
const aws = require("@aws-sdk/client-ses");
const nodemailer = require("nodemailer");
const logger = require("../utils/logger");
const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);
@@ -19,7 +20,7 @@ const sesConfig = {
if (isLocal) {
sesConfig.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
console.log(`SES Mailer set to LocalStack end point: ${sesConfig.endpoint}`);
logger.logger.debug(`SES Mailer set to LocalStack end point: ${sesConfig.endpoint}`);
}
const ses = new aws.SES(sesConfig);

View File

@@ -21,7 +21,7 @@ const getImage = async (imageUrl) => {
// Log the email in the database
const logEmail = async (req, email) => {
try {
const insertresult = await client.request(queries.INSERT_EMAIL_AUDIT, {
await client.request(queries.INSERT_EMAIL_AUDIT, {
email: {
to: email.to,
cc: email.cc,
@@ -34,13 +34,13 @@ const logEmail = async (req, email) => {
status: "Sent"
}
});
console.log(insertresult);
} catch (error) {
logger.log("email-log-error", "error", req.user.email, null, {
logger.log("email-log-error", "error", req?.user?.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject
to: req?.body?.to,
cc: req?.body?.cc,
subject: req?.body?.subject,
email
// info,
});
}
@@ -70,12 +70,11 @@ const sendServerEmail = async ({ subject, text }) => {
}
},
(err, info) => {
console.log(err || info);
logger.log("server-email-failure", err ? "error" : "debug", null, null, { message: err || info });
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
logger.log("server-email-failure", "error", null, null, { error });
}
};
@@ -88,8 +87,7 @@ const sendProManagerWelcomeEmail = async ({ to, subject, html }) => {
html
});
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
logger.log("server-email-failure", "error", null, null, { error });
}
};
@@ -108,12 +106,12 @@ const sendTaskEmail = async ({ to, subject, type = "text", html, text, attachmen
attachments: attachments || null
},
(err, info) => {
console.log(err || info);
// logger.log signature: (message, type, user, record, meta)
logger.log("server-email", err ? "error" : "debug", null, null, { message: err || info });
}
);
} catch (error) {
console.log(error);
logger.log("server-email-failure", "error", null, null, error);
logger.log("server-email-failure", "error", null, null, { error });
}
};
@@ -184,9 +182,8 @@ const sendEmail = async (req, res) => {
}
},
(err, info) => {
console.log(err || info);
if (info) {
logger.log("send-email-success", "DEBUG", req.user.email, null, {
logger.log("send-email-success", "DEBUG", req?.user?.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
@@ -205,7 +202,7 @@ const sendEmail = async (req, res) => {
success: true //response: info
});
} else {
logger.log("send-email-failure", "ERROR", req.user.email, null, {
logger.log("send-email-failure", "ERROR", req?.user?.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
@@ -290,7 +287,9 @@ ${body.bounce?.bouncedRecipients.map(
`
},
(err, info) => {
console.log("***", err || info);
logger.log("sns-error", err ? "error" : "debug", "api", null, {
message: err ? JSON.stringify(error) : info
});
}
);
}

View File

@@ -18,10 +18,10 @@ const tasksEmailQueue = taskEmailQueue();
const tasksEmailQueueCleanup = async () => {
try {
// Example async operation
console.log("Performing Tasks Email Reminder process cleanup...");
// console.log("Performing Tasks Email Reminder process cleanup...");
await new Promise((resolve) => tasksEmailQueue.destroy(() => resolve()));
} catch (err) {
console.error("Tasks Email Reminder process cleanup failed:", err);
// console.error("Tasks Email Reminder process cleanup failed:", err);
}
};

View File

@@ -10,8 +10,9 @@ const logger = require("../utils/logger");
const taskEmailQueue = () =>
new Queue(
(taskIds, cb) => {
console.log("Processing reminds for taskIds: ", taskIds.join(", "));
logger.log("Processing reminds for taskIds: ", "silly", null, null, {
taskIds: taskIds?.join(", ")
});
// Set the remind_at_sent to the current time.
const now = moment().toISOString();

View File

@@ -269,10 +269,14 @@ const sendNotification = async (req, res) => {
})
.then((response) => {
// Response is a message ID string.
console.log("Successfully sent message:", response);
logger.log("Successfully sent message:", "debug", req?.user?.email, null, {
response
});
})
.catch((error) => {
console.log("Error sending message:", error);
logger.log("Successfully sent message:", "error", req?.user?.email, null, {
error
});
});
res.sendStatus(200);

View File

@@ -19,7 +19,8 @@ async function JobCosting(req, res) {
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null);
//Uncomment for further testing
// logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null);
try {
const resp = await client.setHeaders({ Authorization: BearerToken }).request(queries.QUERY_JOB_COSTING_DETAILS, {
@@ -46,7 +47,10 @@ async function JobCostingMulti(req, res) {
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("job-costing-multi-start", "DEBUG", req.user.email, jobids, null);
//Uncomment for further testing
// logger.log("job-costing-multi-start", "DEBUG", req?.user?.email, null, {
// jobids
// });
try {
const resp = await client
@@ -244,7 +248,8 @@ async function JobCostingMulti(req, res) {
data: ret
});
} catch (error) {
logger.log("job-costing-multi-error", "ERROR", req.user.email, [jobids], {
logger.log("job-costing-multi-error", "ERROR", req?.user?.email, null, {
jobids,
message: error.message,
stack: error.stack
});
@@ -282,7 +287,13 @@ function GenerateCostingData(job) {
if (val.mod_lbr_ty) {
const laborProfitCenter = val.profitcenter_labor || defaultProfits[val.mod_lbr_ty] || "Unknown";
if (laborProfitCenter === "Unknown") console.log("Unknown type", val.line_desc, val.mod_lbr_ty);
//Uncomment for further testing
// if (laborProfitCenter === "Unknown") {
// logger.log("job-costing unknown type", "debug", null, null, {
// line_desc: val.line_desc,
// mod_lbr_ty: val.mod_lbr_ty
// });
// }
const rateName = `rate_${(val.mod_lbr_ty || "").toLowerCase()}`;
@@ -349,10 +360,22 @@ function GenerateCostingData(job) {
if (val.part_type && val.part_type !== "PAE" && val.part_type !== "PAS" && val.part_type !== "PASL") {
const partsProfitCenter = val.profitcenter_part || defaultProfits[val.part_type] || "Unknown";
if (partsProfitCenter === "Unknown") console.log("Unknown type", val.line_desc, val.part_type);
//Uncomment for further testing
// if (partsProfitCenter === "Unknown" || !partsProfitCenter) {
// logger.log(
// partsProfitCenter === "Unknown"
// ? "job-costing unknown type"
// : "Unknown cost/profit center mapping for parts.",
// "debug",
// null,
// null,
// {
// line_desc: val.line_desc,
// part_type: val.part_type
// }
// );
// }
if (!partsProfitCenter)
console.log("Unknown cost/profit center mapping for parts.", val.line_desc, val.part_type);
let partsAmount = Dinero({
amount: val.act_price_before_ppc
? Math.round(val.act_price_before_ppc * 100)
@@ -409,10 +432,22 @@ function GenerateCostingData(job) {
if (val.part_type && val.part_type !== "PAE" && (val.part_type === "PAS" || val.part_type === "PASL")) {
const partsProfitCenter = val.profitcenter_part || defaultProfits[val.part_type] || "Unknown";
if (partsProfitCenter === "Unknown") console.log("Unknown type", val.line_desc, val.part_type);
//Uncomment for further testing
// if (partsProfitCenter === "Unknown" || !partsProfitCenter) {
// logger.log(
// partsProfitCenter === "Unknown"
// ? "job-costing unknown type"
// : "job-costing Unknown cost/profit center mapping for sublet",
// "debug",
// null,
// null,
// {
// line_desc: val.line_desc,
// part_type: val.part_type
// }
// );
// }
if (!partsProfitCenter)
console.log("Unknown cost/profit center mapping for sublet.", val.line_desc, val.part_type);
const partsAmount = Dinero({
amount: Math.round((val.act_price || 0) * 100)
})
@@ -443,9 +478,14 @@ function GenerateCostingData(job) {
//If so, use it, otherwise try to use the same from the auto-allocate logic in IO app jobs-close-auto-allocate.
const partsProfitCenter = val.profitcenter_part || getAdditionalCostCenter(val, defaultProfits) || "Unknown";
if (partsProfitCenter === "Unknown") {
console.log("Unknown type", val.line_desc, val.part_type);
}
//Uncomment for further testing
// if (partsProfitCenter === "Unknown") {
// logger.log("job-costing unknown type", "debug", null, null, {
// line_desc: val.line_desc,
// part_type: val.part_type
// });
// }
const partsAmount = Dinero({
amount: Math.round((val.act_price || 0) * 100)
})
@@ -476,7 +516,7 @@ function GenerateCostingData(job) {
if (!hasMapaLine) {
if (!jobLineTotalsByProfitCenter.additional[defaultProfits["MAPA"]])
jobLineTotalsByProfitCenter.additional[defaultProfits["MAPA"]] = Dinero();
jobLineTotalsByProfitCenter.additional[defaultProfits["MAPA"]] = jobLineTotalsByProfitCenter.additional[
defaultProfits["MAPA"]
].add(

View File

@@ -26,7 +26,7 @@ exports.totalsSsu = async function (req, res) {
const BearerToken = req.BearerToken;
const client = req.userGraphQLClient;
logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, id, null);
logger.log("job-totals-ssu-USA", "DEBUG", req?.user?.email, id);
try {
const job = await client.setHeaders({ Authorization: BearerToken }).request(queries.GET_JOB_BY_PK, {
@@ -47,7 +47,7 @@ exports.totalsSsu = async function (req, res) {
res.status(200).send();
} catch (error) {
logger.log("job-totals-ssu-USA-error", "ERROR", req.user.email, id, {
logger.log("job-totals-ssu-USA-error", "ERROR", req?.user?.email, id, {
jobid: id,
error
});
@@ -84,12 +84,10 @@ async function Totals(req, res) {
const logger = req.logger;
const client = req.userGraphQLClient;
logger.log("job-totals-USA", "DEBUG", req.user.email, job.id, {
logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, job.id, {
jobid: job.id
});
logger.log("job-totals-ssu-USA", "DEBUG", req.user.email, id, null);
await AutoAddAtsIfRequired({ job, client });
try {
@@ -961,7 +959,9 @@ function CalculateTaxesTotals(job, otherTotals) {
}
}
} catch (error) {
console.error("Key with issue", key);
logger.log("job-totals-USA Key with issue", "error", null, null, {
key
});
}
});
@@ -1026,7 +1026,9 @@ function CalculateTaxesTotals(job, otherTotals) {
totalTaxByTier[taxTierKey] = totalTaxByTier[taxTierKey].add(taxAmountToAdd);
}
} catch (error) {
console.error("PFP Calculation error", error);
logger.log("job-totals-USA - PFP Calculation Error", "error", null, null, {
error
});
}
});

View File

@@ -15,7 +15,7 @@ const jobUpdated = async (req, res) => {
});
}
logger.log("job-update", "INFO", req.user?.email, null, {
logger.log("job-update", "DEBUG", req.user?.email, null, {
message: `Job updated event received from Hasura`,
jobid: req?.body?.event?.data?.new?.id
});

View File

@@ -16,7 +16,11 @@ const validateFirebaseIdTokenMiddleware = async (req, res, next) => {
(!req.headers.authorization || !req.headers.authorization.startsWith("Bearer ")) &&
!(req.cookies && req.cookies.__session)
) {
console.error("Unauthorized attempt. No authorization provided.");
logger.log("api-authorization-call", "warn", req?.user?.email, null, {
type: "unauthorized",
path: req.path,
body: req.body
});
return res.status(403).send("Unauthorized");
}
@@ -32,10 +36,10 @@ const validateFirebaseIdTokenMiddleware = async (req, res, next) => {
idToken = req.cookies.__session;
} else {
// No cookie
console.error("Unauthorized attempt. No cookie provided.");
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "no-cookie"
logger.log("api-unauthorized-call", "warn", null, null, {
type: "unauthorized",
path: req.path,
body: req.body
});
return res.status(403).send("Unauthorized");
@@ -47,11 +51,11 @@ const validateFirebaseIdTokenMiddleware = async (req, res, next) => {
req.user = decodedIdToken;
next();
} catch (error) {
logger.log("api-unauthorized-call", "WARN", null, null, {
logger.log("api-unauthorized-call", "warn", null, null, {
path: req.path,
body: req.body,
type: "unauthroized",
type: "unauthorized",
...error
});

View File

@@ -9,11 +9,9 @@ require("dotenv").config({
});
exports.mixdataUpload = async (req, res) => {
const { bodyshopid } = req.body;
const client = req.userGraphQLClient;
logger.log("job-mixdata-upload", "DEBUG", req.user.email, null, null);
logger.log("job-mixdata-upload", "DEBUG", req?.user?.email, null, null);
try {
for (const element of req.files) {
@@ -23,7 +21,7 @@ exports.mixdataUpload = async (req, res) => {
explicitArray: false
});
logger.log("job-mixdata-parse", "DEBUG", req.user.email, inboundRequest);
logger.log("job-mixdata-parse", "DEBUG", req?.user?.email, null, { inboundRequest });
const ScaleType = DetermineScaleType(inboundRequest);
const RoNumbersFromInboundRequest = GetListOfRos(inboundRequest, ScaleType);
@@ -70,7 +68,7 @@ function DetermineScaleType(inboundRequest) {
const ret = { type: "", verson: 0 };
//PPG Mix Data
if (inboundRequest.PPG && inboundRequest.PPG.Header.Protocol.Name === "PPG") {
if (inboundRequest?.PPG?.Header?.Protocol?.Name === "PPG") {
return {
type: inboundRequest.PPG.Header.Protocol.Name,
company: "PPG",
@@ -80,13 +78,13 @@ function DetermineScaleType(inboundRequest) {
}
function GetListOfRos(inboundRequest, ScaleType) {
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
if (ScaleType?.company === "PPG" && ScaleType?.version === "1.3.0") {
return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map((r) => r.RONumber);
}
}
function GenerateMixDataArray(inboundRequest, ScaleType, jobHash) {
if (ScaleType.company === "PPG" && ScaleType.version === "1.3.0") {
if (ScaleType?.company === "PPG" && ScaleType?.version === "1.3.0") {
return inboundRequest.PPG.MixDataInterface.ROData.RepairOrders.RO.map((r) => {
return {
jobid: jobHash[r.RONumber]?.jobid,

View File

@@ -45,12 +45,12 @@ exports.payall = async function (req, res) {
const path = diffParser(diff);
if (diff.op === "add") {
console.log(Object.keys(diff.val));
// console.log(Object.keys(diff.val));
if (typeof diff.val === "object" && Object.keys(diff.val).length > 1) {
//Multiple values to add.
Object.keys(diff.val).forEach((key) => {
console.log("Hours", diff.val[key][Object.keys(diff.val[key])[0]]);
console.log("Rate", Object.keys(diff.val[key])[0]);
// console.log("Hours", diff.val[key][Object.keys(diff.val[key])[0]]);
// console.log("Rate", Object.keys(diff.val[key])[0]);
ticketsToInsert.push({
task_name: "Pay All",
jobid: job.id,

View File

@@ -49,7 +49,7 @@ exports.job = async (req, res) => {
if (bucketId) {
load.productionTotal[bucketId].count = load.productionTotal[bucketId].count + 1;
} else {
console.log("Uh oh, this job doesn't fit in a bucket!", item);
// console.log("Uh oh, this job doesn't fit in a bucket!", item);
}
});
@@ -254,7 +254,7 @@ const CalculateLoad = (currentLoad, buckets, jobsIn, jobsOut) => {
if (bucketId) {
newLoad[bucketId].count = newLoad[bucketId].count + 1;
} else {
console.log("[Util Arr Job]Uh oh, this job doesn't fit in a bucket!", job);
// console.log("[Util Arr Job]Uh oh, this job doesn't fit in a bucket!", job);
}
});
@@ -263,10 +263,10 @@ const CalculateLoad = (currentLoad, buckets, jobsIn, jobsOut) => {
if (bucketId) {
newLoad[bucketId].count = newLoad[bucketId].count - 1;
if (newLoad[bucketId].count < 0) {
console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job);
// console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job);
}
} else {
console.log("[Util Out Job]Uh oh, this job doesn't fit in a bucket!", job);
// console.log("[Util Out Job]Uh oh, this job doesn't fit in a bucket!", job);
}
});

View File

@@ -39,7 +39,7 @@ const processor = async (req, res) => {
}
});
} catch (error) {
console.log("error", error);
// console.log("error", error);
res.status(400).send(error);
}
};

View File

@@ -6,10 +6,11 @@ const client = require("../graphql-client/graphql-client").client;
const emailer = require("../email/sendemail");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const logger = require("../utils/logger");
exports.taskHandler = async (req, res) => {
try {
const { bodyshopid, query, variables, text, to, subject, timezone } = req.body;
const { query, variables, text, to, subject, timezone } = req.body;
//Check the variables to see if they are an object.
Object.keys(variables).forEach((key) => {
@@ -32,8 +33,10 @@ exports.taskHandler = async (req, res) => {
text,
attachments: [{ filename: "query.csv", content: csv }]
})
.catch((err) => {
console.error("Errors sending CSV Email.");
.catch((error) => {
logger.log("Tasks - Error sending CSV EMAIL", "error", req?.user?.email, null, {
error
});
});
return res.status(200).send(csv);

View File

@@ -8,7 +8,7 @@ const InstanceManager = require("../utils/instanceMgr").default;
const winston = require("winston");
const WinstonCloudWatch = require("winston-cloudwatch");
const { isString, isEmpty } = require("lodash");
const { networkInterfaces } = require("node:os");
const { networkInterfaces, hostname } = require("node:os");
const createLogger = () => {
try {
@@ -28,9 +28,6 @@ const createLogger = () => {
if (isLocal) {
winstonCloudwatchTransportDefaults.awsOptions.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
console.log(
`Winston Transports set to LocalStack end point: ${winstonCloudwatchTransportDefaults.awsOptions.endpoint}`
);
}
const levelFilter = (levels) => {
@@ -43,19 +40,22 @@ const createLogger = () => {
})();
};
const getPrivateIP = () => {
// Resolve a stable identifier for this host for log records:
// prefer the OS hostname, then the first external IPv4 address,
// and finally fall back to the loopback address.
const getHostNameOrIP = () => {
  // Hostname is the preferred, human-readable identifier.
  const osHostName = hostname();
  if (osHostName) return osHostName;
  // No hostname available — scan interfaces for a non-internal IPv4 address.
  const allInterfaces = networkInterfaces();
  for (const addresses of Object.values(allInterfaces)) {
    const external = addresses.find((addr) => addr.family === "IPv4" && !addr.internal);
    if (external) return external.address;
  }
  // Last resort: loopback.
  return "127.0.0.1";
};
const createProductionTransport = (level, logStreamName, filters) => {
return new WinstonCloudWatch({
level,
@@ -65,7 +65,7 @@ const createLogger = () => {
});
};
const hostname = process.env.HOSTNAME || getPrivateIP();
const internalHostname = process.env.HOSTNAME || getHostNameOrIP();
const getDevelopmentTransports = () => [
new winston.transports.Console({
@@ -74,7 +74,7 @@ const createLogger = () => {
winston.format.colorize(),
winston.format.timestamp(),
winston.format.printf(({ level, message, timestamp, user, record, meta }) => {
const hostnameColor = `\x1b[34m${hostname}\x1b[0m`; // Blue
const hostnameColor = `\x1b[34m${internalHostname}\x1b[0m`; // Blue
const timestampColor = `\x1b[36m${timestamp}\x1b[0m`; // Cyan
const labelColor = "\x1b[33m"; // Yellow
const separatorColor = "\x1b[35m|\x1b[0m"; // Magenta for separators
@@ -106,13 +106,19 @@ const createLogger = () => {
: [...getDevelopmentTransports(), ...getProductionTransports()]
});
if (isLocal) {
winstonLogger.debug(
`CloudWatch set to LocalStack end point: ${winstonCloudwatchTransportDefaults.awsOptions.endpoint}`
);
}
const log = (message, type, user, record, meta) => {
winstonLogger.log({
level: type.toLowerCase(),
message,
user,
record,
hostname,
hostname: internalHostname,
meta
});
};

View File

@@ -41,7 +41,8 @@ const redisSocketEvents = ({
// Register Socket Events
const registerSocketEvents = (socket) => {
createLogEvent(socket, "debug", `Registering RedisIO Socket Events.`);
// Uncomment for further testing
// createLogEvent(socket, "debug", `Registering RedisIO Socket Events.`);
// Token Update Events
const registerUpdateEvents = (socket) => {
@@ -54,12 +55,13 @@ const redisSocketEvents = ({
// If We ever want to persist user Data across workers
// await setSessionData(socket.id, "user", user);
createLogEvent(socket, "debug", "Token updated successfully");
// Uncomment for further testing
// createLogEvent(socket, "debug", "Token updated successfully");
socket.emit("token-updated", { success: true });
} catch (error) {
if (error.code === "auth/id-token-expired") {
createLogEvent(socket, "WARNING", "Stale token received, waiting for new token");
createLogEvent(socket, "warn", "Stale token received, waiting for new token");
socket.emit("token-updated", {
success: false,
error: "Stale token."
@@ -80,7 +82,7 @@ const redisSocketEvents = ({
try {
const room = getBodyshopRoom(bodyshopUUID);
socket.join(room);
createLogEvent(socket, "debug", `Client joined bodyshop room: ${room}`);
// createLogEvent(socket, "debug", `Client joined bodyshop room: ${room}`);
} catch (error) {
createLogEvent(socket, "error", `Error joining room: ${error}`);
}
@@ -114,7 +116,9 @@ const redisSocketEvents = ({
// Disconnect Events
const registerDisconnectEvents = (socket) => {
const disconnect = () => {
createLogEvent(socket, "debug", `User disconnected.`);
// Uncomment for further testing
// createLogEvent(socket, "debug", `User disconnected.`);
const rooms = Array.from(socket.rooms).filter((room) => room !== socket.id);
for (const room of rooms) {
socket.leave(room);