Merged in release/2021-08-27 (pull request #186)

Release/2021 08 27

Approved-by: Patrick Fic
This commit is contained in:
Patrick Fic
2021-08-23 15:34:50 +00:00
49 changed files with 45250 additions and 1118 deletions

View File

@@ -35796,6 +35796,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>flat_rate</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>memo</name>
<definition_loaded>false</definition_loaded>

View File

@@ -1,9 +1,25 @@
// craco.config.js
const TerserPlugin = require("terser-webpack-plugin");
const CracoLessPlugin = require("craco-less");
const SentryWebpackPlugin = require("@sentry/webpack-plugin");
module.exports = {
plugins: [
{
plugin: SentryWebpackPlugin,
options: {
// sentry-cli configuration
authToken:
"6b45b028a02342db97a9a2f92c0959058665443d379d4a3a876430009e744260",
org: "snapt-software",
project: "imexonline",
release: process.env.REACT_APP_GIT_SHA,
// webpack-specific configuration
include: ".",
ignore: ["node_modules", "webpack.config.js"],
},
},
{
plugin: CracoLessPlugin,
options: {
@@ -53,4 +69,5 @@ module.exports = {
},
}),
},
devtool: "source-map",
};

43861
client/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -8,6 +8,10 @@
"@craco/craco": "^6.2.0",
"@fingerprintjs/fingerprintjs": "^3.2.0",
"@lourenci/react-kanban": "^2.1.0",
"@openreplay/tracker": "^3.2.1",
"@openreplay/tracker-assist": "^3.0.3",
"@openreplay/tracker-graphql": "^3.0.0",
"@openreplay/tracker-redux": "^3.0.0",
"@sentry/react": "^6.10.0",
"@sentry/tracing": "^6.10.0",
"@stripe/react-stripe-js": "^1.4.0",
@@ -108,6 +112,7 @@
]
},
"devDependencies": {
"@sentry/webpack-plugin": "^1.17.1",
"redux-logger": "^3.0.6",
"source-map-explorer": "^2.5.2"
}

View File

@@ -8,8 +8,28 @@ import { useTranslation } from "react-i18next";
import GlobalLoadingBar from "../components/global-loading-bar/global-loading-bar.component";
import client from "../utils/GraphQLClient";
import App from "./App";
import trackerGraphQL from "@openreplay/tracker-graphql";
//import trackerRedux from "@openreplay/tracker-redux";
import Tracker from "@openreplay/tracker";
import trackerAssist from "@openreplay/tracker-assist";
moment.locale("en-US");
export const tracker = new Tracker({
projectKey: "FPjeYIbwJyvhrVVTTLHo",
ingestPoint: "https://replay.bodyshop.app/ingest",
// ...(process.env.NODE_ENV === null || process.env.NODE_ENV === "development"
// ? { __DISABLE_SECURE_MODE: true }
// : {}),
// beaconSize: 10485760,
onStart: ({ sessionID }) =>
console.log(
"******** OpenReplay tracker started with session: ",
sessionID
),
});
tracker.use(trackerAssist({ confirmText: "Confirm help" })); // check the list of available options below
export const recordGraphQL = tracker.use(trackerGraphQL());
tracker.start();
if (process.env.NODE_ENV === "production") LogRocket.init("gvfvfw/bodyshopapp");
export default function AppContainer() {

View File

@@ -83,8 +83,10 @@ export function ContractsList({
render: (text, record) => (
<Link to={`/manage/courtesycars/${record.courtesycar.id}`}>{`${
record.courtesycar.year
} ${record.courtesycar.make} ${record.courtesycar.model} ${
record.courtesycar.plate ? `(${record.courtesycar.plate})` : ""
} ${record.courtesycar.make} ${record.courtesycar.model}${
record.courtesycar.plate ? ` (${record.courtesycar.plate})` : ""
}${
record.courtesycar.fleetnumber ? ` (${record.courtesycar.fleetnumber})` : ""
}`}</Link>
),
},

View File

@@ -106,6 +106,12 @@ export default function PartsOrderModalComponent({
label={t("parts_orders.fields.quantity")}
key={`${index}quantity`}
name={[field.name, "quantity"]}
rules={[
{
required: true,
//message: t("general.validation.required"),
},
]}
>
<InputNumber />
</Form.Item>

View File

@@ -1,5 +1,5 @@
import { useQuery } from "@apollo/client";
import { Form, Input, InputNumber, Select } from "antd";
import { Form, Input, InputNumber, Select, Switch } from "antd";
import React from "react";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -113,7 +113,17 @@ export function TimeTicketModalComponent({
},
]}
>
<EmployeeSearchSelect options={employeeAutoCompleteOptions} />
<EmployeeSearchSelect
options={employeeAutoCompleteOptions}
onSelect={(value) => {
console.log(value);
const emps =
employeeAutoCompleteOptions &&
employeeAutoCompleteOptions.filter((e) => e.id === value)[0];
console.log(emps);
form.setFieldsValue({ flat_rate: emps && emps.flat_rate });
}}
/>
</Form.Item>
<Form.Item
shouldUpdate={(prev, cur) => prev.employeeid !== cur.employeeid}
@@ -140,6 +150,14 @@ export function TimeTicketModalComponent({
);
}}
</Form.Item>
<Form.Item
name="flat_rate"
label={t("timetickets.fields.flat_rate")}
valuePropName="checked"
>
<Switch />
</Form.Item>
</LayoutFormRow>
<LayoutFormRow>

View File

@@ -58,6 +58,7 @@ export const GET_LINE_TICKET_BY_PK = gql`
jobid
employeeid
memo
flat_rate
employee {
id
first_name

View File

@@ -318,6 +318,7 @@ export const QUERY_JOB_COSTING_DETAILS = gql`
cost_center
actualhrs
productivehrs
flat_rate
}
}
}

View File

@@ -14,6 +14,7 @@ export const QUERY_TICKETS_BY_JOBID = gql`
id
memo
jobid
flat_rate
employee {
employee_number
first_name
@@ -42,6 +43,7 @@ export const QUERY_TIME_TICKETS_IN_RANGE = gql`
productivehrs
memo
jobid
flat_rate
job {
id
ro_number
@@ -72,6 +74,7 @@ export const INSERT_NEW_TIME_TICKET = gql`
ciecacode
date
memo
flat_rate
}
}
}
@@ -98,6 +101,7 @@ export const UPDATE_TIME_TICKET = gql`
updated_at
jobid
date
flat_rate
memo
}
}
@@ -121,6 +125,7 @@ export const QUERY_ACTIVE_TIME_TICKETS = gql`
clockon
memo
cost_center
flat_rate
jobid
job {
id

View File

@@ -27,7 +27,7 @@ const middlewares = [
if (process.env.NODE_ENV === "development") {
middlewares.push(createLogger({ collapsed: true, diff: true }));
}
//middlewares.push(Tracker.use(trackerRedux()));
const composeEnhancers =
typeof window === "object" && window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__
? window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__({

View File

@@ -29,6 +29,7 @@ import {
} from "./user.actions";
import UserActionTypes from "./user.types";
import * as Sentry from "@sentry/browser";
import { tracker } from "../../App/App.container";
export function* onEmailSignInStart() {
yield takeLatest(UserActionTypes.EMAIL_SIGN_IN_START, signInWithEmail);
@@ -68,6 +69,7 @@ export function* isUserAuthenticated() {
}
LogRocket.identify(user.email);
tracker.setUserID(user.email);
yield put(
signInSuccess({
uid: user.uid,
@@ -169,6 +171,8 @@ export function* onSignInSuccess() {
export function* signInSuccessSaga({ payload }) {
LogRocket.identify(payload.email);
tracker.setUserID(payload.email);
try {
// window.$crisp.push(["set", "user:email", [payload.email]]);
console.log("$crisp set nickname", [payload.displayName || payload.email]);

View File

@@ -2160,6 +2160,7 @@
"date": "Ticket Date",
"efficiency": "Efficiency",
"employee": "Employee",
"flat_rate": "Flat Rate?",
"memo": "Memo",
"productivehrs": "Productive Hours",
"ro_number": "Job to Post Against"

View File

@@ -2160,6 +2160,7 @@
"date": "",
"efficiency": "",
"employee": "",
"flat_rate": "",
"memo": "",
"productivehrs": "",
"ro_number": ""

View File

@@ -2160,6 +2160,7 @@
"date": "",
"efficiency": "",
"employee": "",
"flat_rate": "",
"memo": "",
"productivehrs": "",
"ro_number": ""

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,5 @@
- args:
cascade: false
read_only: false
sql: ALTER TABLE "public"."timetickets" DROP COLUMN "flat_rate";
type: run_sql

View File

@@ -0,0 +1,6 @@
- args:
cascade: false
read_only: false
sql: ALTER TABLE "public"."timetickets" ADD COLUMN "flat_rate" boolean NULL DEFAULT
false;
type: run_sql

View File

@@ -0,0 +1,39 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_insert_permission
- args:
permission:
check:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
set: {}
role: user
table:
name: timetickets
schema: public
type: create_insert_permission

View File

@@ -0,0 +1,40 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_insert_permission
- args:
permission:
check:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
set: {}
role: user
table:
name: timetickets
schema: public
type: create_insert_permission

View File

@@ -0,0 +1,40 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_select_permission
- args:
permission:
allow_aggregations: false
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
computed_fields: []
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
role: user
table:
name: timetickets
schema: public
type: create_select_permission

View File

@@ -0,0 +1,41 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_select_permission
- args:
permission:
allow_aggregations: false
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
computed_fields: []
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
role: user
table:
name: timetickets
schema: public
type: create_select_permission

View File

@@ -0,0 +1,39 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_update_permission
- args:
permission:
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
set: {}
role: user
table:
name: timetickets
schema: public
type: create_update_permission

View File

@@ -0,0 +1,40 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_update_permission
- args:
permission:
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
set: {}
role: user
table:
name: timetickets
schema: public
type: create_update_permission

View File

@@ -4097,6 +4097,7 @@ tables:
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
@@ -4116,6 +4117,7 @@ tables:
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
@@ -4144,6 +4146,7 @@ tables:
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo

View File

@@ -34,24 +34,13 @@ app.post("/sendemail", fb.validateFirebaseIdToken, sendEmail.sendEmail);
//Test route to ensure Express is responding.
app.get("/test", async function (req, res) {
logger.log("test-api", null, null, null);
const Commit = require("child_process").execSync(
const commit = require("child_process").execSync(
"git rev-parse --short HEAD"
);
logger.log("test-api-status", "DEBUG", "api", { commit });
res.status(200).send(`OK - ${Commit}`);
res.status(200).send(`OK - ${commit}`);
});
const test = require("./server/_test/test.js");
app.post("/test", test.testResponse);
//Accounting-IIF
const accountingIIF = require("./server/accounting/iif/iif");
app.post(
"/accounting/iif/receivables",
fb.validateFirebaseIdToken,
accountingIIF.receivables
);
//Accounting Qbxml
const accountQbxml = require("./server/accounting/qbxml/qbxml");
@@ -109,8 +98,8 @@ var scheduling = require("./server/scheduling/scheduling-job");
app.post("/scheduling/job", fb.validateFirebaseIdToken, scheduling.job);
//Handlebars Paths for Email/Report Rendering
var renderHandlebars = require("./server/render/renderHandlebars");
app.post("/render", fb.validateFirebaseIdToken, renderHandlebars.render);
// var renderHandlebars = require("./server/render/renderHandlebars");
// app.post("/render", fb.validateFirebaseIdToken, renderHandlebars.render);
var inlineCss = require("./server/render/inlinecss");
app.post("/render/inlinecss", fb.validateFirebaseIdToken, inlineCss.inlinecss);
@@ -168,7 +157,11 @@ const io = new Server(server, {
server.listen(port, (error) => {
if (error) throw error;
logger.log(`[${process.env.NODE_ENV}] Server running on port ${port}`);
logger.log(
`[${process.env.NODE_ENV || "DEVELOPMENT"}] Server running on port ${port}`,
"DEBUG",
"api"
);
});
exports.io = io;
require("./server/web-sockets/web-socket");

View File

@@ -1,74 +0,0 @@
const path = require("path");
const admin = require("../firebase/firebase-handler").admin;
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
exports.testResponse = async (req, res) => {
console.log("Test");
const uniqueTokens = [
"f7B-k-ceDNCEAIFYCfhF3M:APA91bEn-xOmUahCBMJBBDqXpVOZJnnb_qhWlo8eOPrIkvFeSc2nqaKd4D8zs3qqZ_VNgS_OhifsetJXcwtczO8N4k3xfDzCyI3i6j6YTUNK56QC-WNmVOLR2C_g-owy7hSvhGhWilZ3",
"eNdzsUqRBBZCM8LQKvqk6e:APA91bFgL0VQLf_TooYmHKQ7_b4H--ZmUYCdgiZpT7dxHSyEkpcCHUz33K7sKqgifUk8rMAEhSsHWa0TJgLbOJxWD6lJaGEpXn8G3PbunkJsJCNCA3CprMONylBr9d6hnQ5wnjUX2Gt6",
];
var message = {
notification: {
title: "Test Notification",
body: "Test Body",
//click_action: "TEST CLICK ACTION",
},
data: {
jobid: "1234",
title: "Test Notification",
body: "Test Body",
},
tokens: uniqueTokens,
// android: {
// notification: {
// body: "This is an FCM notification specifically for android.",
// title: "FCM Notification for Android",
// image: "/logo192.png",
// badge: "/logo192.png",
// },
// },
webpush: {
headers: {
// Urgency: "high",
},
notification: {
body: "This is a message from FCM to web",
requireInteraction: "true",
actions: [{ action: "the action - matched in sw", title: "Read" }],
// renotify: true,
//tag: "1234", image: "/logo192.png",
badge: "/logo192.png",
//badge: "/badge-icon.png",
},
},
};
// Send a message to the device corresponding to the provided
// registration token.
admin
.messaging()
.sendMulticast(message)
.then((response) => {
// Response is a message ID string.
console.log(
"[TEST] Successfully sent FCM Broadcast.:",
response
//JSON.stringify(response)
);
})
.catch((error) => {
console.log("Error sending message:", error);
});
res.status(200).send("OK");
};

View File

@@ -1,167 +0,0 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
exports.default = async (req, res) => {
const BearerToken = req.headers.authorization;
const { jobId } = req.body;
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
try {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, { id: jobId });
const { jobs_by_pk } = result;
const { bodyshop } = jobs_by_pk;
//Build the IIF file.
const response = [];
response.push(TRNS_HEADER);
response.push(
generateInvoiceHeader(jobs_by_pk, bodyshop.md_responsibility_centers.ar)
);
//Allocations
const invoice_allocation = jobs_by_pk.invoice_allocation;
Object.keys(invoice_allocation.partsAllocations).forEach(
(partsAllocationKey) => {
if (
!!!invoice_allocation.partsAllocations[partsAllocationKey].allocations
)
return;
invoice_allocation.partsAllocations[
partsAllocationKey
].allocations.forEach((alloc) => {
response.push(
generateInvoiceLine(
jobs_by_pk,
alloc,
bodyshop.md_responsibility_centers
)
);
});
}
);
Object.keys(invoice_allocation.labMatAllocations).forEach(
(AllocationKey) => {
if (!!!invoice_allocation.labMatAllocations[AllocationKey].allocations)
return;
invoice_allocation.labMatAllocations[AllocationKey].allocations.forEach(
(alloc) => {
response.push(
generateInvoiceLine(
jobs_by_pk,
alloc,
bodyshop.md_responsibility_centers
)
);
}
);
}
);
//End Allocations
//Taxes
const taxMapping = bodyshop.md_responsibility_centers.taxes;
const { federal_tax, state_tax, local_tax } = JSON.parse(
jobs_by_pk.job_totals
).totals;
const federal_tax_dinero = Dinero(federal_tax);
const state_tax_dinero = Dinero(state_tax);
const local_tax_dinero = Dinero(local_tax);
if (federal_tax_dinero.getAmount() > 0) {
response.push(
generateTaxLine(jobs_by_pk, federal_tax_dinero, "federal", taxMapping)
);
}
if (state_tax_dinero.getAmount() > 0) {
response.push(
generateTaxLine(jobs_by_pk, state_tax_dinero, "state", taxMapping)
);
}
if (local_tax_dinero.getAmount() > 0) {
response.push(
generateTaxLine(jobs_by_pk, local_tax_dinero, "local", taxMapping)
);
}
//End Taxes
response.push(END_TRNS);
//Prep the response and send it.
res.setHeader("Content-type", "application/octet-stream");
res.setHeader("Content-disposition", "attachment; filename=file.txt");
res.setHeader("filename", `${jobs_by_pk.ro_number}-RECEIVABLES.iif`);
res.send(response.join("\n"));
} catch (error) {
console.log("error", error);
res.status(400).send(JSON.stringify(error));
}
};
const TRNS_HEADER = `!TRNS TRNSID TRNSTYPE DATE ACCNT NAME CLASS AMOUNT DOCNUM MEMO CLEAR TOPRINT NAMEISTAXABLE ADDR1 ADDR2 ADDR3 ADDR4 DUEDATE TERMS OTHER1 PONUM
!SPL SPLID TRNSTYPE DATE ACCNT NAME CLASS AMOUNT DOCNUM MEMO CLEAR QNTY PRICE INVITEM PAYMETH TAXABLE VALADJ SERVICEDATE OTHER2 EXTRA
!ENDTRNS`;
const generateInvoiceHeader = (job, arMapping) =>
`TRNS INVOICE ${generateJobInvoiceDate(job)} ${arMapping.name} GUO DA Acct.# ${
job.ownerid
}:${job.ro_number} 0100 ${job.clm_total} ${job.ro_number} N N Y GUO DA Acct.# ${
job.ownr_id
}:${job.ro_number} ${job.ownr_addr1} ${job.ownr_city} ${job.ownr_st} ${
job.ownr_zip
} `;
const generateInvoiceLine = (job, allocation, responsibilityCenters) => {
const { amount, center } = allocation;
const DineroAmount = Dinero(amount);
const account = responsibilityCenters.profits.find(
(i) => i.name.toLowerCase() === center.toLowerCase()
);
if (!!!account) {
throw new Error(
`A matching account does not exist for the allocation. Center: ${center}`
);
}
return `SPL INVOICE ${generateJobInvoiceDate(job)} ${
account.accountname
} 0100 ${DineroAmount.multiply(-1).toFormat(DineroQbFormat)} ${job.ro_number} ${
account.accountdesc
} N ${DineroAmount.toFormat(DineroQbFormat)} ${account.accountitem} Y N `;
};
const generateTaxLine = (job, amount, type, taxMapping) => {
return `SPL INVOICE ${generateJobInvoiceDate(job)} ${
taxMapping[type].accountname
} ${taxMapping[type].accountdesc} 0100 ${amount
.multiply(-1)
.toFormat(DineroQbFormat)} ${job.ro_number} N ${taxMapping[type].rate.toFixed(
2
)}% ${taxMapping[type].accountitem} N N AUTOSTAX `;
};
const END_TRNS = `ENDTRNS`;
const generateJobInvoiceDate = (job) => {
return `${new Date(job.date_invoiced).getMonth() + 1}/${new Date(
job.date_invoiced
).getDate()}/${new Date(job.date_invoiced).getFullYear()}`;
};

View File

@@ -1 +0,0 @@
exports.receivables = require("./iif-receivables").default

View File

@@ -7,7 +7,6 @@ var builder = require("xmlbuilder2");
const QbXmlUtils = require("./qbxml-utils");
const moment = require("moment");
const logger = require("../../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
@@ -26,6 +25,13 @@ exports.default = async (req, res) => {
});
try {
logger.log(
"qbxml-payable-create",
"DEBUG",
req.user.email,
req.body.billsToQuery
);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {
@@ -43,15 +49,14 @@ exports.default = async (req, res) => {
});
//For each invoice.
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log(
"qbxml-payable-error",
"error",
req.body.user,
"ERROR",
req.user.email,
req.body.billsToQuery,
error
{ error }
);
res.status(400).send(JSON.stringify(error));
}
@@ -119,13 +124,6 @@ const generateBillLine = (billLine, responsibilityCenters, jobClass) => {
};
};
// [
// {
// AccountRef: { FullName: "BODY SHOP COST:SUBLET" },
// Amount: invoice.amount,
// },
// ],
const findTaxCode = (billLine, taxcode) => {
const {
applicable_taxes: { local, state, federal },

View File

@@ -29,6 +29,14 @@ exports.default = async (req, res) => {
});
try {
logger.log(
"qbxml-payments-create",
"DEBUG",
req.user.email,
req.body.paymentsToQuery,
null
);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
@@ -85,7 +93,7 @@ exports.default = async (req, res) => {
logger.log(
"qbxml-payments-error",
"error",
req.body.user,
req.user.email,
req.body.paymentsToQuery,
error
);

View File

@@ -29,6 +29,14 @@ exports.default = async (req, res) => {
});
try {
logger.log(
"qbxml-receivables-create",
"DEBUG",
req.user.email,
req.body.jobIds,
null
);
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, { ids: jobIds });
@@ -96,9 +104,9 @@ exports.default = async (req, res) => {
res.status(200).json(QbXmlToExecute);
} catch (error) {
logger.log(
"qbxml-payments-error",
"qbxml-receivables-error",
"error",
req.body.user,
req.user.email,
req.body.jobIds,
error
);
@@ -215,7 +223,7 @@ const generateInvoiceQbxml = (
jobs_by_pk.joblines.map((jobline) => {
//Parts Lines
if (jobline.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals calculations.
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
}
if (jobline.db_ref === "936007") {
@@ -304,7 +312,7 @@ const generateInvoiceQbxml = (
// console.log("Done creating hash", JSON.stringify(invoiceLineHash));
if (!hasMapaLine && jobs_by_pk.job_totals.rates.mapa.total.amount > 0) {
console.log("Adding MAPA Line Manually.");
// console.log("Adding MAPA Line Manually.");
const mapaAccountName = responsibilityCenters.defaults.profits.MAPA;
const mapaAccount = responsibilityCenters.profits.find(
@@ -329,7 +337,7 @@ const generateInvoiceQbxml = (
}
if (!hasMashLine && jobs_by_pk.job_totals.rates.mash.total.amount > 0) {
console.log("Adding MASH Line Manually.");
// console.log("Adding MASH Line Manually.");
const mashAccountName = responsibilityCenters.defaults.profits.MASH;
@@ -350,7 +358,7 @@ const generateInvoiceQbxml = (
},
});
} else {
console.log("NO MASH ACCOUNT FOUND!!");
// console.log("NO MASH ACCOUNT FOUND!!");
}
}

View File

@@ -4,7 +4,7 @@ const Dinero = require("dinero.js");
const moment = require("moment");
var builder = require("xmlbuilder2");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
@@ -25,7 +25,7 @@ const ftpSetup = {
port: process.env.AUTOHOUSE_PORT,
username: process.env.AUTOHOUSE_USER,
password: process.env.AUTOHOUSE_PASSWORD,
debug: console.log,
debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"],
},
@@ -33,14 +33,16 @@ const ftpSetup = {
exports.default = async (req, res) => {
//Query for the List of Bodyshop Clients.
console.log("Starting Autohouse datapump request.");
logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const allxmlsToUpload = [];
const allErrors = [];
try {
for (const bodyshop of bodyshops) {
console.log("Starting extract for ", bodyshop.shopname);
logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname,
});
const erroredJobs = [];
try {
const { jobs } = await client.request(queries.AUTOHOUSE_QUERY, {
@@ -60,12 +62,12 @@ exports.default = async (req, res) => {
},
};
console.log(
bodyshop.shopname,
"***Number of Failed jobs***: ",
erroredJobs.length,
JSON.stringify(erroredJobs.map((j) => j.job.ro_number))
);
if (erroredJobs.length > 0) {
logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
count: erroredJobs.length,
jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number)),
});
}
var ret = builder
.create(autoHouseObject, {
@@ -81,10 +83,15 @@ exports.default = async (req, res) => {
)}.xml`,
});
console.log("Finished extract for shop ", bodyshop.shopname);
logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname,
});
} catch (error) {
//Error at the shop level.
console.log("Error at shop level", bodyshop.shopname, error);
logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, {
error,
});
allErrors.push({
bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid,
@@ -102,7 +109,9 @@ exports.default = async (req, res) => {
let sftp = new Client();
sftp.on("error", (errors) =>
console.log("Error in FTP client", JSON.stringify(errors))
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
errors,
})
);
try {
//Connect to the FTP and upload all.
@@ -110,20 +119,24 @@ exports.default = async (req, res) => {
await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) {
console.log("Uploading", xmlObj.filename);
logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
});
const uploadResult = await sftp.put(
Buffer.from(xmlObj.xml),
`/${xmlObj.filename}`
);
console.log(
"🚀 ~ file: autohouse.js ~ line 94 ~ uploadResult",
uploadResult
);
logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
uploadResult,
});
}
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
} catch (error) {
console.log("Error when connecting to FTP", error);
logger.log("autohouse-sftp-error", "ERROR", "api", null, {
error,
});
} finally {
sftp.end();
}
@@ -498,7 +511,10 @@ const CreateRepairOrderTag = (job, errorCallback) => {
};
return ret;
} catch (error) {
console.log("Error calculating job", error);
logger.log("autohouse-job-calculate-error", "ERROR", "api", null, {
error,
});
errorCallback({ job, error });
}
};
@@ -510,7 +526,7 @@ const CreateCosts = (job) => {
//At the bill level.
bill_val.billlines.map((line_val) => {
//At the bill line level.
//console.log("JobCostingPartsTable -> line_val", line_val);
if (!bill_acc[line_val.cost_center])
bill_acc[line_val.cost_center] = Dinero();

View File

@@ -8,7 +8,7 @@ require("dotenv").config({
const axios = require("axios");
let nodemailer = require("nodemailer");
let aws = require("aws-sdk");
const logger = require("../utils/logger");
const ses = new aws.SES({
apiVersion: "2010-12-01",
region: "ca-central-1",
@@ -19,9 +19,13 @@ let transporter = nodemailer.createTransport({
});
exports.sendEmail = async (req, res) => {
if (process.env.NODE_ENV !== "production") {
console.log("[EMAIL] Incoming Message", req.body.from.name);
}
logger.log("send-email", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
});
let downloadedMedia = [];
if (req.body.media && req.body.media.length > 0) {
@@ -30,7 +34,14 @@ exports.sendEmail = async (req, res) => {
try {
return getImage(m);
} catch (error) {
console.log(error);
logger.log("send-email-error", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error,
});
}
})
);
@@ -73,10 +84,26 @@ exports.sendEmail = async (req, res) => {
(err, info) => {
console.log(err || info);
if (info) {
console.log("[EMAIL] Email sent: " + info);
logger.log("send-email-success", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
info,
});
res.json({ success: true, response: info });
} else {
console.log("[EMAIL] Email send failed. ", err);
logger.log("send-email-failure", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error: err,
});
res.json({ success: false, error: err });
}
}

View File

@@ -1,5 +1,5 @@
var admin = require("firebase-admin");
const logger = require("../utils/logger");
const path = require("path");
require("dotenv").config({
path: path.resolve(
@@ -26,8 +26,20 @@ const adminEmail = [
];
exports.updateUser = (req, res) => {
console.log("USer Requesting", req.user);
logger.log("admin-update-user", "WARN", req.user.email, null, {
request: req.body,
});
if (!adminEmail.includes(req.user.email)) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
request: req.body,
user: req.user,
}
);
res.sendStatus(404);
}
@@ -48,11 +60,16 @@ exports.updateUser = (req, res) => {
)
.then((userRecord) => {
// See the UserRecord reference doc for the contents of userRecord.
console.log("Successfully updated user", userRecord.toJSON());
logger.log("admin-update-user-success", "DEBUG", req.user.email, null, {
userRecord,
});
res.json(userRecord);
})
.catch((error) => {
console.log("Error updating user:", error);
logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error);
});
};
@@ -85,8 +102,6 @@ exports.sendNotification = (req, res) => {
};
exports.validateFirebaseIdToken = async (req, res, next) => {
console.log("Check if request is authorized with Firebase ID token");
if (
(!req.headers.authorization ||
!req.headers.authorization.startsWith("Bearer ")) &&
@@ -112,7 +127,10 @@ exports.validateFirebaseIdToken = async (req, res, next) => {
} else {
// No cookie
console.error("Unauthorized attempt. No cookie provided.");
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "no-cookie",
});
res.status(403).send("Unauthorized");
return;
}
@@ -124,7 +142,12 @@ exports.validateFirebaseIdToken = async (req, res, next) => {
next();
return;
} catch (error) {
console.error("Error while verifying Firebase ID token:", error);
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "unauthorized",
error,
});
res.status(403).send("Unauthorized");
return;
}

View File

@@ -751,6 +751,7 @@ exports.QUERY_JOB_COSTING_DETAILS = ` query QUERY_JOB_COSTING_DETAILS($id: uuid!
ca_customer_gst
joblines(where: { removed: { _eq: false } }) {
id
db_ref
unq_seq
line_ind
tax_part
@@ -790,6 +791,7 @@ exports.QUERY_JOB_COSTING_DETAILS = ` query QUERY_JOB_COSTING_DETAILS($id: uuid!
cost_center
actualhrs
productivehrs
flat_rate
}
bodyshop{
id
@@ -852,6 +854,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
ca_customer_gst
joblines(where: {removed: {_eq: false}}) {
id
db_ref
unq_seq
line_ind
tax_part
@@ -891,6 +894,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
cost_center
actualhrs
productivehrs
flat_rate
}
bodyshop {
id

View File

@@ -3,16 +3,16 @@ const queries = require("../graphql-client/queries");
//const client = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
async function JobCosting(req, res) {
const { jobid } = req.body;
console.time("Query for Data");
const BearerToken = req.headers.authorization;
const BearerToken = req.headers.authorization;
logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
@@ -25,15 +25,16 @@ async function JobCosting(req, res) {
.request(queries.QUERY_JOB_COSTING_DETAILS, {
id: jobid,
});
console.timeEnd("querydata");
console.time(`generatecostingdata-${resp.jobs_by_pk.id}`);
const ret = GenerateCostingData(resp.jobs_by_pk);
console.timeEnd(`generatecostingdata-${resp.jobs_by_pk.id}`);
res.status(200).json(ret);
} catch (error) {
console.log("error", error);
logger.log("job-costing-error", "ERROR", req.user.email, jobid, {
jobid,
error,
});
res.status(400).send(JSON.stringify(error));
}
}
@@ -41,7 +42,8 @@ async function JobCosting(req, res) {
async function JobCostingMulti(req, res) {
const { jobids } = req.body;
const BearerToken = req.headers.authorization;
console.time("JobCostingMultiQueryExecution");
logger.log("job-costing-multi-start", "DEBUG", req.user.email, jobids, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
@@ -78,12 +80,8 @@ async function JobCostingMulti(req, res) {
const ret = {};
resp.jobs.map((job) => {
console.time(`CostingData-${job.id}`);
const costingData = GenerateCostingData(job);
ret[job.id] = costingData;
console.timeEnd(`CostingData-${job.id}`);
console.time(`SummaryOfCostingData-${job.id}`);
//Merge on a cost center basis.
@@ -165,7 +163,6 @@ async function JobCostingMulti(req, res) {
costingData.summaryData.totalPartsGp
);
console.timeEnd(`SummaryOfCostingData-${job.id}`);
//Take the summary data & add it to total summary data.
});
@@ -220,15 +217,16 @@ async function JobCostingMulti(req, res) {
//Calculate the total gross profit percentages.
console.timeEnd("JobCostingMultiQueryExecution");
res.status(200).json({
allCostCenterData: finalCostingdata,
allSummaryData: multiSummary.summaryData,
data: ret,
});
} catch (error) {
console.log("error", error);
logger.log("job-costing-multi-error", "ERROR", req.user.email, [jobids], {
jobids,
error,
});
res.status(400).send(JSON.stringify(error));
}
}
@@ -242,12 +240,22 @@ function GenerateCostingData(job) {
);
const materialsHours = { mapaHrs: 0, mashHrs: 0 };
let hasMapaLine = false;
let hasMashLine = false;
//Massage the data.
const jobLineTotalsByProfitCenter =
job &&
job.joblines.reduce(
(acc, val) => {
//Parts Lines
if (val.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
}
if (val.db_ref === "936007") {
hasMashLine = true;
}
if (val.mod_lbr_ty) {
const laborProfitCenter =
val.profitcenter_labor || defaultProfits[val.mod_lbr_ty] || "?";
@@ -265,32 +273,11 @@ function GenerateCostingData(job) {
acc.labor[laborProfitCenter].add(laborAmount);
if (val.mod_lbr_ty === "LAR") {
if (!acc.parts[defaultProfits["MAPA"]])
acc.parts[defaultProfits["MAPA"]] = Dinero();
materialsHours.mapaHrs += val.mod_lb_hrs || 0;
acc.parts[defaultProfits["MAPA"]] = acc.parts[
defaultProfits["MAPA"]
].add(
Dinero({
amount: Math.round((job.rate_mapa || 0) * 100),
}).multiply(val.mod_lb_hrs || 0)
);
}
if (!acc.parts[defaultProfits["MASH"]])
acc.parts[defaultProfits["MASH"]] = Dinero();
if (val.mod_lbr_ty !== "LAR") {
acc.parts[defaultProfits["MASH"]] = acc.parts[
defaultProfits["MASH"]
].add(
Dinero({
amount: Math.round((job.rate_mash || 0) * 100),
}).multiply(val.mod_lb_hrs || 0)
);
materialsHours.mashHrs += val.mod_lb_hrs || 0;
}
//If labor line, add to paint and shop materials.
}
if (val.part_type && val.part_type !== "PAE") {
@@ -358,6 +345,27 @@ function GenerateCostingData(job) {
{ parts: {}, labor: {} }
);
if (!hasMapaLine) {
if (!jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]])
jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]] = Dinero();
jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]] =
jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]].add(
Dinero({
amount: Math.round((job.rate_mapa || 0) * 100),
}).multiply(materialsHours.mapaHrs || 0)
);
}
if (!hasMashLine) {
if (!jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]])
jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]] = Dinero();
jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]] =
jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]].add(
Dinero({
amount: Math.round((job.rate_mash || 0) * 100),
}).multiply(materialsHours.mashHrs || 0)
);
}
const billTotalsByCostCenters = job.bills.reduce((bill_acc, bill_val) => {
//At the bill level.
bill_val.billlines.map((line_val) => {
@@ -437,7 +445,11 @@ function GenerateCostingData(job) {
].add(
Dinero({
amount: Math.round((ticket_val.rate || 0) * 100),
}).multiply(ticket_val.actualhrs || ticket_val.productivehrs || 0)
}).multiply(
ticket_val.flat_rate
? ticket_val.productivehrs || ticket_val.actualhrs || 0
: ticket_val.actualhrs || ticket_val.productivehrs || 0
) //Should base this on the employee.
);
return ticket_acc;
@@ -603,17 +615,14 @@ const formatGpPercent = (gppercent) => {
//Verify that this stays in line with jobs-close-auto-allocate logic from the application.
const getAdditionalCostCenter = (jl, profitCenters) => {
console.log("Checking additional cost center", jl.line_desc);
if (!jl.part_type && !jl.mod_lbr_ty) {
const lineDesc = jl.line_desc ? jl.line_desc.toLowerCase() : "";
//This logic is covered prior and assigned based on the labor type of the lines
// if (lineDesc.includes("shop materials")) {
// return profitCenters["MASH"];
// } else if (lineDesc.includes("paint/materials")) {
// return profitCenters["MAPA"];
// } else
//End covered logic
if (lineDesc.includes("ats amount")) {
const lineDesc = jl.line_desc.toLowerCase();
if (lineDesc.includes("shop mat")) {
return profitCenters["MASH"];
} else if (lineDesc.includes("paint/mat")) {
return profitCenters["MAPA"];
} else if (lineDesc.includes("ats amount")) {
return profitCenters["ATS"];
} else {
return null;

View File

@@ -1,7 +1,7 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
@@ -9,7 +9,7 @@ Dinero.globalRoundingMode = "HALF_EVEN";
exports.totalsSsu = async function (req, res) {
const BearerToken = req.headers.authorization;
const { id } = req.body;
logger.log("job-totals-ssu", "DEBUG", req.user.email, id, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
@@ -43,7 +43,10 @@ exports.totalsSsu = async function (req, res) {
res.status(200).send();
} catch (error) {
console.log(error);
logger.log("job-totals-ssu-error", "ERROR", req.user.email, id, {
jobid: id,
error,
});
res.status(503).send();
}
};
@@ -51,9 +54,6 @@ exports.totalsSsu = async function (req, res) {
//IMPORTANT*** These two functions MUST be mirrored.
async function TotalsServerSide(req, res) {
const { job } = req.body;
console.log(
`Calculating Job Totals on the server side for ${job.id} - ${job.ro_number}`
);
try {
let ret = {
parts: CalculatePartsTotals(job.joblines),
@@ -64,14 +64,20 @@ async function TotalsServerSide(req, res) {
return ret;
} catch (error) {
console.log("error", error);
logger.log("job-totals-ssu-error", "ERROR", req.user.email, job.id, {
jobid: job.id,
error,
});
res.status(400).send(JSON.stringify(error));
}
}
async function Totals(req, res) {
const { job } = req.body;
console.log(`Calculating Job Totals for ${job.id} - ${job.ro_number}`);
logger.log("job-totals", "DEBUG", req.user.email, job.id, {
jobid: job.id,
});
try {
let ret = {
parts: CalculatePartsTotals(job.joblines),
@@ -82,7 +88,10 @@ async function Totals(req, res) {
res.status(200).json(ret);
} catch (error) {
console.log("error", error);
logger.log("job-totals-error", "ERROR", req.user.email, job.id, {
jobid: job.id,
error,
});
res.status(400).send(JSON.stringify(error));
}
}
@@ -157,7 +166,28 @@ function CalculateRatesTotals(ratesList) {
},
};
//Determine if there are MAPA and MASH lines already on the estimate.
//If there are, don't do anything extra (mitchell estimate)
//Otherwise, calculate them and add them to the default MAPA and MASH centers.
let hasMapaLine = false;
let hasMashLine = false;
jobLines.forEach((item) => {
//IO-1317 Use the lines on the estimate if they exist instead.
if (item.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
ret["mapa"].total = Dinero({
amount: Math.round((item.act_price || 0) * 100),
});
}
if (item.db_ref === "936007") {
hasMashLine = true;
ret["mash"].total = Dinero({
amount: Math.round((item.act_price || 0) * 100),
});
}
if (item.mod_lbr_ty) {
//There's a labor type, assign the hours.
ret[item.mod_lbr_ty.toLowerCase()].hours =
@@ -173,11 +203,22 @@ function CalculateRatesTotals(ratesList) {
let subtotal = Dinero({ amount: 0 });
let rates_subtotal = Dinero({ amount: 0 });
for (const property in ret) {
ret[property].total = Dinero({
amount: Math.round((ret[property].rate || 0) * 100),
}).multiply(ret[property].hours);
//Skip calculating mapa and mash if we got the amounts.
if (
!(
(property === "mapa" && hasMapaLine) ||
(property === "mash" && hasMashLine)
)
) {
ret[property].total = Dinero({
amount: Math.round((ret[property].rate || 0) * 100),
}).multiply(ret[property].hours);
}
subtotal = subtotal.add(ret[property].total);
if (property !== "mapa" && property !== "mash")
rates_subtotal = rates_subtotal.add(ret[property].total);
}
@@ -363,7 +404,8 @@ function CalculateTaxesTotals(job, otherTotals) {
job.joblines
.filter((jl) => !jl.removed)
.forEach((val) => {
if (!val.tax_part || (!val.part_type && IsAdditionalCost(val))) {
if (!val.tax_part) return;
if (!val.part_type && IsAdditionalCost(val)) {
additionalItemsTax = additionalItemsTax.add(
Dinero({ amount: Math.round((val.act_price || 0) * 100) })
.multiply(val.part_qty || 0)

View File

@@ -1,5 +1,7 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
@@ -11,8 +13,7 @@ var cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL);
exports.createSignedUploadURL = (req, res) => {
console.log("Request to create signed upload URL for Cloudinary.", req.body);
logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send(
cloudinary.utils.api_sign_request(
req.body,
@@ -23,6 +24,7 @@ exports.createSignedUploadURL = (req, res) => {
exports.downloadFiles = (req, res) => {
const { ids } = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
const url = cloudinary.utils.download_zip_url({
public_ids: ids,
@@ -34,7 +36,8 @@ exports.downloadFiles = (req, res) => {
exports.deleteFiles = async (req, res) => {
const { ids } = req.body;
const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
console.log("🚀 ~ file: media.js ~ line 28 ~ types", types);
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
const returns = [];
if (types.image) {
@@ -65,16 +68,15 @@ exports.deleteFiles = async (req, res) => {
)
);
}
console.log("🚀 ~ file: media.js ~ line 40 ~ returns", returns);
res.send(returns);
};
exports.renameKeys = async (req, res) => {
const { documents } = req.body;
//{id: "", from: "", to:""}
logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
const proms = [];
console.log("Documents", documents);
documents.forEach((d) => {
proms.push(
(async () => {

View File

@@ -5,23 +5,25 @@ require("dotenv").config({
`.env.${process.env.NODE_ENV || "development"}`
),
});
const logger = require("../utils/logger");
const inlineCssTool = require("inline-css");
exports.inlinecss = (req, res) => {
//Perform request validation
console.log("[CSS] New Inline CSS Request.");
logger.log("email-inline-css", "DEBUG", req.user.email, null, null);
const { html, url } = req.body;
inlineCssTool(html, { url: url })
.then((inlinedHtml) => {
console.log("Inline success.");
res.send(inlinedHtml);
})
.catch((error) => {
console.log("Error while inlining CSS", JSON.stringify(error));
logger.log("email-inline-css-error", "ERROR", req.user.email, null, {
error,
});
res.send(error);
});
};

View File

@@ -1,212 +0,0 @@
// const path = require("path");
// const moment = require("moment");
// require("dotenv").config({
// path: path.resolve(
// process.cwd(),
// `.env.${process.env.NODE_ENV || "development"}`
// ),
// });
// var _ = require("lodash");
// const Handlebars = require("handlebars");
// const phone = require("phone");
// var Dinero = require("dinero.js");
// Dinero.defaultCurrency = "CAD";
// Dinero.globalLocale = "en-CA";
// //Usage: {{moment appointments_by_pk.start format="dddd, DD MMMM YYYY"}}
// Handlebars.registerHelper("round", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// try {
// return context.toFixed(2);
// } catch {
// return context;
// }
// });
// Handlebars.registerHelper("dinerof", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// var amount = Dinero(context);
// if (context) {
// return amount.toFormat();
// }
// return "";
// });
// Handlebars.registerHelper("phonef", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// var ph = phone(context)[0];
// if (context) {
// return ph;
// }
// return "";
// });
// Handlebars.registerHelper("partType", function (context, block) {
// if (!context) return "";
// switch (context.toUpperCase()) {
// case "PAA":
// return "Aftermarket";
// case "PAE":
// return "Existing";
// case "PAN":
// return "OEM";
// case "PAO":
// return "Other";
// case "PAS":
// return "Sublet";
// case "PASL":
// return "Sublet";
// case "PAL":
// return "LKQ";
// case "PAM":
// return "Remanufactured";
// case "PAC":
// return "Chrome";
// case "PAP":
// return "OEM Partial";
// case "PAR":
// return "Record";
// default:
// return context;
// }
// });
// Handlebars.registerHelper("lbrType", function (context, block) {
// if (!context) return "";
// switch (context.toUpperCase()) {
// case "LAA":
// return "Aluminum";
// case "LAB":
// return "Body";
// case "LAD":
// return "Diagnostic";
// case "LAF":
// return "Frame";
// case "LAG":
// return "Glass";
// case "LAM":
// return "Mechanical";
// case "LAR":
// return "Refinish";
// case "LAS":
// return "Structural";
// case "LAU":
// return "Detail";
// default:
// return context;
// }
// });
// Handlebars.registerHelper("objectKeys", function (obj, block) {
// var accum = "";
// obj &&
// Object.keys(obj).map((key) => {
// accum += block.fn({ key, value: obj[key] });
// });
// return accum;
// });
// Handlebars.registerHelper("dinero", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// var amount = Dinero({
// amount: Math.round((context || 0) * 100),
// currency: "CAD",
// });
// return amount.toFormat();
// });
// Handlebars.registerHelper("moment", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// if (!!!context) return "";
// var date = moment(context);
// if (block.hash.timezone) {
// date.tz(block.hash.timezone);
// }
// var hasFormat = false;
// // Reset the language back to default before doing anything else
// date.locale("en");
// for (var i in block.hash) {
// if (i === "format") {
// hasFormat = true;
// } else if (date[i]) {
// date = date[i](block.hash[i]);
// } else {
// console.log('moment.js does not support "' + i + '"');
// }
// }
// if (hasFormat) {
// date = date.format(block.hash.format);
// }
// return date;
// });
// Handlebars.registerHelper("duration", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = 0;
// }
// var duration = moment.duration(context);
// var hasFormat = false;
// // Reset the language back to default before doing anything else
// duration = duration.lang("en");
// for (var i in block.hash) {
// if (i === "format") {
// hasFormat = true;
// } else if (duration[i]) {
// duration = duration[i](block.hash[i]);
// } else {
// console.log('moment.js duration does not support "' + i + '"');
// }
// }
// if (hasFormat) {
// duration = duration.format(block.hash.format);
// }
// return duration;
// });
// NOTE(review): this handler is effectively disabled — its entire body is
// commented out, so the endpoint performs no work and never sends a
// response (callers will hang until their request times out). Confirm
// whether the route should be removed outright or the Handlebars
// templating logic below restored.
exports.render = (req, res) => {
// //Perform request validation
// let view;
// console.log("[HJS Render] New Render Request.");
// //console.log("[HJS Render] Context", req.body.context);
// if (req.body.context.bodyshop.template_header) {
// console.log("[HJS Render] Including Header");
// //view = req.body.view;
// view = `${req.body.context.bodyshop.template_header}${req.body.view}`;
// } else {
// console.log("[HJS Render] No header to include.");
// view = req.body.view;
// }
// var template = Handlebars.compile(view);
// res.send(template(req.body.context));
};

View File

@@ -3,7 +3,7 @@ const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
@@ -12,10 +12,10 @@ require("dotenv").config({
});
exports.job = async (req, res) => {
const BearerToken = req.headers.authorization;
const { jobId } = req.body;
try {
const BearerToken = req.headers.authorization;
const { jobId } = req.body;
console.log("exports.job -> jobId", jobId);
logger.log("smart-scheduling-start", "DEBUG", req.user.email, jobId, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
@@ -133,10 +133,12 @@ exports.job = async (req, res) => {
)
possibleDates.push(new Date(bmkey).toISOString().substr(0, 10));
});
console.log("possibleDates", possibleDates, "bucketMatrix", bucketMatrix);
res.json(possibleDates);
} catch (error) {
console.log("error", error);
logger.log("smart-scheduling-error", "ERROR", req.user.email, jobId, {
error,
});
res.status(400).send(error);
}
};

View File

@@ -10,16 +10,29 @@ const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const { phone } = require("phone");
const admin = require("../firebase/firebase-handler").admin;
const logger = require("../utils/logger");
exports.receive = (req, res) => {
//Perform request validation
console.log("[SMS Receive] Inbound Twilio Message.", req.body.SmsMessageSid);
console.log("req.body", req.body);
logger.log("sms-inbound", "DEBUG", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
});
if (
!!!req.body ||
!!!req.body.MessagingServiceSid ||
!!!req.body.SmsMessageSid
) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
type: "malformed-request",
});
res.status(400);
res.json({ success: false, error: "Malformed Request" });
} else {
@@ -29,8 +42,6 @@ exports.receive = (req, res) => {
phone: phone(req.body.From).phoneNumber,
})
.then((response) => {
console.log("re", req.body);
let newMessage = {
msid: req.body.SmsMessageSid,
text: req.body.Body,
@@ -55,10 +66,14 @@ exports.receive = (req, res) => {
response.bodyshops[0].conversations[0].id;
} else {
//We should never get here.
console.log(
"Massive Error: Duplicate Phone Numbers for MSSID: " +
req.body.MessagingServiceSid
);
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
type: "duplicate-phone",
});
}
client
@@ -67,6 +82,9 @@ exports.receive = (req, res) => {
conversationid: response.bodyshops[0].conversations[0].id,
})
.then((r2) => {
logger.log("sms-inbound-success", "DEBUG", "api", null, {
newMessage,
});
res.status(200).send("");
const arrayOfAllUserFcmTokens =
@@ -109,7 +127,15 @@ exports.receive = (req, res) => {
// });
})
.catch((e2) => {
console.log("e2", e2);
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
error: e2,
});
res.sendStatus(500).json(e2);
});
}

View File

@@ -9,7 +9,7 @@ require("dotenv").config({
const twilio = require("twilio");
const { phone } = require("phone");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const client = twilio(
process.env.TWILIO_AUTH_TOKEN,
process.env.TWILIO_AUTH_KEY
@@ -19,9 +19,21 @@ const gqlClient = require("../graphql-client/graphql-client").client;
exports.send = (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia } =
req.body;
console.log("[Sending Sms] " + conversationid + " | " + body);
logger.log("sms-outbound", "DEBUG", req.user.email, null, {
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src),
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : [],
});
if (!!to && !!messagingServiceSid && !!body && !!conversationid) {
console.log(phone(to));
client.messages
.create({
body: body,
@@ -46,40 +58,47 @@ exports.send = (req, res) => {
.request(queries.INSERT_MESSAGE, { msg: newMessage })
.then((r2) => {
//console.log("Responding GQL Message ID", JSON.stringify(r2));
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
conversationid,
});
res.sendStatus(200);
})
.catch((e2) => {
console.log("e2", e2);
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
msid: message.sid,
conversationid,
error: e2,
});
//res.json({ success: false, message: e2 });
});
})
.catch((e1) => {
//res.json({ success: false, message: error });
console.log("e1", e1);
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
conversationid,
error: e1,
});
});
} else {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
type: "missing-parameters",
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0
? selectedMedia.map((i) => i.src)
: [],
});
res
.status(400)
.json({ success: false, message: "Missing required parameter(s)." });
}
};
// //Image
// acc.push({
// src: `${process.env.REACT_APP_CLOUDINARY_ENDPOINT}/${DetermineFileType(
// value.type
// )}/upload/${value.key}`,
// thumbnail: `${
// process.env.REACT_APP_CLOUDINARY_ENDPOINT
// }/${DetermineFileType(value.type)}/upload/${
// process.env.REACT_APP_CLOUDINARY_THUMB_TRANSFORMATIONS
// }/${value.key}`,
// thumbnailHeight: 225,
// thumbnailWidth: 225,
// isSelected: false,
// key: value.key,
// extension: value.extension,
// id: value.id,
// type: value.type,
// tags: [{ value: value.type, title: value.type }],
// });

View File

@@ -9,6 +9,7 @@ require("dotenv").config({
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const { phone } = require("phone");
const logger = require("../utils/logger");
exports.status = (req, res) => {
const { SmsSid, SmsStatus } = req.body;
@@ -18,10 +19,17 @@ exports.status = (req, res) => {
fields: { status: SmsStatus },
})
.then((response) => {
console.log("Message Updated", JSON.stringify(response));
logger.log("sms-status-update", "DEBUG", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
});
})
.catch((error) => {
console.log("Error updating message status", error);
logger.log("sms-status-update-error", "ERROR", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
error,
});
});
res.sendStatus(200);
};

View File

@@ -1,7 +1,7 @@
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const path = require("path");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(
process.cwd(),
@@ -11,7 +11,7 @@ require("dotenv").config({
exports.techLogin = async (req, res) => {
const { shopid, employeeid, pin } = req.body;
logger.log("tech-console-login", "DEBUG", req.user.email, null, null);
try {
const result = await client.request(queries.QUERY_EMPLOYEE_PIN, {
shopId: shopid,
@@ -28,14 +28,23 @@ exports.techLogin = async (req, res) => {
delete dbRecord.pin;
technician = dbRecord;
} else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "wrong-pin",
});
error = "The employee ID and PIN combination are not correct.";
}
} else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "invalid-employee",
});
error = "The employee ID does not exist.";
}
res.json({ valid, technician, error });
} catch (error) {
console.log("error", error);
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
error,
});
res.status(400).send(error);
}
};

View File

@@ -7,7 +7,7 @@ const logger = new graylog2.graylog({
function log(message, type, user, record, object) {
console.log(message, {
type,
env: process.env.NODE_ENV,
env: process.env.NODE_ENV || "development",
user,
record,
...object,
@@ -22,4 +22,3 @@ function log(message, type, user, record, object) {
}
module.exports = { log };
//const logger = require("./server/utils/logger");

716
yarn.lock

File diff suppressed because it is too large Load Diff