Merged in release/2021-08-27 (pull request #191)

Release/2021 08 27
This commit is contained in:
Patrick Fic
2021-08-25 21:59:23 +00:00
51 changed files with 45402 additions and 1127 deletions

View File

@@ -1,4 +1,4 @@
<babeledit_project be_version="2.7.1" version="1.2"> <babeledit_project version="1.2" be_version="2.7.1">
<!-- <!--
BabelEdit project file BabelEdit project file
@@ -7594,6 +7594,27 @@
</concept_node> </concept_node>
</children> </children>
</folder_node> </folder_node>
<concept_node>
<name>emaillater</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node> <concept_node>
<name>employees</name> <name>employees</name>
<definition_loaded>false</definition_loaded> <definition_loaded>false</definition_loaded>
@@ -7825,6 +7846,27 @@
</translation> </translation>
</translations> </translations>
</concept_node> </concept_node>
<concept_node>
<name>printlater</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node> <concept_node>
<name>rbac</name> <name>rbac</name>
<definition_loaded>false</definition_loaded> <definition_loaded>false</definition_loaded>
@@ -35796,6 +35838,27 @@
</translation> </translation>
</translations> </translations>
</concept_node> </concept_node>
<concept_node>
<name>flat_rate</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node> <concept_node>
<name>memo</name> <name>memo</name>
<definition_loaded>false</definition_loaded> <definition_loaded>false</definition_loaded>

View File

@@ -1,9 +1,25 @@
// craco.config.js // craco.config.js
const TerserPlugin = require("terser-webpack-plugin"); const TerserPlugin = require("terser-webpack-plugin");
const CracoLessPlugin = require("craco-less"); const CracoLessPlugin = require("craco-less");
const SentryWebpackPlugin = require("@sentry/webpack-plugin");
module.exports = { module.exports = {
plugins: [ plugins: [
{
plugin: SentryWebpackPlugin,
options: {
// sentry-cli configuration
// SECURITY(review): this auth token is committed to source control.
// It should be revoked and loaded from process.env.SENTRY_AUTH_TOKEN
// (or ~/.sentryclirc) instead of being hardcoded here.
authToken:
"6b45b028a02342db97a9a2f92c0959058665443d379d4a3a876430009e744260",
org: "snapt-software",
project: "imexonline",
// Tie uploaded source maps to the deployed commit SHA.
release: process.env.REACT_APP_GIT_SHA,
// webpack-specific configuration
include: ".",
ignore: ["node_modules", "webpack.config.js"],
},
},
{ {
plugin: CracoLessPlugin, plugin: CracoLessPlugin,
options: { options: {
@@ -53,4 +69,5 @@ module.exports = {
}, },
}), }),
}, },
devtool: "source-map",
}; };

43861
client/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -8,6 +8,10 @@
"@craco/craco": "^6.2.0", "@craco/craco": "^6.2.0",
"@fingerprintjs/fingerprintjs": "^3.2.0", "@fingerprintjs/fingerprintjs": "^3.2.0",
"@lourenci/react-kanban": "^2.1.0", "@lourenci/react-kanban": "^2.1.0",
"@openreplay/tracker": "^3.2.1",
"@openreplay/tracker-assist": "^3.0.3",
"@openreplay/tracker-graphql": "^3.0.0",
"@openreplay/tracker-redux": "^3.0.0",
"@sentry/react": "^6.10.0", "@sentry/react": "^6.10.0",
"@sentry/tracing": "^6.10.0", "@sentry/tracing": "^6.10.0",
"@stripe/react-stripe-js": "^1.4.0", "@stripe/react-stripe-js": "^1.4.0",
@@ -108,6 +112,7 @@
] ]
}, },
"devDependencies": { "devDependencies": {
"@sentry/webpack-plugin": "^1.17.1",
"redux-logger": "^3.0.6", "redux-logger": "^3.0.6",
"source-map-explorer": "^2.5.2" "source-map-explorer": "^2.5.2"
} }

View File

@@ -8,8 +8,28 @@ import { useTranslation } from "react-i18next";
import GlobalLoadingBar from "../components/global-loading-bar/global-loading-bar.component"; import GlobalLoadingBar from "../components/global-loading-bar/global-loading-bar.component";
import client from "../utils/GraphQLClient"; import client from "../utils/GraphQLClient";
import App from "./App"; import App from "./App";
import trackerGraphQL from "@openreplay/tracker-graphql";
//import trackerRedux from "@openreplay/tracker-redux";
import Tracker from "@openreplay/tracker";
//import trackerAssist from "@openreplay/tracker-assist";
moment.locale("en-US"); moment.locale("en-US");
// OpenReplay session-replay tracker, shared app-wide (user sagas import
// `tracker` to call setUserID on sign-in).
// NOTE(review): projectKey and ingestPoint are hardcoded — consider env vars.
export const tracker = new Tracker({
projectKey: "FPjeYIbwJyvhrVVTTLHo",
ingestPoint: "https://replay.bodyshop.app/ingest",
// ...(process.env.NODE_ENV === null || process.env.NODE_ENV === "development"
// ? { __DISABLE_SECURE_MODE: true }
// : {}),
// beaconSize: 10485760,
onStart: ({ sessionID }) =>
console.log(
"******** OpenReplay tracker started with session: ",
sessionID
),
});
//tracker.use(trackerAssist({ confirmText: "Confirm help" })); // check the list of available options below
// GraphQL operation recorder; GraphQLClient wires this into request middleware.
export const recordGraphQL = tracker.use(trackerGraphQL());
// NOTE(review): starts in every environment (dev included), unlike the
// production-only LogRocket init below — confirm that is intentional.
tracker.start();
if (process.env.NODE_ENV === "production") LogRocket.init("gvfvfw/bodyshopapp"); if (process.env.NODE_ENV === "production") LogRocket.init("gvfvfw/bodyshopapp");
export default function AppContainer() { export default function AppContainer() {

View File

@@ -83,8 +83,10 @@ export function ContractsList({
render: (text, record) => ( render: (text, record) => (
<Link to={`/manage/courtesycars/${record.courtesycar.id}`}>{`${ <Link to={`/manage/courtesycars/${record.courtesycar.id}`}>{`${
record.courtesycar.year record.courtesycar.year
} ${record.courtesycar.make} ${record.courtesycar.model} ${ } ${record.courtesycar.make} ${record.courtesycar.model}${
record.courtesycar.plate ? `(${record.courtesycar.plate})` : "" record.courtesycar.plate ? ` (${record.courtesycar.plate})` : ""
}${
record.courtesycar.fleetnumber ? ` (${record.courtesycar.fleetnumber})` : ""
}`}</Link> }`}</Link>
), ),
}, },

View File

@@ -97,6 +97,24 @@ export function JobsAvailableContainer({
}); });
return; return;
} }
//IO-539 Check for Parts Rate on PAL for SGI use case.
// PAL appears to be the used-parts tax bucket on the estimate — confirm.
// If its rate is missing (null/0), offer to backfill it from the shop's
// state tax rate before totals are computed.
if (
estData.est_data.parts_tax_rates &&
estData.est_data.parts_tax_rates.PAL &&
(estData.est_data.parts_tax_rates.PAL.prt_tax_rt === null ||
estData.est_data.parts_tax_rates.PAL.prt_tax_rt === 0)
) {
// NOTE(review): debug logging left in — consider removing.
console.log("checking");
const res = await confirmDialog(
`ImEX Online has detected that there is a missing tax rate for used parts. Pressing OK will set the tax rate to ${bodyshop.bill_tax_rates.state_tax_rate}%.`
);
if (res) {
// Dialog shows a percentage; the estimate stores a fraction (/ 100).
estData.est_data.parts_tax_rates.PAL.prt_tax_rt =
bodyshop.bill_tax_rates.state_tax_rate / 100;
estData.est_data.parts_tax_rates.PAL.prt_tax_in = true;
}
}
const newTotals = ( const newTotals = (
await Axios.post("/job/totals", { await Axios.post("/job/totals", {
@@ -196,6 +214,24 @@ export function JobsAvailableContainer({
message: t("jobs.errors.creating", { error: "No job data present." }), message: t("jobs.errors.creating", { error: "No job data present." }),
}); });
} else { } else {
//IO-539 Check for Parts Rate on PAL for SGI use case.
// Duplicate of the PAL used-parts tax-rate backfill in the earlier handler
// — candidate for extraction into a shared helper.
if (
estData.est_data.parts_tax_rates &&
estData.est_data.parts_tax_rates.PAL &&
(estData.est_data.parts_tax_rates.PAL.prt_tax_rt === null ||
estData.est_data.parts_tax_rates.PAL.prt_tax_rt === 0)
) {
// NOTE(review): debug logging left in — consider removing.
console.log("checking");
const res = await confirmDialog(
`ImEX Online has detected that there is a missing tax rate for used parts. Pressing OK will set the tax rate to ${bodyshop.bill_tax_rates.state_tax_rate}%.`
);
if (res) {
// Dialog shows a percentage; the estimate stores a fraction (/ 100).
estData.est_data.parts_tax_rates.PAL.prt_tax_rt =
bodyshop.bill_tax_rates.state_tax_rate / 100;
estData.est_data.parts_tax_rates.PAL.prt_tax_in = true;
}
}
//create upsert job //create upsert job
let supp = replaceEmpty({ ...estData.est_data }); let supp = replaceEmpty({ ...estData.est_data });
@@ -388,3 +424,11 @@ function replaceEmpty(someObj, replaceValue = null) {
console.log("Parsed", JSON.parse(temp)); console.log("Parsed", JSON.parse(temp));
return JSON.parse(temp); return JSON.parse(temp);
} }
/**
 * Wrap window.confirm so callers can `await` a boolean answer.
 *
 * Fix: the previous version did `confirmed ? resolve(true) : reject(false)`,
 * so pressing Cancel REJECTED the promise and `await confirmDialog(...)`
 * threw instead of yielding false — but every call site does
 * `const res = await confirmDialog(msg); if (res) {...}` and expects a
 * plain boolean. Resolve with the user's answer in both cases.
 *
 * @param {string} msg - Text shown in the native confirm dialog.
 * @returns {Promise<boolean>} true if the user pressed OK, false on Cancel.
 */
function confirmDialog(msg) {
  return Promise.resolve(window.confirm(msg));
}

View File

@@ -106,6 +106,12 @@ export default function PartsOrderModalComponent({
label={t("parts_orders.fields.quantity")} label={t("parts_orders.fields.quantity")}
key={`${index}quantity`} key={`${index}quantity`}
name={[field.name, "quantity"]} name={[field.name, "quantity"]}
rules={[
{
required: true,
//message: t("general.validation.required"),
},
]}
> >
<InputNumber /> <InputNumber />
</Form.Item> </Form.Item>

View File

@@ -165,6 +165,20 @@ export default function ShopInfoGeneral({ form }) {
); );
}} }}
</Form.Item> </Form.Item>
<Form.Item
label={t("bodyshop.labels.printlater")}
valuePropName="checked"
name={["accountingconfig", "printlater"]}
>
<Switch />
</Form.Item>
<Form.Item
label={t("bodyshop.labels.emaillater")}
valuePropName="checked"
name={["accountingconfig", "emaillater"]}
>
<Switch />
</Form.Item>
<Form.Item <Form.Item
label={t("bodyshop.fields.inhousevendorid")} label={t("bodyshop.fields.inhousevendorid")}
name={"inhousevendorid"} name={"inhousevendorid"}

View File

@@ -1,5 +1,5 @@
import { useQuery } from "@apollo/client"; import { useQuery } from "@apollo/client";
import { Form, Input, InputNumber, Select } from "antd"; import { Form, Input, InputNumber, Select, Switch } from "antd";
import React from "react"; import React from "react";
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
import { connect } from "react-redux"; import { connect } from "react-redux";
@@ -113,7 +113,17 @@ export function TimeTicketModalComponent({
}, },
]} ]}
> >
<EmployeeSearchSelect options={employeeAutoCompleteOptions} /> <EmployeeSearchSelect
options={employeeAutoCompleteOptions}
onSelect={(value) => {
console.log(value);
const emps =
employeeAutoCompleteOptions &&
employeeAutoCompleteOptions.filter((e) => e.id === value)[0];
console.log(emps);
form.setFieldsValue({ flat_rate: emps && emps.flat_rate });
}}
/>
</Form.Item> </Form.Item>
<Form.Item <Form.Item
shouldUpdate={(prev, cur) => prev.employeeid !== cur.employeeid} shouldUpdate={(prev, cur) => prev.employeeid !== cur.employeeid}
@@ -140,6 +150,14 @@ export function TimeTicketModalComponent({
); );
}} }}
</Form.Item> </Form.Item>
<Form.Item
name="flat_rate"
label={t("timetickets.fields.flat_rate")}
valuePropName="checked"
>
<Switch />
</Form.Item>
</LayoutFormRow> </LayoutFormRow>
<LayoutFormRow> <LayoutFormRow>

View File

@@ -58,6 +58,7 @@ export const GET_LINE_TICKET_BY_PK = gql`
jobid jobid
employeeid employeeid
memo memo
flat_rate
employee { employee {
id id
first_name first_name

View File

@@ -318,6 +318,7 @@ export const QUERY_JOB_COSTING_DETAILS = gql`
cost_center cost_center
actualhrs actualhrs
productivehrs productivehrs
flat_rate
} }
} }
} }

View File

@@ -14,6 +14,7 @@ export const QUERY_TICKETS_BY_JOBID = gql`
id id
memo memo
jobid jobid
flat_rate
employee { employee {
employee_number employee_number
first_name first_name
@@ -42,6 +43,7 @@ export const QUERY_TIME_TICKETS_IN_RANGE = gql`
productivehrs productivehrs
memo memo
jobid jobid
flat_rate
job { job {
id id
ro_number ro_number
@@ -72,6 +74,7 @@ export const INSERT_NEW_TIME_TICKET = gql`
ciecacode ciecacode
date date
memo memo
flat_rate
} }
} }
} }
@@ -98,6 +101,7 @@ export const UPDATE_TIME_TICKET = gql`
updated_at updated_at
jobid jobid
date date
flat_rate
memo memo
} }
} }
@@ -121,6 +125,7 @@ export const QUERY_ACTIVE_TIME_TICKETS = gql`
clockon clockon
memo memo
cost_center cost_center
flat_rate
jobid jobid
job { job {
id id

View File

@@ -27,7 +27,7 @@ const middlewares = [
if (process.env.NODE_ENV === "development") { if (process.env.NODE_ENV === "development") {
middlewares.push(createLogger({ collapsed: true, diff: true })); middlewares.push(createLogger({ collapsed: true, diff: true }));
} }
//middlewares.push(Tracker.use(trackerRedux()));
const composeEnhancers = const composeEnhancers =
typeof window === "object" && window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ typeof window === "object" && window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__
? window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__({ ? window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__({

View File

@@ -29,6 +29,7 @@ import {
} from "./user.actions"; } from "./user.actions";
import UserActionTypes from "./user.types"; import UserActionTypes from "./user.types";
import * as Sentry from "@sentry/browser"; import * as Sentry from "@sentry/browser";
import { tracker } from "../../App/App.container";
export function* onEmailSignInStart() { export function* onEmailSignInStart() {
yield takeLatest(UserActionTypes.EMAIL_SIGN_IN_START, signInWithEmail); yield takeLatest(UserActionTypes.EMAIL_SIGN_IN_START, signInWithEmail);
@@ -68,6 +69,7 @@ export function* isUserAuthenticated() {
} }
LogRocket.identify(user.email); LogRocket.identify(user.email);
tracker.setUserID(user.email);
yield put( yield put(
signInSuccess({ signInSuccess({
uid: user.uid, uid: user.uid,
@@ -169,6 +171,8 @@ export function* onSignInSuccess() {
export function* signInSuccessSaga({ payload }) { export function* signInSuccessSaga({ payload }) {
LogRocket.identify(payload.email); LogRocket.identify(payload.email);
tracker.setUserID(payload.email);
try { try {
// window.$crisp.push(["set", "user:email", [payload.email]]); // window.$crisp.push(["set", "user:email", [payload.email]]);
console.log("$crisp set nickname", [payload.displayName || payload.email]); console.log("$crisp set nickname", [payload.displayName || payload.email]);

View File

@@ -477,6 +477,7 @@
"cdk_dealerid": "CDK Dealer ID", "cdk_dealerid": "CDK Dealer ID",
"title": "DMS" "title": "DMS"
}, },
"emaillater": "Email Later",
"employees": "Employees", "employees": "Employees",
"insurancecos": "Insurance Companies", "insurancecos": "Insurance Companies",
"intakechecklist": "Intake Checklist", "intakechecklist": "Intake Checklist",
@@ -488,6 +489,7 @@
"notespresets": "Notes Presets", "notespresets": "Notes Presets",
"orderstatuses": "Order Statuses", "orderstatuses": "Order Statuses",
"partslocations": "Parts Locations", "partslocations": "Parts Locations",
"printlater": "Print Later",
"rbac": "Role Based Access Control", "rbac": "Role Based Access Control",
"responsibilitycenters": { "responsibilitycenters": {
"costs": "Cost Centers", "costs": "Cost Centers",
@@ -2160,6 +2162,7 @@
"date": "Ticket Date", "date": "Ticket Date",
"efficiency": "Efficiency", "efficiency": "Efficiency",
"employee": "Employee", "employee": "Employee",
"flat_rate": "Flat Rate?",
"memo": "Memo", "memo": "Memo",
"productivehrs": "Productive Hours", "productivehrs": "Productive Hours",
"ro_number": "Job to Post Against" "ro_number": "Job to Post Against"

View File

@@ -477,6 +477,7 @@
"cdk_dealerid": "", "cdk_dealerid": "",
"title": "" "title": ""
}, },
"emaillater": "",
"employees": "", "employees": "",
"insurancecos": "", "insurancecos": "",
"intakechecklist": "", "intakechecklist": "",
@@ -488,6 +489,7 @@
"notespresets": "", "notespresets": "",
"orderstatuses": "", "orderstatuses": "",
"partslocations": "", "partslocations": "",
"printlater": "",
"rbac": "", "rbac": "",
"responsibilitycenters": { "responsibilitycenters": {
"costs": "", "costs": "",
@@ -2160,6 +2162,7 @@
"date": "", "date": "",
"efficiency": "", "efficiency": "",
"employee": "", "employee": "",
"flat_rate": "",
"memo": "", "memo": "",
"productivehrs": "", "productivehrs": "",
"ro_number": "" "ro_number": ""

View File

@@ -477,6 +477,7 @@
"cdk_dealerid": "", "cdk_dealerid": "",
"title": "" "title": ""
}, },
"emaillater": "",
"employees": "", "employees": "",
"insurancecos": "", "insurancecos": "",
"intakechecklist": "", "intakechecklist": "",
@@ -488,6 +489,7 @@
"notespresets": "", "notespresets": "",
"orderstatuses": "", "orderstatuses": "",
"partslocations": "", "partslocations": "",
"printlater": "",
"rbac": "", "rbac": "",
"responsibilitycenters": { "responsibilitycenters": {
"costs": "", "costs": "",
@@ -2160,6 +2162,7 @@
"date": "", "date": "",
"efficiency": "", "efficiency": "",
"employee": "", "employee": "",
"flat_rate": "",
"memo": "", "memo": "",
"productivehrs": "", "productivehrs": "",
"ro_number": "" "ro_number": ""

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,5 @@
- args:
cascade: false
read_only: false
sql: ALTER TABLE "public"."timetickets" DROP COLUMN "flat_rate";
type: run_sql

View File

@@ -0,0 +1,6 @@
- args:
cascade: false
read_only: false
sql: ALTER TABLE "public"."timetickets" ADD COLUMN "flat_rate" boolean NULL DEFAULT
false;
type: run_sql

View File

@@ -0,0 +1,39 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_insert_permission
- args:
permission:
check:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
set: {}
role: user
table:
name: timetickets
schema: public
type: create_insert_permission

View File

@@ -0,0 +1,40 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_insert_permission
- args:
permission:
check:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
set: {}
role: user
table:
name: timetickets
schema: public
type: create_insert_permission

View File

@@ -0,0 +1,40 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_select_permission
- args:
permission:
allow_aggregations: false
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
computed_fields: []
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
role: user
table:
name: timetickets
schema: public
type: create_select_permission

View File

@@ -0,0 +1,41 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_select_permission
- args:
permission:
allow_aggregations: false
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
computed_fields: []
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
role: user
table:
name: timetickets
schema: public
type: create_select_permission

View File

@@ -0,0 +1,39 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_update_permission
- args:
permission:
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
set: {}
role: user
table:
name: timetickets
schema: public
type: create_update_permission

View File

@@ -0,0 +1,40 @@
- args:
role: user
table:
name: timetickets
schema: public
type: drop_update_permission
- args:
permission:
columns:
- actualhrs
- bodyshopid
- ciecacode
- clockoff
- clockon
- cost_center
- created_at
- date
- employeeid
- flat_rate
- id
- jobid
- memo
- productivehrs
- rate
- updated_at
filter:
bodyshop:
associations:
_and:
- user:
authid:
_eq: X-Hasura-User-Id
- active:
_eq: true
set: {}
role: user
table:
name: timetickets
schema: public
type: create_update_permission

View File

@@ -4097,6 +4097,7 @@ tables:
- created_at - created_at
- date - date
- employeeid - employeeid
- flat_rate
- id - id
- jobid - jobid
- memo - memo
@@ -4116,6 +4117,7 @@ tables:
- created_at - created_at
- date - date
- employeeid - employeeid
- flat_rate
- id - id
- jobid - jobid
- memo - memo
@@ -4144,6 +4146,7 @@ tables:
- created_at - created_at
- date - date
- employeeid - employeeid
- flat_rate
- id - id
- jobid - jobid
- memo - memo

View File

@@ -34,24 +34,13 @@ app.post("/sendemail", fb.validateFirebaseIdToken, sendEmail.sendEmail);
//Test route to ensure Express is responding. //Test route to ensure Express is responding.
app.get("/test", async function (req, res) { app.get("/test", async function (req, res) {
logger.log("test-api", null, null, null); const commit = require("child_process").execSync(
const Commit = require("child_process").execSync(
"git rev-parse --short HEAD" "git rev-parse --short HEAD"
); );
logger.log("test-api-status", "DEBUG", "api", { commit });
res.status(200).send(`OK - ${Commit}`); res.status(200).send(`OK - ${commit}`);
}); });
const test = require("./server/_test/test.js");
app.post("/test", test.testResponse);
//Accounting-IIF
const accountingIIF = require("./server/accounting/iif/iif");
app.post(
"/accounting/iif/receivables",
fb.validateFirebaseIdToken,
accountingIIF.receivables
);
//Accounting Qbxml //Accounting Qbxml
const accountQbxml = require("./server/accounting/qbxml/qbxml"); const accountQbxml = require("./server/accounting/qbxml/qbxml");
@@ -109,8 +98,8 @@ var scheduling = require("./server/scheduling/scheduling-job");
app.post("/scheduling/job", fb.validateFirebaseIdToken, scheduling.job); app.post("/scheduling/job", fb.validateFirebaseIdToken, scheduling.job);
//Handlebars Paths for Email/Report Rendering //Handlebars Paths for Email/Report Rendering
var renderHandlebars = require("./server/render/renderHandlebars"); // var renderHandlebars = require("./server/render/renderHandlebars");
app.post("/render", fb.validateFirebaseIdToken, renderHandlebars.render); // app.post("/render", fb.validateFirebaseIdToken, renderHandlebars.render);
var inlineCss = require("./server/render/inlinecss"); var inlineCss = require("./server/render/inlinecss");
app.post("/render/inlinecss", fb.validateFirebaseIdToken, inlineCss.inlinecss); app.post("/render/inlinecss", fb.validateFirebaseIdToken, inlineCss.inlinecss);
@@ -168,7 +157,11 @@ const io = new Server(server, {
server.listen(port, (error) => { server.listen(port, (error) => {
if (error) throw error; if (error) throw error;
logger.log(`[${process.env.NODE_ENV}] Server running on port ${port}`); logger.log(
`[${process.env.NODE_ENV || "DEVELOPMENT"}] Server running on port ${port}`,
"DEBUG",
"api"
);
}); });
exports.io = io; exports.io = io;
require("./server/web-sockets/web-socket"); require("./server/web-sockets/web-socket");

View File

@@ -1,74 +0,0 @@
const path = require("path");
const admin = require("../firebase/firebase-handler").admin;
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
/**
 * POST /test handler: sends a hardcoded FCM multicast push notification to
 * two fixed device registration tokens, then responds 200 "OK".
 *
 * NOTE(review): the send is fire-and-forget — the 200 response is written
 * before sendMulticast settles, and send failures are only console.logged.
 */
exports.testResponse = async (req, res) => {
console.log("Test");
// Hardcoded FCM registration tokens for the test devices.
const uniqueTokens = [
"f7B-k-ceDNCEAIFYCfhF3M:APA91bEn-xOmUahCBMJBBDqXpVOZJnnb_qhWlo8eOPrIkvFeSc2nqaKd4D8zs3qqZ_VNgS_OhifsetJXcwtczO8N4k3xfDzCyI3i6j6YTUNK56QC-WNmVOLR2C_g-owy7hSvhGhWilZ3",
"eNdzsUqRBBZCM8LQKvqk6e:APA91bFgL0VQLf_TooYmHKQ7_b4H--ZmUYCdgiZpT7dxHSyEkpcCHUz33K7sKqgifUk8rMAEhSsHWa0TJgLbOJxWD6lJaGEpXn8G3PbunkJsJCNCA3CprMONylBr9d6hnQ5wnjUX2Gt6",
];
// Multicast payload: generic notification + data fields, plus a
// webpush-specific section for browser clients.
var message = {
notification: {
title: "Test Notification",
body: "Test Body",
//click_action: "TEST CLICK ACTION",
},
data: {
jobid: "1234",
title: "Test Notification",
body: "Test Body",
},
tokens: uniqueTokens,
// android: {
// notification: {
// body: "This is an FCM notification specifically for android.",
// title: "FCM Notification for Android",
// image: "/logo192.png",
// badge: "/logo192.png",
// },
// },
webpush: {
headers: {
// Urgency: "high",
},
notification: {
body: "This is a message from FCM to web",
requireInteraction: "true",
actions: [{ action: "the action - matched in sw", title: "Read" }],
// renotify: true,
//tag: "1234", image: "/logo192.png",
badge: "/logo192.png",
//badge: "/badge-icon.png",
},
},
};
// Send a message to the device corresponding to the provided
// registration token. (Not awaited — see NOTE above.)
admin
.messaging()
.sendMulticast(message)
.then((response) => {
// Response is a message ID string.
console.log(
"[TEST] Successfully sent FCM Broadcast.:",
response
//JSON.stringify(response)
);
})
.catch((error) => {
console.log("Error sending message:", error);
});
res.status(200).send("OK");
};

View File

@@ -1,167 +0,0 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const path = require("path");
require("dotenv").config({
path: path.resolve(
process.cwd(),
`.env.${process.env.NODE_ENV || "development"}`
),
});
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
/**
 * POST handler: builds a QuickBooks IIF receivables export for one job.
 * Reads `jobId` from the body, forwards the caller's bearer token to the
 * GraphQL endpoint, and streams the assembled IIF lines back as a file
 * attachment named `<ro_number>-RECEIVABLES.iif`. On any failure, logs and
 * responds 400 with the stringified error.
 */
exports.default = async (req, res) => {
const BearerToken = req.headers.authorization;
const { jobId } = req.body;
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: {
Authorization: BearerToken,
},
});
try {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, { id: jobId });
const { jobs_by_pk } = result;
const { bodyshop } = jobs_by_pk;
//Build the IIF file: column header, then TRNS invoice header row,
//then one SPL row per allocation and per tax, then ENDTRNS.
const response = [];
response.push(TRNS_HEADER);
response.push(
generateInvoiceHeader(jobs_by_pk, bodyshop.md_responsibility_centers.ar)
);
//Allocations
//NOTE(review): schema of invoice_allocation (partsAllocations /
//labMatAllocations keyed by category, each with an optional
//`allocations` array) is assumed from usage — confirm against producer.
const invoice_allocation = jobs_by_pk.invoice_allocation;
Object.keys(invoice_allocation.partsAllocations).forEach(
(partsAllocationKey) => {
// `!!!x` is just `!x` — skip categories with no allocations.
if (
!!!invoice_allocation.partsAllocations[partsAllocationKey].allocations
)
return;
invoice_allocation.partsAllocations[
partsAllocationKey
].allocations.forEach((alloc) => {
response.push(
generateInvoiceLine(
jobs_by_pk,
alloc,
bodyshop.md_responsibility_centers
)
);
});
}
);
Object.keys(invoice_allocation.labMatAllocations).forEach(
(AllocationKey) => {
if (!!!invoice_allocation.labMatAllocations[AllocationKey].allocations)
return;
invoice_allocation.labMatAllocations[AllocationKey].allocations.forEach(
(alloc) => {
response.push(
generateInvoiceLine(
jobs_by_pk,
alloc,
bodyshop.md_responsibility_centers
)
);
}
);
}
);
//End Allocations
//Taxes: job_totals is stored as a JSON string; each nonzero tax
//becomes its own SPL line mapped through the responsibility centers.
const taxMapping = bodyshop.md_responsibility_centers.taxes;
const { federal_tax, state_tax, local_tax } = JSON.parse(
jobs_by_pk.job_totals
).totals;
const federal_tax_dinero = Dinero(federal_tax);
const state_tax_dinero = Dinero(state_tax);
const local_tax_dinero = Dinero(local_tax);
if (federal_tax_dinero.getAmount() > 0) {
response.push(
generateTaxLine(jobs_by_pk, federal_tax_dinero, "federal", taxMapping)
);
}
if (state_tax_dinero.getAmount() > 0) {
response.push(
generateTaxLine(jobs_by_pk, state_tax_dinero, "state", taxMapping)
);
}
if (local_tax_dinero.getAmount() > 0) {
response.push(
generateTaxLine(jobs_by_pk, local_tax_dinero, "local", taxMapping)
);
}
//End Taxes
response.push(END_TRNS);
//Prep the response and send it as a downloadable attachment.
res.setHeader("Content-type", "application/octet-stream");
res.setHeader("Content-disposition", "attachment; filename=file.txt");
res.setHeader("filename", `${jobs_by_pk.ro_number}-RECEIVABLES.iif`);
res.send(response.join("\n"));
} catch (error) {
console.log("error", error);
res.status(400).send(JSON.stringify(error));
}
};
const TRNS_HEADER = `!TRNS TRNSID TRNSTYPE DATE ACCNT NAME CLASS AMOUNT DOCNUM MEMO CLEAR TOPRINT NAMEISTAXABLE ADDR1 ADDR2 ADDR3 ADDR4 DUEDATE TERMS OTHER1 PONUM
!SPL SPLID TRNSTYPE DATE ACCNT NAME CLASS AMOUNT DOCNUM MEMO CLEAR QNTY PRICE INVITEM PAYMETH TAXABLE VALADJ SERVICEDATE OTHER2 EXTRA
!ENDTRNS`;
const generateInvoiceHeader = (job, arMapping) =>
`TRNS INVOICE ${generateJobInvoiceDate(job)} ${arMapping.name} GUO DA Acct.# ${
job.ownerid
}:${job.ro_number} 0100 ${job.clm_total} ${job.ro_number} N N Y GUO DA Acct.# ${
job.ownr_id
}:${job.ro_number} ${job.ownr_addr1} ${job.ownr_city} ${job.ownr_st} ${
job.ownr_zip
} `;
const generateInvoiceLine = (job, allocation, responsibilityCenters) => {
const { amount, center } = allocation;
const DineroAmount = Dinero(amount);
const account = responsibilityCenters.profits.find(
(i) => i.name.toLowerCase() === center.toLowerCase()
);
if (!!!account) {
throw new Error(
`A matching account does not exist for the allocation. Center: ${center}`
);
}
return `SPL INVOICE ${generateJobInvoiceDate(job)} ${
account.accountname
} 0100 ${DineroAmount.multiply(-1).toFormat(DineroQbFormat)} ${job.ro_number} ${
account.accountdesc
} N ${DineroAmount.toFormat(DineroQbFormat)} ${account.accountitem} Y N `;
};
const generateTaxLine = (job, amount, type, taxMapping) => {
return `SPL INVOICE ${generateJobInvoiceDate(job)} ${
taxMapping[type].accountname
} ${taxMapping[type].accountdesc} 0100 ${amount
.multiply(-1)
.toFormat(DineroQbFormat)} ${job.ro_number} N ${taxMapping[type].rate.toFixed(
2
)}% ${taxMapping[type].accountitem} N N AUTOSTAX `;
};
const END_TRNS = `ENDTRNS`;
/**
 * Format a job's invoice date as M/D/YYYY (no zero-padding), the date
 * format used on the IIF TRNS/SPL rows.
 * Improvement: parse `job.date_invoiced` into a Date once instead of
 * constructing three identical Date objects.
 *
 * @param {{date_invoiced: string|number|Date}} job - job row; date_invoiced
 *   must be something `new Date(...)` accepts (ISO string or Date).
 * @returns {string} local-time "M/D/YYYY".
 */
const generateJobInvoiceDate = (job) => {
  const invoiced = new Date(job.date_invoiced);
  return `${invoiced.getMonth() + 1}/${invoiced.getDate()}/${invoiced.getFullYear()}`;
};

View File

@@ -1 +0,0 @@
exports.receivables = require("./iif-receivables").default

View File

@@ -7,7 +7,6 @@ var builder = require("xmlbuilder2");
const QbXmlUtils = require("./qbxml-utils"); const QbXmlUtils = require("./qbxml-utils");
const moment = require("moment"); const moment = require("moment");
const logger = require("../../utils/logger"); const logger = require("../../utils/logger");
require("dotenv").config({ require("dotenv").config({
path: path.resolve( path: path.resolve(
process.cwd(), process.cwd(),
@@ -26,6 +25,13 @@ exports.default = async (req, res) => {
}); });
try { try {
logger.log(
"qbxml-payable-create",
"DEBUG",
req.user.email,
req.body.billsToQuery
);
const result = await client const result = await client
.setHeaders({ Authorization: BearerToken }) .setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, { .request(queries.QUERY_BILLS_FOR_PAYABLES_EXPORT, {
@@ -43,15 +49,14 @@ exports.default = async (req, res) => {
}); });
//For each invoice. //For each invoice.
res.status(200).json(QbXmlToExecute); res.status(200).json(QbXmlToExecute);
} catch (error) { } catch (error) {
logger.log( logger.log(
"qbxml-payable-error", "qbxml-payable-error",
"error", "ERROR",
req.body.user, req.user.email,
req.body.billsToQuery, req.body.billsToQuery,
error { error }
); );
res.status(400).send(JSON.stringify(error)); res.status(400).send(JSON.stringify(error));
} }
@@ -119,13 +124,6 @@ const generateBillLine = (billLine, responsibilityCenters, jobClass) => {
}; };
}; };
// [
// {
// AccountRef: { FullName: "BODY SHOP COST:SUBLET" },
// Amount: invoice.amount,
// },
// ],
const findTaxCode = (billLine, taxcode) => { const findTaxCode = (billLine, taxcode) => {
const { const {
applicable_taxes: { local, state, federal }, applicable_taxes: { local, state, federal },

View File

@@ -29,6 +29,14 @@ exports.default = async (req, res) => {
}); });
try { try {
logger.log(
"qbxml-payments-create",
"DEBUG",
req.user.email,
req.body.paymentsToQuery,
null
);
const result = await client const result = await client
.setHeaders({ Authorization: BearerToken }) .setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_PAYMENTS_FOR_EXPORT, { .request(queries.QUERY_PAYMENTS_FOR_EXPORT, {
@@ -85,7 +93,7 @@ exports.default = async (req, res) => {
logger.log( logger.log(
"qbxml-payments-error", "qbxml-payments-error",
"error", "error",
req.body.user, req.user.email,
req.body.paymentsToQuery, req.body.paymentsToQuery,
error error
); );

View File

@@ -29,6 +29,14 @@ exports.default = async (req, res) => {
}); });
try { try {
logger.log(
"qbxml-receivables-create",
"DEBUG",
req.user.email,
req.body.jobIds,
null
);
const result = await client const result = await client
.setHeaders({ Authorization: BearerToken }) .setHeaders({ Authorization: BearerToken })
.request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, { ids: jobIds }); .request(queries.QUERY_JOBS_FOR_RECEIVABLES_EXPORT, { ids: jobIds });
@@ -96,9 +104,9 @@ exports.default = async (req, res) => {
res.status(200).json(QbXmlToExecute); res.status(200).json(QbXmlToExecute);
} catch (error) { } catch (error) {
logger.log( logger.log(
"qbxml-payments-error", "qbxml-receivables-error",
"error", "error",
req.body.user, req.user.email,
req.body.jobIds, req.body.jobIds,
error error
); );
@@ -114,13 +122,13 @@ const generateSourceCustomerQbxml = (jobs_by_pk, bodyshop) => {
CustomerAddRq: { CustomerAddRq: {
CustomerAdd: { CustomerAdd: {
Name: jobs_by_pk.ins_co_nm, Name: jobs_by_pk.ins_co_nm,
BillAddress: { // BillAddress: {
Addr1: jobs_by_pk.ownr_addr1, // Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2, // Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city, // City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st, // State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownrzip, // PostalCode: jobs_by_pk.ownr_zip,
}, // },
}, },
}, },
}, },
@@ -173,6 +181,25 @@ const generateJobQbxml = (
FullName: ParentRefName, FullName: ParentRefName,
} }
: null, : null,
...(tierLevel === 3
? {
BillAddress: {
Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
ShipAddress: {
Addr1: jobs_by_pk.ownr_addr1,
Addr2: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
Email: jobs_by_pk.ownr_ea,
}
: {}),
}, },
}, },
}, },
@@ -215,7 +242,7 @@ const generateInvoiceQbxml = (
jobs_by_pk.joblines.map((jobline) => { jobs_by_pk.joblines.map((jobline) => {
//Parts Lines //Parts Lines
if (jobline.db_ref === "936008") { if (jobline.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals calculations. //If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true; hasMapaLine = true;
} }
if (jobline.db_ref === "936007") { if (jobline.db_ref === "936007") {
@@ -304,7 +331,7 @@ const generateInvoiceQbxml = (
// console.log("Done creating hash", JSON.stringify(invoiceLineHash)); // console.log("Done creating hash", JSON.stringify(invoiceLineHash));
if (!hasMapaLine && jobs_by_pk.job_totals.rates.mapa.total.amount > 0) { if (!hasMapaLine && jobs_by_pk.job_totals.rates.mapa.total.amount > 0) {
console.log("Adding MAPA Line Manually."); // console.log("Adding MAPA Line Manually.");
const mapaAccountName = responsibilityCenters.defaults.profits.MAPA; const mapaAccountName = responsibilityCenters.defaults.profits.MAPA;
const mapaAccount = responsibilityCenters.profits.find( const mapaAccount = responsibilityCenters.profits.find(
@@ -329,7 +356,7 @@ const generateInvoiceQbxml = (
} }
if (!hasMashLine && jobs_by_pk.job_totals.rates.mash.total.amount > 0) { if (!hasMashLine && jobs_by_pk.job_totals.rates.mash.total.amount > 0) {
console.log("Adding MASH Line Manually."); // console.log("Adding MASH Line Manually.");
const mashAccountName = responsibilityCenters.defaults.profits.MASH; const mashAccountName = responsibilityCenters.defaults.profits.MASH;
@@ -350,7 +377,7 @@ const generateInvoiceQbxml = (
}, },
}); });
} else { } else {
console.log("NO MASH ACCOUNT FOUND!!"); // console.log("NO MASH ACCOUNT FOUND!!");
} }
} }
@@ -438,6 +465,18 @@ const generateInvoiceQbxml = (
TxnDate: moment(jobs_by_pk.date_invoiced).format("YYYY-MM-DD"), TxnDate: moment(jobs_by_pk.date_invoiced).format("YYYY-MM-DD"),
RefNumber: jobs_by_pk.ro_number, RefNumber: jobs_by_pk.ro_number,
BillAddress: {
Addr1: jobs_by_pk.ownr_co_nm
? jobs_by_pk.ownr_co_nm.substring(0, 30)
: `${`${jobs_by_pk.ownr_ln || ""} ${
jobs_by_pk.ownr_fn || ""
}`.substring(0, 30)}`,
Addr2: jobs_by_pk.ownr_addr1,
Addr3: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownr_zip,
},
ShipAddress: { ShipAddress: {
Addr1: jobs_by_pk.ownr_co_nm Addr1: jobs_by_pk.ownr_co_nm
? jobs_by_pk.ownr_co_nm.substring(0, 30) ? jobs_by_pk.ownr_co_nm.substring(0, 30)
@@ -448,9 +487,14 @@ const generateInvoiceQbxml = (
Addr3: jobs_by_pk.ownr_addr2, Addr3: jobs_by_pk.ownr_addr2,
City: jobs_by_pk.ownr_city, City: jobs_by_pk.ownr_city,
State: jobs_by_pk.ownr_st, State: jobs_by_pk.ownr_st,
PostalCode: jobs_by_pk.ownrzip, PostalCode: jobs_by_pk.ownr_zip,
}, },
PONumber: jobs_by_pk.clm_no, PONumber: jobs_by_pk.clm_no,
IsToBePrinted: bodyshop.accountingconfig.printlater,
...(jobs_by_pk.ownr_ea
? { IsToBeEmailed: bodyshop.accountingconfig.emaillater }
: {}),
InvoiceLineAdd: InvoiceLineAdd, InvoiceLineAdd: InvoiceLineAdd,
}, },
}, },

View File

@@ -4,7 +4,7 @@ const Dinero = require("dinero.js");
const moment = require("moment"); const moment = require("moment");
var builder = require("xmlbuilder2"); var builder = require("xmlbuilder2");
const _ = require("lodash"); const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({ require("dotenv").config({
path: path.resolve( path: path.resolve(
process.cwd(), process.cwd(),
@@ -25,7 +25,7 @@ const ftpSetup = {
port: process.env.AUTOHOUSE_PORT, port: process.env.AUTOHOUSE_PORT,
username: process.env.AUTOHOUSE_USER, username: process.env.AUTOHOUSE_USER,
password: process.env.AUTOHOUSE_PASSWORD, password: process.env.AUTOHOUSE_PASSWORD,
debug: console.log, debug: (message, ...data) => logger.log(message, "DEBUG", "api", null, data),
algorithms: { algorithms: {
serverHostKey: ["ssh-rsa", "ssh-dss"], serverHostKey: ["ssh-rsa", "ssh-dss"],
}, },
@@ -33,14 +33,16 @@ const ftpSetup = {
exports.default = async (req, res) => { exports.default = async (req, res) => {
//Query for the List of Bodyshop Clients. //Query for the List of Bodyshop Clients.
console.log("Starting Autohouse datapump request."); logger.log("autohouse-start", "DEBUG", "api", null, null);
const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS); const { bodyshops } = await client.request(queries.GET_AUTOHOUSE_SHOPS);
const allxmlsToUpload = []; const allxmlsToUpload = [];
const allErrors = []; const allErrors = [];
try { try {
for (const bodyshop of bodyshops) { for (const bodyshop of bodyshops) {
console.log("Starting extract for ", bodyshop.shopname); logger.log("autohouse-start-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname,
});
const erroredJobs = []; const erroredJobs = [];
try { try {
const { jobs } = await client.request(queries.AUTOHOUSE_QUERY, { const { jobs } = await client.request(queries.AUTOHOUSE_QUERY, {
@@ -60,12 +62,12 @@ exports.default = async (req, res) => {
}, },
}; };
console.log( if (erroredJobs.length > 0) {
bodyshop.shopname, logger.log("autohouse-failed-jobs", "ERROR", "api", bodyshop.id, {
"***Number of Failed jobs***: ", count: erroredJobs.length,
erroredJobs.length, jobs: JSON.stringify(erroredJobs.map((j) => j.job.ro_number)),
JSON.stringify(erroredJobs.map((j) => j.job.ro_number)) });
); }
var ret = builder var ret = builder
.create(autoHouseObject, { .create(autoHouseObject, {
@@ -81,10 +83,15 @@ exports.default = async (req, res) => {
)}.xml`, )}.xml`,
}); });
console.log("Finished extract for shop ", bodyshop.shopname); logger.log("autohouse-end-shop-extract", "DEBUG", "api", bodyshop.id, {
shopname: bodyshop.shopname,
});
} catch (error) { } catch (error) {
//Error at the shop level. //Error at the shop level.
console.log("Error at shop level", bodyshop.shopname, error); logger.log("autohouse-error-shop", "ERROR", "api", bodyshop.id, {
error,
});
allErrors.push({ allErrors.push({
bodyshopid: bodyshop.id, bodyshopid: bodyshop.id,
imexshopid: bodyshop.imexshopid, imexshopid: bodyshop.imexshopid,
@@ -102,7 +109,9 @@ exports.default = async (req, res) => {
let sftp = new Client(); let sftp = new Client();
sftp.on("error", (errors) => sftp.on("error", (errors) =>
console.log("Error in FTP client", JSON.stringify(errors)) logger.log("autohouse-sftp-error", "ERROR", "api", null, {
errors,
})
); );
try { try {
//Connect to the FTP and upload all. //Connect to the FTP and upload all.
@@ -110,20 +119,24 @@ exports.default = async (req, res) => {
await sftp.connect(ftpSetup); await sftp.connect(ftpSetup);
for (const xmlObj of allxmlsToUpload) { for (const xmlObj of allxmlsToUpload) {
console.log("Uploading", xmlObj.filename); logger.log("autohouse-sftp-upload", "DEBUG", "api", null, {
filename: xmlObj.filename,
});
const uploadResult = await sftp.put( const uploadResult = await sftp.put(
Buffer.from(xmlObj.xml), Buffer.from(xmlObj.xml),
`/${xmlObj.filename}` `/${xmlObj.filename}`
); );
console.log( logger.log("autohouse-sftp-upload-result", "DEBUG", "api", null, {
"🚀 ~ file: autohouse.js ~ line 94 ~ uploadResult", uploadResult,
uploadResult });
);
} }
//***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml //***TODO Change filing naming when creating the cron job. IM_ShopInternalName_DDMMYYYY_HHMMSS.xml
} catch (error) { } catch (error) {
console.log("Error when connecting to FTP", error); logger.log("autohouse-sftp-error", "ERROR", "api", null, {
error,
});
} finally { } finally {
sftp.end(); sftp.end();
} }
@@ -498,7 +511,10 @@ const CreateRepairOrderTag = (job, errorCallback) => {
}; };
return ret; return ret;
} catch (error) { } catch (error) {
console.log("Error calculating job", error); logger.log("autohouse-job-calculate-error", "ERROR", "api", null, {
error,
});
errorCallback({ job, error }); errorCallback({ job, error });
} }
}; };
@@ -510,7 +526,7 @@ const CreateCosts = (job) => {
//At the bill level. //At the bill level.
bill_val.billlines.map((line_val) => { bill_val.billlines.map((line_val) => {
//At the bill line level. //At the bill line level.
//console.log("JobCostingPartsTable -> line_val", line_val);
if (!bill_acc[line_val.cost_center]) if (!bill_acc[line_val.cost_center])
bill_acc[line_val.cost_center] = Dinero(); bill_acc[line_val.cost_center] = Dinero();

View File

@@ -8,7 +8,7 @@ require("dotenv").config({
const axios = require("axios"); const axios = require("axios");
let nodemailer = require("nodemailer"); let nodemailer = require("nodemailer");
let aws = require("aws-sdk"); let aws = require("aws-sdk");
const logger = require("../utils/logger");
const ses = new aws.SES({ const ses = new aws.SES({
apiVersion: "2010-12-01", apiVersion: "2010-12-01",
region: "ca-central-1", region: "ca-central-1",
@@ -19,9 +19,13 @@ let transporter = nodemailer.createTransport({
}); });
exports.sendEmail = async (req, res) => { exports.sendEmail = async (req, res) => {
if (process.env.NODE_ENV !== "production") { logger.log("send-email", "DEBUG", req.user.email, null, {
console.log("[EMAIL] Incoming Message", req.body.from.name); from: `${req.body.from.name} <${req.body.from.address}>`,
} replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
});
let downloadedMedia = []; let downloadedMedia = [];
if (req.body.media && req.body.media.length > 0) { if (req.body.media && req.body.media.length > 0) {
@@ -30,7 +34,14 @@ exports.sendEmail = async (req, res) => {
try { try {
return getImage(m); return getImage(m);
} catch (error) { } catch (error) {
console.log(error); logger.log("send-email-error", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error,
});
} }
}) })
); );
@@ -73,10 +84,26 @@ exports.sendEmail = async (req, res) => {
(err, info) => { (err, info) => {
console.log(err || info); console.log(err || info);
if (info) { if (info) {
console.log("[EMAIL] Email sent: " + info); logger.log("send-email-success", "DEBUG", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
info,
});
res.json({ success: true, response: info }); res.json({ success: true, response: info });
} else { } else {
console.log("[EMAIL] Email send failed. ", err); logger.log("send-email-failure", "ERROR", req.user.email, null, {
from: `${req.body.from.name} <${req.body.from.address}>`,
replyTo: req.body.ReplyTo.Email,
to: req.body.to,
cc: req.body.cc,
subject: req.body.subject,
error: err,
});
res.json({ success: false, error: err }); res.json({ success: false, error: err });
} }
} }

View File

@@ -1,5 +1,5 @@
var admin = require("firebase-admin"); var admin = require("firebase-admin");
const logger = require("../utils/logger");
const path = require("path"); const path = require("path");
require("dotenv").config({ require("dotenv").config({
path: path.resolve( path: path.resolve(
@@ -26,8 +26,20 @@ const adminEmail = [
]; ];
exports.updateUser = (req, res) => { exports.updateUser = (req, res) => {
console.log("USer Requesting", req.user); logger.log("admin-update-user", "WARN", req.user.email, null, {
request: req.body,
});
if (!adminEmail.includes(req.user.email)) { if (!adminEmail.includes(req.user.email)) {
logger.log(
"admin-update-user-unauthorized",
"ERROR",
req.user.email,
null,
{
request: req.body,
user: req.user,
}
);
res.sendStatus(404); res.sendStatus(404);
} }
@@ -48,11 +60,16 @@ exports.updateUser = (req, res) => {
) )
.then((userRecord) => { .then((userRecord) => {
// See the UserRecord reference doc for the contents of userRecord. // See the UserRecord reference doc for the contents of userRecord.
console.log("Successfully updated user", userRecord.toJSON());
logger.log("admin-update-user-success", "DEBUG", req.user.email, null, {
userRecord,
});
res.json(userRecord); res.json(userRecord);
}) })
.catch((error) => { .catch((error) => {
console.log("Error updating user:", error); logger.log("admin-update-user-error", "ERROR", req.user.email, null, {
error,
});
res.status(500).json(error); res.status(500).json(error);
}); });
}; };
@@ -85,8 +102,6 @@ exports.sendNotification = (req, res) => {
}; };
exports.validateFirebaseIdToken = async (req, res, next) => { exports.validateFirebaseIdToken = async (req, res, next) => {
console.log("Check if request is authorized with Firebase ID token");
if ( if (
(!req.headers.authorization || (!req.headers.authorization ||
!req.headers.authorization.startsWith("Bearer ")) && !req.headers.authorization.startsWith("Bearer ")) &&
@@ -112,7 +127,10 @@ exports.validateFirebaseIdToken = async (req, res, next) => {
} else { } else {
// No cookie // No cookie
console.error("Unauthorized attempt. No cookie provided."); console.error("Unauthorized attempt. No cookie provided.");
logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "no-cookie",
});
res.status(403).send("Unauthorized"); res.status(403).send("Unauthorized");
return; return;
} }
@@ -124,7 +142,12 @@ exports.validateFirebaseIdToken = async (req, res, next) => {
next(); next();
return; return;
} catch (error) { } catch (error) {
console.error("Error while verifying Firebase ID token:", error); logger.log("api-unauthorized-call", "WARN", null, null, {
req,
type: "unauthroized",
error,
});
res.status(403).send("Unauthorized"); res.status(403).send("Unauthorized");
return; return;
} }

View File

@@ -63,6 +63,7 @@ query QUERY_JOBS_FOR_RECEIVABLES_EXPORT($ids: [uuid!]!) {
ownr_zip ownr_zip
ownr_city ownr_city
ownr_st ownr_st
ownr_ea
ins_co_nm ins_co_nm
job_totals job_totals
rate_la1 rate_la1
@@ -751,6 +752,7 @@ exports.QUERY_JOB_COSTING_DETAILS = ` query QUERY_JOB_COSTING_DETAILS($id: uuid!
ca_customer_gst ca_customer_gst
joblines(where: { removed: { _eq: false } }) { joblines(where: { removed: { _eq: false } }) {
id id
db_ref
unq_seq unq_seq
line_ind line_ind
tax_part tax_part
@@ -790,6 +792,7 @@ exports.QUERY_JOB_COSTING_DETAILS = ` query QUERY_JOB_COSTING_DETAILS($id: uuid!
cost_center cost_center
actualhrs actualhrs
productivehrs productivehrs
flat_rate
} }
bodyshop{ bodyshop{
id id
@@ -852,6 +855,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
ca_customer_gst ca_customer_gst
joblines(where: {removed: {_eq: false}}) { joblines(where: {removed: {_eq: false}}) {
id id
db_ref
unq_seq unq_seq
line_ind line_ind
tax_part tax_part
@@ -891,6 +895,7 @@ exports.QUERY_JOB_COSTING_DETAILS_MULTI = ` query QUERY_JOB_COSTING_DETAILS_MULT
cost_center cost_center
actualhrs actualhrs
productivehrs productivehrs
flat_rate
} }
bodyshop { bodyshop {
id id

View File

@@ -3,16 +3,16 @@ const queries = require("../graphql-client/queries");
//const client = require("../graphql-client/graphql-client").client; //const client = require("../graphql-client/graphql-client").client;
const _ = require("lodash"); const _ = require("lodash");
const GraphQLClient = require("graphql-request").GraphQLClient; const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD"; // Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA"; // Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN"; Dinero.globalRoundingMode = "HALF_EVEN";
async function JobCosting(req, res) { async function JobCosting(req, res) {
const { jobid } = req.body; const { jobid } = req.body;
console.time("Query for Data");
const BearerToken = req.headers.authorization;
const BearerToken = req.headers.authorization;
logger.log("job-costing-start", "DEBUG", req.user.email, jobid, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: { headers: {
Authorization: BearerToken, Authorization: BearerToken,
@@ -25,15 +25,16 @@ async function JobCosting(req, res) {
.request(queries.QUERY_JOB_COSTING_DETAILS, { .request(queries.QUERY_JOB_COSTING_DETAILS, {
id: jobid, id: jobid,
}); });
console.timeEnd("querydata");
console.time(`generatecostingdata-${resp.jobs_by_pk.id}`);
const ret = GenerateCostingData(resp.jobs_by_pk); const ret = GenerateCostingData(resp.jobs_by_pk);
console.timeEnd(`generatecostingdata-${resp.jobs_by_pk.id}`);
res.status(200).json(ret); res.status(200).json(ret);
} catch (error) { } catch (error) {
console.log("error", error); logger.log("job-costing-error", "ERROR", req.user.email, jobid, {
jobid,
error,
});
res.status(400).send(JSON.stringify(error)); res.status(400).send(JSON.stringify(error));
} }
} }
@@ -41,7 +42,8 @@ async function JobCosting(req, res) {
async function JobCostingMulti(req, res) { async function JobCostingMulti(req, res) {
const { jobids } = req.body; const { jobids } = req.body;
const BearerToken = req.headers.authorization; const BearerToken = req.headers.authorization;
console.time("JobCostingMultiQueryExecution"); logger.log("job-costing-multi-start", "DEBUG", req.user.email, jobids, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: { headers: {
Authorization: BearerToken, Authorization: BearerToken,
@@ -78,12 +80,8 @@ async function JobCostingMulti(req, res) {
const ret = {}; const ret = {};
resp.jobs.map((job) => { resp.jobs.map((job) => {
console.time(`CostingData-${job.id}`);
const costingData = GenerateCostingData(job); const costingData = GenerateCostingData(job);
ret[job.id] = costingData; ret[job.id] = costingData;
console.timeEnd(`CostingData-${job.id}`);
console.time(`SummaryOfCostingData-${job.id}`);
//Merge on a cost center basis. //Merge on a cost center basis.
@@ -165,7 +163,6 @@ async function JobCostingMulti(req, res) {
costingData.summaryData.totalPartsGp costingData.summaryData.totalPartsGp
); );
console.timeEnd(`SummaryOfCostingData-${job.id}`);
//Take the summary data & add it to total summary data. //Take the summary data & add it to total summary data.
}); });
@@ -220,15 +217,16 @@ async function JobCostingMulti(req, res) {
//Calculate thte total gross profit percentages. //Calculate thte total gross profit percentages.
console.timeEnd("JobCostingMultiQueryExecution");
res.status(200).json({ res.status(200).json({
allCostCenterData: finalCostingdata, allCostCenterData: finalCostingdata,
allSummaryData: multiSummary.summaryData, allSummaryData: multiSummary.summaryData,
data: ret, data: ret,
}); });
} catch (error) { } catch (error) {
console.log("error", error); logger.log("job-costing-multi-error", "ERROR", req.user.email, [jobids], {
jobids,
error,
});
res.status(400).send(JSON.stringify(error)); res.status(400).send(JSON.stringify(error));
} }
} }
@@ -242,12 +240,22 @@ function GenerateCostingData(job) {
); );
const materialsHours = { mapaHrs: 0, mashHrs: 0 }; const materialsHours = { mapaHrs: 0, mashHrs: 0 };
let hasMapaLine = false;
let hasMashLine = false;
//Massage the data. //Massage the data.
const jobLineTotalsByProfitCenter = const jobLineTotalsByProfitCenter =
job && job &&
job.joblines.reduce( job.joblines.reduce(
(acc, val) => { (acc, val) => {
//Parts Lines
if (val.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
}
if (val.db_ref === "936007") {
hasMashLine = true;
}
if (val.mod_lbr_ty) { if (val.mod_lbr_ty) {
const laborProfitCenter = const laborProfitCenter =
val.profitcenter_labor || defaultProfits[val.mod_lbr_ty] || "?"; val.profitcenter_labor || defaultProfits[val.mod_lbr_ty] || "?";
@@ -265,32 +273,11 @@ function GenerateCostingData(job) {
acc.labor[laborProfitCenter].add(laborAmount); acc.labor[laborProfitCenter].add(laborAmount);
if (val.mod_lbr_ty === "LAR") { if (val.mod_lbr_ty === "LAR") {
if (!acc.parts[defaultProfits["MAPA"]])
acc.parts[defaultProfits["MAPA"]] = Dinero();
materialsHours.mapaHrs += val.mod_lb_hrs || 0; materialsHours.mapaHrs += val.mod_lb_hrs || 0;
acc.parts[defaultProfits["MAPA"]] = acc.parts[
defaultProfits["MAPA"]
].add(
Dinero({
amount: Math.round((job.rate_mapa || 0) * 100),
}).multiply(val.mod_lb_hrs || 0)
);
} }
if (!acc.parts[defaultProfits["MASH"]])
acc.parts[defaultProfits["MASH"]] = Dinero();
if (val.mod_lbr_ty !== "LAR") { if (val.mod_lbr_ty !== "LAR") {
acc.parts[defaultProfits["MASH"]] = acc.parts[
defaultProfits["MASH"]
].add(
Dinero({
amount: Math.round((job.rate_mash || 0) * 100),
}).multiply(val.mod_lb_hrs || 0)
);
materialsHours.mashHrs += val.mod_lb_hrs || 0; materialsHours.mashHrs += val.mod_lb_hrs || 0;
} }
//If labor line, add to paint and shop materials.
} }
if (val.part_type && val.part_type !== "PAE") { if (val.part_type && val.part_type !== "PAE") {
@@ -358,6 +345,27 @@ function GenerateCostingData(job) {
{ parts: {}, labor: {} } { parts: {}, labor: {} }
); );
if (!hasMapaLine) {
if (!jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]])
jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]] = Dinero();
jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]] =
jobLineTotalsByProfitCenter.parts[defaultProfits["MAPA"]].add(
Dinero({
amount: Math.round((job.rate_mapa || 0) * 100),
}).multiply(materialsHours.mapaHrs || 0)
);
}
if (!hasMashLine) {
if (!jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]])
jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]] = Dinero();
jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]] =
jobLineTotalsByProfitCenter.parts[defaultProfits["MASH"]].add(
Dinero({
amount: Math.round((job.rate_mash || 0) * 100),
}).multiply(materialsHours.mashHrs || 0)
);
}
const billTotalsByCostCenters = job.bills.reduce((bill_acc, bill_val) => { const billTotalsByCostCenters = job.bills.reduce((bill_acc, bill_val) => {
//At the bill level. //At the bill level.
bill_val.billlines.map((line_val) => { bill_val.billlines.map((line_val) => {
@@ -437,7 +445,11 @@ function GenerateCostingData(job) {
].add( ].add(
Dinero({ Dinero({
amount: Math.round((ticket_val.rate || 0) * 100), amount: Math.round((ticket_val.rate || 0) * 100),
}).multiply(ticket_val.actualhrs || ticket_val.productivehrs || 0) }).multiply(
ticket_val.flat_rate
? ticket_val.productivehrs || ticket_val.actualhrs || 0
: ticket_val.actualhrs || ticket_val.productivehrs || 0
) //Should base this on the employee.
); );
return ticket_acc; return ticket_acc;
@@ -603,17 +615,14 @@ const formatGpPercent = (gppercent) => {
//Verify that this stays in line with jobs-close-auto-allocate logic from the application. //Verify that this stays in line with jobs-close-auto-allocate logic from the application.
const getAdditionalCostCenter = (jl, profitCenters) => { const getAdditionalCostCenter = (jl, profitCenters) => {
console.log("Checking additional cost center", jl.line_desc);
if (!jl.part_type && !jl.mod_lbr_ty) { if (!jl.part_type && !jl.mod_lbr_ty) {
const lineDesc = jl.line_desc ? jl.line_desc.toLowerCase() : ""; const lineDesc = jl.line_desc.toLowerCase();
//This logic is covered prior and assigned based on the labor type of the lines
// if (lineDesc.includes("shop materials")) { if (lineDesc.includes("shop mat")) {
// return profitCenters["MASH"]; return profitCenters["MASH"];
// } else if (lineDesc.includes("paint/materials")) { } else if (lineDesc.includes("paint/mat")) {
// return profitCenters["MAPA"]; return profitCenters["MAPA"];
// } else } else if (lineDesc.includes("ats amount")) {
//End covered logic
if (lineDesc.includes("ats amount")) {
return profitCenters["ATS"]; return profitCenters["ATS"];
} else { } else {
return null; return null;

View File

@@ -1,7 +1,7 @@
const Dinero = require("dinero.js"); const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries"); const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient; const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD"; // Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA"; // Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN"; Dinero.globalRoundingMode = "HALF_EVEN";
@@ -9,7 +9,7 @@ Dinero.globalRoundingMode = "HALF_EVEN";
exports.totalsSsu = async function (req, res) { exports.totalsSsu = async function (req, res) {
const BearerToken = req.headers.authorization; const BearerToken = req.headers.authorization;
const { id } = req.body; const { id } = req.body;
logger.log("job-totals-ssu", "DEBUG", req.user.email, id, null);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: { headers: {
Authorization: BearerToken, Authorization: BearerToken,
@@ -43,7 +43,10 @@ exports.totalsSsu = async function (req, res) {
res.status(200).send(); res.status(200).send();
} catch (error) { } catch (error) {
console.log(error); logger.log("job-totals-ssu-error", "ERROR", req.user.email, id, {
jobid: id,
error,
});
res.status(503).send(); res.status(503).send();
} }
}; };
@@ -51,9 +54,6 @@ exports.totalsSsu = async function (req, res) {
//IMPORTANT*** These two functions MUST be mirrrored. //IMPORTANT*** These two functions MUST be mirrrored.
async function TotalsServerSide(req, res) { async function TotalsServerSide(req, res) {
const { job } = req.body; const { job } = req.body;
console.log(
`Calculating Job Totals on the server side for ${job.id} - ${job.ro_number}`
);
try { try {
let ret = { let ret = {
parts: CalculatePartsTotals(job.joblines), parts: CalculatePartsTotals(job.joblines),
@@ -64,14 +64,20 @@ async function TotalsServerSide(req, res) {
return ret; return ret;
} catch (error) { } catch (error) {
console.log("error", error); logger.log("job-totals-ssu-error", "ERROR", req.user.email, job.id, {
jobid: job.id,
error,
});
res.status(400).send(JSON.stringify(error)); res.status(400).send(JSON.stringify(error));
} }
} }
async function Totals(req, res) { async function Totals(req, res) {
const { job } = req.body; const { job } = req.body;
console.log(`Calculating Job Totals for ${job.id} - ${job.ro_number}`); logger.log("job-totals", "DEBUG", req.user.email, job.id, {
jobid: job.id,
});
try { try {
let ret = { let ret = {
parts: CalculatePartsTotals(job.joblines), parts: CalculatePartsTotals(job.joblines),
@@ -82,7 +88,10 @@ async function Totals(req, res) {
res.status(200).json(ret); res.status(200).json(ret);
} catch (error) { } catch (error) {
console.log("error", error); logger.log("job-totals-error", "ERROR", req.user.email, job.id, {
jobid: job.id,
error,
});
res.status(400).send(JSON.stringify(error)); res.status(400).send(JSON.stringify(error));
} }
} }
@@ -157,7 +166,28 @@ function CalculateRatesTotals(ratesList) {
}, },
}; };
//Determine if there are MAPA and MASH lines already on the estimate.
//If there are, don't do anything extra (mitchell estimate)
//Otherwise, calculate them and add them to the default MAPA and MASH centers.
let hasMapaLine = false;
let hasMashLine = false;
jobLines.forEach((item) => { jobLines.forEach((item) => {
//IO-1317 Use the lines on the estimate if they exist instead.
if (item.db_ref === "936008") {
//If either of these DB REFs change, they also need to change in job-totals/job-costing calculations.
hasMapaLine = true;
ret["mapa"].total = Dinero({
amount: Math.round((item.act_price || 0) * 100),
});
}
if (item.db_ref === "936007") {
hasMashLine = true;
ret["mash"].total = Dinero({
amount: Math.round((item.act_price || 0) * 100),
});
}
if (item.mod_lbr_ty) { if (item.mod_lbr_ty) {
//There's a labor type, assign the hours. //There's a labor type, assign the hours.
ret[item.mod_lbr_ty.toLowerCase()].hours = ret[item.mod_lbr_ty.toLowerCase()].hours =
@@ -173,11 +203,22 @@ function CalculateRatesTotals(ratesList) {
let subtotal = Dinero({ amount: 0 }); let subtotal = Dinero({ amount: 0 });
let rates_subtotal = Dinero({ amount: 0 }); let rates_subtotal = Dinero({ amount: 0 });
for (const property in ret) { for (const property in ret) {
ret[property].total = Dinero({ //Skip calculating mapa and mash if we got the amounts.
amount: Math.round((ret[property].rate || 0) * 100), if (
}).multiply(ret[property].hours); !(
(property === "mapa" && hasMapaLine) ||
(property === "mash" && hasMashLine)
)
) {
ret[property].total = Dinero({
amount: Math.round((ret[property].rate || 0) * 100),
}).multiply(ret[property].hours);
}
subtotal = subtotal.add(ret[property].total); subtotal = subtotal.add(ret[property].total);
if (property !== "mapa" && property !== "mash") if (property !== "mapa" && property !== "mash")
rates_subtotal = rates_subtotal.add(ret[property].total); rates_subtotal = rates_subtotal.add(ret[property].total);
} }
@@ -363,7 +404,8 @@ function CalculateTaxesTotals(job, otherTotals) {
job.joblines job.joblines
.filter((jl) => !jl.removed) .filter((jl) => !jl.removed)
.forEach((val) => { .forEach((val) => {
if (!val.tax_part || (!val.part_type && IsAdditionalCost(val))) { if (!val.tax_part) return;
if (!val.part_type && IsAdditionalCost(val)) {
additionalItemsTax = additionalItemsTax.add( additionalItemsTax = additionalItemsTax.add(
Dinero({ amount: Math.round((val.act_price || 0) * 100) }) Dinero({ amount: Math.round((val.act_price || 0) * 100) })
.multiply(val.part_qty || 0) .multiply(val.part_qty || 0)

View File

@@ -1,5 +1,7 @@
const path = require("path"); const path = require("path");
const _ = require("lodash"); const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({ require("dotenv").config({
path: path.resolve( path: path.resolve(
process.cwd(), process.cwd(),
@@ -11,8 +13,7 @@ var cloudinary = require("cloudinary").v2;
cloudinary.config(process.env.CLOUDINARY_URL); cloudinary.config(process.env.CLOUDINARY_URL);
exports.createSignedUploadURL = (req, res) => { exports.createSignedUploadURL = (req, res) => {
console.log("Request to create signed upload URL for Cloudinary.", req.body); logger.log("media-signed-upload", "DEBUG", req.user.email, null, null);
res.send( res.send(
cloudinary.utils.api_sign_request( cloudinary.utils.api_sign_request(
req.body, req.body,
@@ -23,6 +24,7 @@ exports.createSignedUploadURL = (req, res) => {
exports.downloadFiles = (req, res) => { exports.downloadFiles = (req, res) => {
const { ids } = req.body; const { ids } = req.body;
logger.log("media-bulk-download", "DEBUG", req.user.email, ids, null);
const url = cloudinary.utils.download_zip_url({ const url = cloudinary.utils.download_zip_url({
public_ids: ids, public_ids: ids,
@@ -34,7 +36,8 @@ exports.downloadFiles = (req, res) => {
exports.deleteFiles = async (req, res) => { exports.deleteFiles = async (req, res) => {
const { ids } = req.body; const { ids } = req.body;
const types = _.groupBy(ids, (x) => DetermineFileType(x.type)); const types = _.groupBy(ids, (x) => DetermineFileType(x.type));
console.log("🚀 ~ file: media.js ~ line 28 ~ types", types);
logger.log("media-bulk-delete", "DEBUG", req.user.email, ids, null);
const returns = []; const returns = [];
if (types.image) { if (types.image) {
@@ -65,16 +68,15 @@ exports.deleteFiles = async (req, res) => {
) )
); );
} }
console.log("🚀 ~ file: media.js ~ line 40 ~ returns", returns);
res.send(returns); res.send(returns);
}; };
exports.renameKeys = async (req, res) => { exports.renameKeys = async (req, res) => {
const { documents } = req.body; const { documents } = req.body;
//{id: "", from: "", to:""} logger.log("media-bulk-rename", "DEBUG", req.user.email, null, documents);
const proms = []; const proms = [];
console.log("Documents", documents);
documents.forEach((d) => { documents.forEach((d) => {
proms.push( proms.push(
(async () => { (async () => {

View File

@@ -5,23 +5,25 @@ require("dotenv").config({
`.env.${process.env.NODE_ENV || "development"}` `.env.${process.env.NODE_ENV || "development"}`
), ),
}); });
const logger = require("../utils/logger");
const inlineCssTool = require("inline-css"); const inlineCssTool = require("inline-css");
exports.inlinecss = (req, res) => { exports.inlinecss = (req, res) => {
//Perform request validation //Perform request validation
console.log("[CSS] New Inline CSS Request."); logger.log("email-inline-css", "DEBUG", req.user.email, null, null);
const { html, url } = req.body; const { html, url } = req.body;
inlineCssTool(html, { url: url }) inlineCssTool(html, { url: url })
.then((inlinedHtml) => { .then((inlinedHtml) => {
console.log("Inline success.");
res.send(inlinedHtml); res.send(inlinedHtml);
}) })
.catch((error) => { .catch((error) => {
console.log("Error while inlining CSS", JSON.stringify(error)); logger.log("email-inline-css-error", "ERROR", req.user.email, null, {
error,
});
res.send(error); res.send(error);
}); });
}; };

View File

@@ -1,212 +0,0 @@
// const path = require("path");
// const moment = require("moment");
// require("dotenv").config({
// path: path.resolve(
// process.cwd(),
// `.env.${process.env.NODE_ENV || "development"}`
// ),
// });
// var _ = require("lodash");
// const Handlebars = require("handlebars");
// const phone = require("phone");
// var Dinero = require("dinero.js");
// Dinero.defaultCurrency = "CAD";
// Dinero.globalLocale = "en-CA";
// //Usage: {{moment appointments_by_pk.start format="dddd, DD MMMM YYYY"}}
// Handlebars.registerHelper("round", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// try {
// return context.toFixed(2);
// } catch {
// return context;
// }
// });
// Handlebars.registerHelper("dinerof", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// var amount = Dinero(context);
// if (context) {
// return amount.toFormat();
// }
// return "";
// });
// Handlebars.registerHelper("phonef", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// var ph = phone(context)[0];
// if (context) {
// return ph;
// }
// return "";
// });
// Handlebars.registerHelper("partType", function (context, block) {
// if (!context) return "";
// switch (context.toUpperCase()) {
// case "PAA":
// return "Aftermarket";
// case "PAE":
// return "Existing";
// case "PAN":
// return "OEM";
// case "PAO":
// return "Other";
// case "PAS":
// return "Sublet";
// case "PASL":
// return "Sublet";
// case "PAL":
// return "LKQ";
// case "PAM":
// return "Remanufactured";
// case "PAC":
// return "Chrome";
// case "PAP":
// return "OEM Partial";
// case "PAR":
// return "Record";
// default:
// return context;
// }
// });
// Handlebars.registerHelper("lbrType", function (context, block) {
// if (!context) return "";
// switch (context.toUpperCase()) {
// case "LAA":
// return "Aluminum";
// case "LAB":
// return "Body";
// case "LAD":
// return "Diagnostic";
// case "LAF":
// return "Frame";
// case "LAG":
// return "Glass";
// case "LAM":
// return "Mechanical";
// case "LAR":
// return "Refinish";
// case "LAS":
// return "Structural";
// case "LAU":
// return "Detail";
// default:
// return context;
// }
// });
// Handlebars.registerHelper("objectKeys", function (obj, block) {
// var accum = "";
// obj &&
// Object.keys(obj).map((key) => {
// accum += block.fn({ key, value: obj[key] });
// });
// return accum;
// });
// Handlebars.registerHelper("dinero", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// var amount = Dinero({
// amount: Math.round((context || 0) * 100),
// currency: "CAD",
// });
// return amount.toFormat();
// });
// Handlebars.registerHelper("moment", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = undefined;
// }
// if (!!!context) return "";
// var date = moment(context);
// if (block.hash.timezone) {
// date.tz(block.hash.timezone);
// }
// var hasFormat = false;
// // Reset the language back to default before doing anything else
// date.locale("en");
// for (var i in block.hash) {
// if (i === "format") {
// hasFormat = true;
// } else if (date[i]) {
// date = date[i](block.hash[i]);
// } else {
// console.log('moment.js does not support "' + i + '"');
// }
// }
// if (hasFormat) {
// date = date.format(block.hash.format);
// }
// return date;
// });
// Handlebars.registerHelper("duration", function (context, block) {
// if (context && context.hash) {
// block = _.cloneDeep(context);
// context = 0;
// }
// var duration = moment.duration(context);
// var hasFormat = false;
// // Reset the language back to default before doing anything else
// duration = duration.lang("en");
// for (var i in block.hash) {
// if (i === "format") {
// hasFormat = true;
// } else if (duration[i]) {
// duration = duration[i](block.hash[i]);
// } else {
// console.log('moment.js duration does not support "' + i + '"');
// }
// }
// if (hasFormat) {
// duration = duration.format(block.hash.format);
// }
// return duration;
// });
exports.render = (req, res) => {
// //Perform request validation
// let view;
// console.log("[HJS Render] New Render Request.");
// //console.log("[HJS Render] Context", req.body.context);
// if (req.body.context.bodyshop.template_header) {
// console.log("[HJS Render] Including Header");
// //view = req.body.view;
// view = `${req.body.context.bodyshop.template_header}${req.body.view}`;
// } else {
// console.log("[HJS Render] No header to include.");
// view = req.body.view;
// }
// var template = Handlebars.compile(view);
// res.send(template(req.body.context));
};

View File

@@ -3,7 +3,7 @@ const path = require("path");
const queries = require("../graphql-client/queries"); const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js"); const Dinero = require("dinero.js");
const moment = require("moment"); const moment = require("moment");
const logger = require("../utils/logger");
require("dotenv").config({ require("dotenv").config({
path: path.resolve( path: path.resolve(
process.cwd(), process.cwd(),
@@ -12,10 +12,10 @@ require("dotenv").config({
}); });
exports.job = async (req, res) => { exports.job = async (req, res) => {
const BearerToken = req.headers.authorization;
const { jobId } = req.body;
try { try {
const BearerToken = req.headers.authorization; logger.log("smart-scheduling-start", "DEBUG", req.user.email, jobId, null);
const { jobId } = req.body;
console.log("exports.job -> jobId", jobId);
const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, { const client = new GraphQLClient(process.env.GRAPHQL_ENDPOINT, {
headers: { headers: {
@@ -133,10 +133,12 @@ exports.job = async (req, res) => {
) )
possibleDates.push(new Date(bmkey).toISOString().substr(0, 10)); possibleDates.push(new Date(bmkey).toISOString().substr(0, 10));
}); });
console.log("possibleDates", possibleDates, "bucketMatrix", bucketMatrix);
res.json(possibleDates); res.json(possibleDates);
} catch (error) { } catch (error) {
console.log("error", error); logger.log("smart-scheduling-error", "ERROR", req.user.email, jobId, {
error,
});
res.status(400).send(error); res.status(400).send(error);
} }
}; };

View File

@@ -10,16 +10,29 @@ const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries"); const queries = require("../graphql-client/queries");
const { phone } = require("phone"); const { phone } = require("phone");
const admin = require("../firebase/firebase-handler").admin; const admin = require("../firebase/firebase-handler").admin;
const logger = require("../utils/logger");
exports.receive = (req, res) => { exports.receive = (req, res) => {
//Perform request validation //Perform request validation
console.log("[SMS Receive] Inbound Twilio Message.", req.body.SmsMessageSid);
console.log("req.body", req.body); logger.log("sms-inbound", "DEBUG", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
});
if ( if (
!!!req.body || !!!req.body ||
!!!req.body.MessagingServiceSid || !!!req.body.MessagingServiceSid ||
!!!req.body.SmsMessageSid !!!req.body.SmsMessageSid
) { ) {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
type: "malformed-request",
});
res.status(400); res.status(400);
res.json({ success: false, error: "Malformed Request" }); res.json({ success: false, error: "Malformed Request" });
} else { } else {
@@ -29,8 +42,6 @@ exports.receive = (req, res) => {
phone: phone(req.body.From).phoneNumber, phone: phone(req.body.From).phoneNumber,
}) })
.then((response) => { .then((response) => {
console.log("re", req.body);
let newMessage = { let newMessage = {
msid: req.body.SmsMessageSid, msid: req.body.SmsMessageSid,
text: req.body.Body, text: req.body.Body,
@@ -55,10 +66,14 @@ exports.receive = (req, res) => {
response.bodyshops[0].conversations[0].id; response.bodyshops[0].conversations[0].id;
} else { } else {
//We should never get here. //We should never get here.
console.log( logger.log("sms-inbound-error", "ERROR", "api", null, {
"Massive Error: Duplicate Phone Numbers for MSSID: " + msid: req.body.SmsMessageSid,
req.body.MessagingServiceSid text: req.body.Body,
); image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
type: "duplicate-phone",
});
} }
client client
@@ -67,6 +82,9 @@ exports.receive = (req, res) => {
conversationid: response.bodyshops[0].conversations[0].id, conversationid: response.bodyshops[0].conversations[0].id,
}) })
.then((r2) => { .then((r2) => {
logger.log("sms-inbound-success", "DEBUG", "api", null, {
newMessage,
});
res.status(200).send(""); res.status(200).send("");
const arrayOfAllUserFcmTokens = const arrayOfAllUserFcmTokens =
@@ -109,7 +127,15 @@ exports.receive = (req, res) => {
// }); // });
}) })
.catch((e2) => { .catch((e2) => {
console.log("e2", e2); logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,
image: !!req.body.MediaUrl0,
image_path: generateMediaArray(req.body),
messagingServiceSid: req.body.MessagingServiceSid,
error: e2,
});
res.sendStatus(500).json(e2); res.sendStatus(500).json(e2);
}); });
} }

View File

@@ -9,7 +9,7 @@ require("dotenv").config({
const twilio = require("twilio"); const twilio = require("twilio");
const { phone } = require("phone"); const { phone } = require("phone");
const queries = require("../graphql-client/queries"); const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
const client = twilio( const client = twilio(
process.env.TWILIO_AUTH_TOKEN, process.env.TWILIO_AUTH_TOKEN,
process.env.TWILIO_AUTH_KEY process.env.TWILIO_AUTH_KEY
@@ -19,9 +19,21 @@ const gqlClient = require("../graphql-client/graphql-client").client;
exports.send = (req, res) => { exports.send = (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia } = const { to, messagingServiceSid, body, conversationid, selectedMedia } =
req.body; req.body;
console.log("[Sending Sms] " + conversationid + " | " + body);
logger.log("sms-outbound", "DEBUG", req.user.email, null, {
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
mediaUrl: selectedMedia.map((i) => i.src),
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0 ? selectedMedia.map((i) => i.src) : [],
});
if (!!to && !!messagingServiceSid && !!body && !!conversationid) { if (!!to && !!messagingServiceSid && !!body && !!conversationid) {
console.log(phone(to));
client.messages client.messages
.create({ .create({
body: body, body: body,
@@ -46,40 +58,47 @@ exports.send = (req, res) => {
.request(queries.INSERT_MESSAGE, { msg: newMessage }) .request(queries.INSERT_MESSAGE, { msg: newMessage })
.then((r2) => { .then((r2) => {
//console.log("Responding GQL Message ID", JSON.stringify(r2)); //console.log("Responding GQL Message ID", JSON.stringify(r2));
logger.log("sms-outbound-success", "DEBUG", req.user.email, null, {
msid: message.sid,
conversationid,
});
res.sendStatus(200); res.sendStatus(200);
}) })
.catch((e2) => { .catch((e2) => {
console.log("e2", e2); logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
msid: message.sid,
conversationid,
error: e2,
});
//res.json({ success: false, message: e2 }); //res.json({ success: false, message: e2 });
}); });
}) })
.catch((e1) => { .catch((e1) => {
//res.json({ success: false, message: error }); //res.json({ success: false, message: error });
console.log("e1", e1); logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
conversationid,
error: e1,
});
}); });
} else { } else {
logger.log("sms-outbound-error", "ERROR", req.user.email, null, {
type: "missing-parameters",
messagingServiceSid: messagingServiceSid,
to: phone(to).phoneNumber,
text: body,
conversationid,
isoutbound: true,
userid: req.user.email,
image: req.body.selectedMedia.length > 0,
image_path:
req.body.selectedMedia.length > 0
? selectedMedia.map((i) => i.src)
: [],
});
res res
.status(400) .status(400)
.json({ success: false, message: "Missing required parameter(s)." }); .json({ success: false, message: "Missing required parameter(s)." });
} }
}; };
// //Image
// acc.push({
// src: `${process.env.REACT_APP_CLOUDINARY_ENDPOINT}/${DetermineFileType(
// value.type
// )}/upload/${value.key}`,
// thumbnail: `${
// process.env.REACT_APP_CLOUDINARY_ENDPOINT
// }/${DetermineFileType(value.type)}/upload/${
// process.env.REACT_APP_CLOUDINARY_THUMB_TRANSFORMATIONS
// }/${value.key}`,
// thumbnailHeight: 225,
// thumbnailWidth: 225,
// isSelected: false,
// key: value.key,
// extension: value.extension,
// id: value.id,
// type: value.type,
// tags: [{ value: value.type, title: value.type }],
// });

View File

@@ -9,6 +9,7 @@ require("dotenv").config({
const client = require("../graphql-client/graphql-client").client; const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries"); const queries = require("../graphql-client/queries");
const { phone } = require("phone"); const { phone } = require("phone");
const logger = require("../utils/logger");
exports.status = (req, res) => { exports.status = (req, res) => {
const { SmsSid, SmsStatus } = req.body; const { SmsSid, SmsStatus } = req.body;
@@ -18,10 +19,17 @@ exports.status = (req, res) => {
fields: { status: SmsStatus }, fields: { status: SmsStatus },
}) })
.then((response) => { .then((response) => {
console.log("Message Updated", JSON.stringify(response)); logger.log("sms-status-update", "DEBUG", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
});
}) })
.catch((error) => { .catch((error) => {
console.log("Error updating message status", error); logger.log("sms-status-update-error", "ERROR", "api", null, {
msid: SmsSid,
fields: { status: SmsStatus },
error,
});
}); });
res.sendStatus(200); res.sendStatus(200);
}; };

View File

@@ -1,7 +1,7 @@
const client = require("../graphql-client/graphql-client").client; const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries"); const queries = require("../graphql-client/queries");
const path = require("path"); const path = require("path");
const logger = require("../utils/logger");
require("dotenv").config({ require("dotenv").config({
path: path.resolve( path: path.resolve(
process.cwd(), process.cwd(),
@@ -11,7 +11,7 @@ require("dotenv").config({
exports.techLogin = async (req, res) => { exports.techLogin = async (req, res) => {
const { shopid, employeeid, pin } = req.body; const { shopid, employeeid, pin } = req.body;
logger.log("tech-console-login", "DEBUG", req.user.email, null, null);
try { try {
const result = await client.request(queries.QUERY_EMPLOYEE_PIN, { const result = await client.request(queries.QUERY_EMPLOYEE_PIN, {
shopId: shopid, shopId: shopid,
@@ -28,14 +28,23 @@ exports.techLogin = async (req, res) => {
delete dbRecord.pin; delete dbRecord.pin;
technician = dbRecord; technician = dbRecord;
} else { } else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "wrong-pin",
});
error = "The employee ID and PIN combination are not correct."; error = "The employee ID and PIN combination are not correct.";
} }
} else { } else {
logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
type: "invalid-employee",
});
error = "The employee ID does not exist."; error = "The employee ID does not exist.";
} }
res.json({ valid, technician, error }); res.json({ valid, technician, error });
} catch (error) { } catch (error) {
console.log("error", error); logger.log("tech-console-login-error", "DEBUG", req.user.email, null, {
error,
});
res.status(400).send(error); res.status(400).send(error);
} }
}; };

View File

@@ -7,7 +7,7 @@ const logger = new graylog2.graylog({
function log(message, type, user, record, object) { function log(message, type, user, record, object) {
console.log(message, { console.log(message, {
type, type,
env: process.env.NODE_ENV, env: process.env.NODE_ENV || "development",
user, user,
record, record,
...object, ...object,
@@ -22,4 +22,3 @@ function log(message, type, user, record, object) {
} }
module.exports = { log }; module.exports = { log };
//const logger = require("./server/utils/logger");

716
yarn.lock

File diff suppressed because it is too large Load Diff