- the great reformat

Signed-off-by: Dave Richer <dave@imexsystems.ca>
This commit is contained in:
Dave Richer
2024-02-06 18:20:58 -05:00
parent 30c530bcc4
commit e83badb454
912 changed files with 108516 additions and 107493 deletions

View File

@@ -1,47 +1,47 @@
import { notification } from "antd";
import {notification} from "antd";
import i18n from "i18next";
import { logImEXEvent } from "../../firebase/firebase.utils";
import { UPDATE_JOB } from "../../graphql/jobs.queries";
import { insertAuditTrail } from "../../redux/application/application.actions";
import { store } from "../../redux/store";
import {logImEXEvent} from "../../firebase/firebase.utils";
import {UPDATE_JOB} from "../../graphql/jobs.queries";
import {insertAuditTrail} from "../../redux/application/application.actions";
import {store} from "../../redux/store";
import AuditTrailMapping from "../../utils/AuditTrailMappings";
/**
 * Toggles a job's "in production" flag via the UPDATE_JOB mutation.
 *
 * @param {object} apolloClient - Apollo client used to run the mutation.
 * @param {string|number} jobId - Primary key of the job to update.
 * @param {Function} [completionCallback] - Invoked (no args) after a successful save.
 * @param {boolean} [remove=false] - When true, takes the job OUT of production;
 *   when false (default), puts it into production.
 * @returns {void} Fire-and-forget: the mutation runs asynchronously and the
 *   outcome is reported via antd notifications and the optional callback.
 */
export default function AddToProduction(
  apolloClient,
  jobId,
  completionCallback,
  remove = false
) {
  logImEXEvent("job_add_to_production");
  apolloClient
    .mutate({
      mutation: UPDATE_JOB,
      // inproduction is the inverse of `remove`: remove=true clears the flag.
      variables: { jobId: jobId, job: { inproduction: !remove } },
    })
    .then(() => {
      notification.success({
        message: i18n.t("jobs.successes.save"),
      });
      // Record the production-state change in the audit trail.
      store.dispatch(
        insertAuditTrail({
          jobid: jobId,
          operation: AuditTrailMapping.jobinproductionchange(!remove),
        })
      );
      if (completionCallback) completionCallback();
    })
    .catch((error) => {
      // BUGFIX: was notification["errors"] — antd exposes `error`, not
      // `errors`, so the failure toast itself threw and was never shown.
      notification.error({
        message: i18n.t("jobs.errors.saving", {
          error: JSON.stringify(error),
        }),
      });
    });
}

View File

@@ -1,138 +1,138 @@
import Axios from "axios";
import _ from "lodash";
import { logImEXEvent } from "../../firebase/firebase.utils";
import { INSERT_NEW_JOB, QUERY_JOB_FOR_DUPE } from "../../graphql/jobs.queries";
import {logImEXEvent} from "../../firebase/firebase.utils";
import {INSERT_NEW_JOB, QUERY_JOB_FOR_DUPE} from "../../graphql/jobs.queries";
import dayjs from "../../utils/day";
import i18n from "i18next";
/**
 * Duplicates an existing job as a new job in the default "open" status.
 *
 * Copies all fields of the source job (stripping server-managed columns),
 * optionally carries its job lines over as manual lines, inserts the copy,
 * and triggers a server-side totals recalculation for the new job.
 *
 * @param {object} apolloClient - Apollo client for query/mutation.
 * @param {string|number} jobId - Id of the job to duplicate.
 * @param {object} config - Must provide `defaultOpenStatus` for the copy.
 * @param {Function} [completionCallback] - Called with the new job's id.
 * @param {boolean} [keepJobLines=false] - Copy the source job's lines too.
 * @returns {Promise<void>}
 */
export default async function DuplicateJob(
  apolloClient,
  jobId,
  config,
  completionCallback,
  keepJobLines = false
) {
  logImEXEvent("job_duplicate");
  const { defaultOpenStatus } = config;

  // Fetch every field of the source job.
  const res = await apolloClient.query({
    query: QUERY_JOB_FOR_DUPE,
    variables: { id: jobId },
  });
  const { jobs_by_pk } = res.data;

  // Deep-copy and strip columns the server owns (ids, timestamps, CIECA totals).
  const existingJob = _.cloneDeep(jobs_by_pk);
  delete existingJob.__typename;
  delete existingJob.id;
  delete existingJob.createdat;
  delete existingJob.updatedat;
  delete existingJob.cieca_stl;
  delete existingJob.cieca_ttl;

  const newJob = {
    ...existingJob,
    status: defaultOpenStatus,
  };

  // Clone the lines, drop their ids, and mark them as manually entered.
  const _tempLines = _.cloneDeep(existingJob.joblines);
  _tempLines.forEach((line) => {
    delete line.id;
    delete line.__typename;
    line.manual_line = true;
  });

  // CLEANUP: previously joblines was assigned, deleted, and reassigned;
  // only the nested-insert shape ({ data: [...] }) is what the mutation needs.
  newJob.joblines = keepJobLines ? { data: _tempLines } : null;

  // Insert the new job, then ask the server to recompute its totals.
  const res2 = await apolloClient.mutate({
    mutation: INSERT_NEW_JOB,
    variables: { job: [newJob] },
  });
  await Axios.post("/job/totalsssu", {
    id: res2.data.insert_jobs.returning[0].id,
  });

  if (completionCallback)
    completionCallback(res2.data.insert_jobs.returning[0].id);
}
/**
 * Creates an "IOU" child job from an existing job.
 *
 * Copies the source job, marks it converted with `iouparent` pointing back at
 * the original, keeps only the selected job lines (zeroing their price/hours
 * and recording the owed amounts in the part-number text), inserts it, and
 * recomputes totals server-side.
 *
 * @param {object} apolloClient - Apollo client for query/mutation.
 * @param {string|number} jobId - Id of the source job.
 * @param {object} config - Provides `status`, `useremail`, and `bodyshopid`.
 * @param {Array<{id: *}>} jobLinesToKeep - Source job lines to carry over.
 * @returns {Promise<*>} The newly inserted IOU job's id.
 */
export async function CreateIouForJob(
  apolloClient,
  jobId,
  config,
  jobLinesToKeep
) {
  logImEXEvent("job_create_iou");
  const { status } = config;

  // Fetch every field of the source job.
  const res = await apolloClient.query({
    query: QUERY_JOB_FOR_DUPE,
    variables: { id: jobId },
  });
  const { jobs_by_pk } = res.data;

  // Deep-copy and strip columns the server owns (ids, timestamps, CIECA totals).
  const existingJob = _.cloneDeep(jobs_by_pk);
  delete existingJob.__typename;
  delete existingJob.id;
  delete existingJob.createdat;
  delete existingJob.updatedat;
  delete existingJob.cieca_stl;
  delete existingJob.cieca_ttl;

  const newJob = {
    ...existingJob,
    converted: true,
    status: status,
    iouparent: jobId,
    date_open: dayjs(),
    // Seed the audit trail with an "IOU created" entry attributed to the user.
    audit_trails: {
      data: [
        {
          useremail: config.useremail,
          bodyshopid: config.bodyshopid,
          operation: i18n.t("audit_trail.messages.jobioucreated"),
        },
      ],
    },
  };

  // Keep only the lines the caller selected.
  const selectedJoblinesIds = jobLinesToKeep.map((l) => l.id);
  const _tempLines = _.cloneDeep(existingJob.joblines).filter((l) =>
    selectedJoblinesIds.includes(l.id)
  );
  _tempLines.forEach((line) => {
    delete line.id;
    delete line.__typename;
    // Record the owed amount/hours in the part number, then zero them out
    // so the IOU job starts with no charged price or labour.
    line.oem_partno = `${line.oem_partno ? `${line.oem_partno} - ` : ``}IOU $${
      (line.act_price && line.act_price.toFixed(2)) || 0
    }/${line.mod_lb_hrs || 0}hrs`;
    line.act_price = 0;
    line.mod_lb_hrs = 0;
    line.manual_line = true;
  });

  newJob.joblines = { data: _tempLines };

  // Insert the IOU job, then recompute its totals server-side.
  const res2 = await apolloClient.mutate({
    mutation: INSERT_NEW_JOB,
    variables: { job: [newJob] },
  });
  // BUGFIX: this POST was previously fired without `await` (unlike
  // DuplicateJob), so the id was returned before totals were recomputed and
  // any failure became an unhandled rejection.
  await Axios.post("/job/totalsssu", {
    id: res2.data.insert_jobs.returning[0].id,
  });

  return res2.data.insert_jobs.returning[0].id;
}