IO-1605 Refactor smart scheduling.
This commit is contained in:
@@ -4,6 +4,7 @@ const queries = require("../graphql-client/queries");
|
||||
const Dinero = require("dinero.js");
|
||||
const moment = require("moment");
|
||||
const logger = require("../utils/logger");
|
||||
const _ = require("lodash");
|
||||
require("dotenv").config({
|
||||
path: path.resolve(
|
||||
process.cwd(),
|
||||
@@ -30,7 +31,7 @@ exports.job = async (req, res) => {
|
||||
jobId: jobId,
|
||||
});
|
||||
|
||||
const { appointments, jobs } = result;
|
||||
const { jobs_by_pk, blockedDays, prodJobs, arrJobs, compJobs } = result;
|
||||
const { ssbuckets, workingdays } = result.jobs_by_pk.bodyshop;
|
||||
const jobHrs = result.jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
|
||||
|
||||
@@ -39,105 +40,164 @@ exports.job = async (req, res) => {
|
||||
bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
|
||||
)[0];
|
||||
|
||||
const bucketMatrix = {};
|
||||
const yesterday = moment().subtract(1, "day");
|
||||
//Get latest date + add 5 days to allow for back end adding..
|
||||
const load = {
|
||||
productionTotal: {},
|
||||
};
|
||||
//Set the current load.
|
||||
ssbuckets.forEach((bucket) => {
|
||||
load.productionTotal[bucket.id] = { count: 0, label: bucket.label };
|
||||
});
|
||||
|
||||
const totalMatrixDays = moment
|
||||
.max([
|
||||
...appointments.map((a) => moment(a.start)),
|
||||
...jobs
|
||||
.map((p) => moment(p.scheduled_completion))
|
||||
.filter((p) => p.isValid() && p.isAfter(yesterday)),
|
||||
])
|
||||
.add("5", "days")
|
||||
.diff(moment(), "days");
|
||||
const filteredProdJobsList = prodJobs.filter(
|
||||
(j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
|
||||
);
|
||||
|
||||
//Initialize the bucket matrix
|
||||
for (var i = 0; i < totalMatrixDays; i++) {
|
||||
const theDate = moment().add(i, "days").format("yyyy-MM-DD");
|
||||
//Only need to create a matrix for jobs of the same bucket.
|
||||
bucketMatrix[theDate] = { in: 0, out: 0 };
|
||||
|
||||
// ssbuckets.forEach((bucket) => {
|
||||
// bucketMatrix[theDate] = {
|
||||
// ...bucketMatrix[theDate],
|
||||
// [bucket.id]: { in: 0, out: 0 },
|
||||
// };
|
||||
// });
|
||||
}
|
||||
|
||||
//Populate the jobs scheduled to come in.
|
||||
appointments.forEach((appointment) => {
|
||||
if (!appointment.block) {
|
||||
const jobHrs =
|
||||
appointment.job.joblines_aggregate.aggregate.sum.mod_lb_hrs;
|
||||
//Is the job in the same bucket?
|
||||
const appointmentBucket = ssbuckets.filter(
|
||||
(bucket) =>
|
||||
bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
|
||||
)[0];
|
||||
if (appointmentBucket.id === JobBucket.id) {
|
||||
//Theyre the same classification. Add it to the matrix.
|
||||
const appDate = moment(appointment.start).format("yyyy-MM-DD");
|
||||
bucketMatrix[appDate] = {
|
||||
...bucketMatrix[appDate],
|
||||
in: bucketMatrix[appDate].in + 1,
|
||||
};
|
||||
}
|
||||
filteredProdJobsList.forEach((item) => {
|
||||
//Add all of the jobs currently in production to the buckets so that we have a starting point.
|
||||
const bucketId = CheckJobBucket(ssbuckets, item);
|
||||
if (bucketId) {
|
||||
load.productionTotal[bucketId].count =
|
||||
load.productionTotal[bucketId].count + 1;
|
||||
} else {
|
||||
//remove the date from the possible list.
|
||||
const appDate = moment(appointment.start).format("yyyy-MM-DD");
|
||||
bucketMatrix[appDate] = {
|
||||
...bucketMatrix[appDate],
|
||||
blocked: true,
|
||||
console.log("Uh oh, this job doesn't fit in a bucket!", item);
|
||||
}
|
||||
});
|
||||
|
||||
const filteredArrJobs = arrJobs.filter(
|
||||
(j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
|
||||
);
|
||||
|
||||
filteredArrJobs.forEach((item) => {
|
||||
const itemDate = moment(item.scheduled_in).format("yyyy-MM-DD");
|
||||
if (!!load[itemDate]) {
|
||||
load[itemDate].hoursIn =
|
||||
(load[itemDate].hoursIn || 0) +
|
||||
item.labhrs.aggregate.sum.mod_lb_hrs +
|
||||
item.larhrs.aggregate.sum.mod_lb_hrs;
|
||||
load[itemDate].jobsIn.push(item);
|
||||
} else {
|
||||
load[itemDate] = {
|
||||
jobsIn: [item],
|
||||
jobsOut: [],
|
||||
hoursIn:
|
||||
item.labhrs.aggregate.sum.mod_lb_hrs +
|
||||
item.larhrs.aggregate.sum.mod_lb_hrs,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
//Populate the jobs that are leaving today.
|
||||
const todayIsoString = moment().format("yyyy-MM-DD");
|
||||
jobs.forEach((pjob) => {
|
||||
const jobHrs =
|
||||
pjob.larhrs.aggregate.sum.mod_lb_hrs +
|
||||
pjob.labhrs.aggregate.sum.mod_lb_hrs;
|
||||
//Is the job in the same bucket?
|
||||
const pjobBucket = ssbuckets.filter(
|
||||
(bucket) =>
|
||||
bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
|
||||
)[0];
|
||||
if (pjobBucket.id === JobBucket.id) {
|
||||
//Theyre the same classification. Add it to the matrix.
|
||||
const compDate = moment(pjob.scheduled_completion);
|
||||
//Is the schedule completion behind today? If so, use today as it.
|
||||
let dateToUse;
|
||||
dateToUse = compDate.isValid()
|
||||
? moment().diff(compDate, "days") < 0
|
||||
? compDate.format("yyyy-MM-DD")
|
||||
: todayIsoString
|
||||
: todayIsoString;
|
||||
//Get the completing jobs.
|
||||
let problemJobs = [];
|
||||
const filteredCompJobs = compJobs.filter(
|
||||
(j) => JobBucket.id === CheckJobBucket(ssbuckets, j)
|
||||
);
|
||||
|
||||
bucketMatrix[dateToUse] = {
|
||||
...bucketMatrix[dateToUse],
|
||||
out: (bucketMatrix[dateToUse].out || 0) + 1,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
//Propose the first 5 dates where we are below target.
|
||||
|
||||
const possibleDates = [];
|
||||
const bucketMatrixKeys = Object.keys(bucketMatrix);
|
||||
bucketMatrixKeys.forEach((bmkey) => {
|
||||
const isShopOpen =
|
||||
workingdays[dayOfWeekMapper(moment(bmkey).day())] &&
|
||||
!bucketMatrix[bmkey].blocked;
|
||||
filteredCompJobs.forEach((item) => {
|
||||
const inProdJobs = filteredProdJobsList.find((p) => p.id === item.id);
|
||||
const inArrJobs = filteredArrJobs.find((p) => p.id === item.id);
|
||||
|
||||
if (
|
||||
JobBucket.target > bucketMatrix[bmkey].in - bucketMatrix[bmkey].out &&
|
||||
!(inProdJobs || inArrJobs) &&
|
||||
!moment(item.actual_completion || item.scheduled_completion).isSame(
|
||||
moment(),
|
||||
"day"
|
||||
)
|
||||
) {
|
||||
// NOT FOUND!
|
||||
console.log("PROBLEM JOB", item);
|
||||
problemJobs.push({
|
||||
...item,
|
||||
code: "Job is scheduled for completion, but it is not marked in production nor is it an arriving job in this period. Check the scheduled in and completion dates",
|
||||
});
|
||||
return;
|
||||
} else {
|
||||
const itemDate = moment(
|
||||
item.actual_completion || item.scheduled_completion
|
||||
).format("yyyy-MM-DD");
|
||||
if (!!load[itemDate]) {
|
||||
load[itemDate].hoursOut =
|
||||
(load[itemDate].hoursOut || 0) +
|
||||
item.labhrs.aggregate.sum.mod_lb_hrs +
|
||||
item.larhrs.aggregate.sum.mod_lb_hrs;
|
||||
load[itemDate].jobsOut.push(item);
|
||||
} else {
|
||||
load[itemDate] = {
|
||||
jobsOut: [item],
|
||||
hoursOut:
|
||||
item.labhrs.aggregate.sum.mod_lb_hrs +
|
||||
item.larhrs.aggregate.sum.mod_lb_hrs,
|
||||
};
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
//Propagate the expected load to each day.
|
||||
const yesterday = moment().subtract(1, "day");
|
||||
const today = moment().startOf("day");
|
||||
|
||||
const end = moment.max([
|
||||
...filteredArrJobs.map((a) => moment(a.scheduled_in)),
|
||||
...filteredCompJobs
|
||||
.map((p) => moment(p.actual_completion || p.scheduled_completion))
|
||||
.filter((p) => p.isValid() && p.isAfter(yesterday)),
|
||||
]);
|
||||
const range = Math.round(moment.duration(end.diff(today)).asDays());
|
||||
for (var day = 0; day < range; day++) {
|
||||
const current = moment(today).add(day, "days").format("yyyy-MM-DD");
|
||||
const prev = moment(today)
|
||||
.add(day - 1, "days")
|
||||
.format("yyyy-MM-DD");
|
||||
if (!!!load[current]) {
|
||||
load[current] = {};
|
||||
}
|
||||
if (day === 0) {
|
||||
//Starting on day 1. The load is current.
|
||||
load[current].expectedLoad = CalculateLoad(
|
||||
load.productionTotal,
|
||||
ssbuckets,
|
||||
load[current].jobsIn || [],
|
||||
load[current].jobsOut || []
|
||||
);
|
||||
} else {
|
||||
load[current].expectedLoad = CalculateLoad(
|
||||
load[prev].expectedLoad,
|
||||
ssbuckets,
|
||||
load[current].jobsIn || [],
|
||||
load[current].jobsOut || []
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
//Add in all of the blocked days.
|
||||
|
||||
blockedDays.forEach((b) => {
|
||||
//Find it in the load, set it as blocked.
|
||||
const startIsoFormat = moment(b.start).format("YYYY-MM-DD");
|
||||
if (load[startIsoFormat]) load[startIsoFormat].blocked = true;
|
||||
else {
|
||||
load[startIsoFormat] = { blocked: true };
|
||||
}
|
||||
});
|
||||
// //Propose the first 5 dates where we are below target.
|
||||
|
||||
const possibleDates = [];
|
||||
delete load.productionTotal;
|
||||
const loadKeys = Object.keys(load).sort((a, b) =>
|
||||
moment(a).isAfter(moment(b)) ? 1 : -1
|
||||
);
|
||||
|
||||
loadKeys.forEach((loadKey) => {
|
||||
const isShopOpen =
|
||||
(workingdays[dayOfWeekMapper(moment(loadKey).day())] || false) &&
|
||||
!load[loadKey].blocked;
|
||||
|
||||
if (
|
||||
load[loadKey].expectedLoad &&
|
||||
load[loadKey].expectedLoad[JobBucket.id] &&
|
||||
JobBucket.target > load[loadKey].expectedLoad[JobBucket.id].count &&
|
||||
isShopOpen
|
||||
)
|
||||
possibleDates.push(new Date(bmkey).toISOString().substr(0, 10));
|
||||
possibleDates.push(new Date(loadKey).toISOString().substr(0, 10));
|
||||
});
|
||||
|
||||
if (possibleDates.length < 6) {
|
||||
@@ -147,7 +207,7 @@ exports.job = async (req, res) => {
|
||||
}
|
||||
} catch (error) {
|
||||
logger.log("smart-scheduling-error", "ERROR", req.user.email, jobId, {
|
||||
error: JSON.stringify(error),
|
||||
error,
|
||||
});
|
||||
res.status(400).send(error);
|
||||
}
|
||||
@@ -171,3 +231,47 @@ const dayOfWeekMapper = (numberOfDay) => {
|
||||
return "saturday";
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Determines which smart-scheduling bucket a job falls into based on its
 * total modified labor hours (body labor + refinish labor aggregates).
 *
 * @param {Array<{id: *, gte: number, lt: ?number}>} buckets - Bucket
 *   definitions; `gte` is the inclusive lower bound and `lt` the exclusive
 *   upper bound (a falsy `lt` means the bucket is unbounded above).
 * @param {Object} job - Job whose `labhrs`/`larhrs` aggregate sums hold the
 *   modified labor hours (`mod_lb_hrs`).
 * @returns {*} The id of the first matching bucket, or a falsy value
 *   (undefined) when no bucket matches.
 */
const CheckJobBucket = (buckets, job) => {
  const jobHours =
    job.labhrs.aggregate.sum.mod_lb_hrs + job.larhrs.aggregate.sum.mod_lb_hrs;

  // BUG FIX: the previous condition parsed as
  //   (b.gte <= jobHours && b.lt) ? b.lt > jobHours : true
  // so any bucket with b.gte > jobHours fell into the `: true` branch and
  // matched unconditionally. Parenthesize the upper-bound ternary so the
  // lower bound is always enforced, matching the inline bucket checks used
  // elsewhere in this module.
  const matchingBucket = buckets.filter(
    (b) => b.gte <= jobHours && (b.lt ? b.lt > jobHours : true)
  );

  return matchingBucket[0] && matchingBucket[0].id;
};
|
||||
|
||||
/**
 * Projects the shop load for a day by applying that day's arriving and
 * departing jobs to the previous load state.
 *
 * @param {Object} currentLoad - Per-bucket state keyed by bucket id, each
 *   entry carrying a `count` of jobs currently in that bucket.
 * @param {Array} buckets - Bucket definitions passed through to CheckJobBucket.
 * @param {Array} jobsIn - Jobs arriving on this day (each count goes up by 1).
 * @param {Array} jobsOut - Jobs completing on this day (each count goes down by 1).
 * @returns {Object} A new load object; `currentLoad` is never mutated.
 */
const CalculateLoad = (currentLoad, buckets, jobsIn, jobsOut) => {
  // Work on a deep copy so the caller's snapshot of the previous day
  // remains intact for later comparisons.
  const projected = _.cloneDeep(currentLoad);

  // Arrivals: bump the matching bucket's count.
  for (const job of jobsIn) {
    const bucketId = CheckJobBucket(buckets, job);
    if (!bucketId) {
      console.log(
        "[Util Arr Job]Uh oh, this job doesn't fit in a bucket!",
        job
      );
      continue;
    }
    projected[bucketId].count = projected[bucketId].count + 1;
  }

  // Departures: drop the matching bucket's count; a negative count means
  // the arrival/departure data disagree, so log it for investigation.
  for (const job of jobsOut) {
    const bucketId = CheckJobBucket(buckets, job);
    if (!bucketId) {
      console.log(
        "[Util Out Job]Uh oh, this job doesn't fit in a bucket!",
        job
      );
      continue;
    }
    projected[bucketId].count = projected[bucketId].count - 1;
    if (projected[bucketId].count < 0) {
      console.log("***ERROR: NEGATIVE LOAD Bucket =>", bucketId, job);
    }
  }

  return projected;
};
|
||||
|
||||
Reference in New Issue
Block a user