import FlowNodeModel from "../models/FlowNode";
import JobModel from "../models/Job";
import Processor from "../Processor";
import { JOB_STATUS } from "../constants";
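
// Completion strategies for the parallel node, implemented by `StatusGetters` below:
// - `all`: resolve only when every branch resolves; reject as soon as any branch rejects.
// - `any`: resolve as soon as any branch resolves; reject only when all branches have rejected.
// - `race`: settle as soon as any branch settles, either resolved or rejected.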
export const PARALLEL_MODE = {
  ALL: 'all',
  ANY: 'any',
  RACE: 'race'
} as const;
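
// Derive the parent parallel job status from the collected branch results according to the
// configured mode. Each entry in `result` is either `null` (branch not finished yet) or a
// branch job snapshot carrying a `status`.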
const StatusGetters = {
  [PARALLEL_MODE.ALL](result) {
    if (result.some(j => j && j.status === JOB_STATUS.REJECTED)) {
      return JOB_STATUS.REJECTED;
    }
    if (result.every(j => j && j.status === JOB_STATUS.RESOLVED)) {
      return JOB_STATUS.RESOLVED;
    }
    return JOB_STATUS.PENDING;
  },
  [PARALLEL_MODE.ANY](result) {
    return result.some(j => j && j.status === JOB_STATUS.RESOLVED)
      ? JOB_STATUS.RESOLVED
      : (
        result.some(j => j && j.status === JOB_STATUS.PENDING)
          ? JOB_STATUS.PENDING
          : JOB_STATUS.REJECTED
      );
  },
  [PARALLEL_MODE.RACE](result) {
    return result.some(j => j && j.status === JOB_STATUS.RESOLVED)
      ? JOB_STATUS.RESOLVED
      : (
        result.some(j => j && j.status === JOB_STATUS.REJECTED)
          ? JOB_STATUS.REJECTED
          : JOB_STATUS.PENDING
      );
  }
};
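
// Parallel instruction: `run` forks all branches under the node into a pending parent job,
// and `resume` folds each finished branch job back into that job's result and status.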
export default {
  async run(node: FlowNodeModel, prevJob: JobModel, processor: Processor) {
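    // collect the head node of each branch under this parallel node, ordered by branch index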
    const branches = processor.nodes
      .filter(item => item.upstream === node && item.branchIndex !== null)
      .sort((a, b) => a.branchIndex - b.branchIndex);
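
    // create the parent job in pending state, with a `null` placeholder for each branch result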
    const job = await processor.saveJob({
      status: JOB_STATUS.PENDING,
      result: Array(branches.length).fill(null),
      nodeId: node.id,
      upstreamId: prevJob?.id ?? null
    });

    // NOTE:
    // use `reduce` instead of `Promise.all` here to avoid races when manipulating the db.
    // for users this is almost equivalent to `Promise.all`,
    // because the extra delay is not perceptible.
    // another benefit is that it could support sequential branches in the future.
    await branches.reduce((promise: Promise<any>, branch) => promise.then(() => processor.run(branch, job)), Promise.resolve());
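
    // the parallel job stays pending at this point; each branch reports back through `resume` below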
    return processor.end(node, job);
  },

  async resume(node: FlowNodeModel, branchJob, processor: Processor) {
    const job = processor.findBranchParentJob(branchJob, node);

    const { result, status } = job;
    // if the parallel job has already settled (resolved / rejected), ignore branch jobs finishing later.
    if (status !== JOB_STATUS.PENDING) {
      return null;
    }

    // find the branch index from the node which starts the branch
    const jobNode = processor.nodesMap.get(branchJob.nodeId);
    const { branchIndex } = processor.findBranchStartNode(jobNode);
    const { mode = PARALLEL_MODE.ALL } = node.config || {};
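
    // place the finished branch job into its slot and recompute the parent status for the configured mode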
    const newResult = [...result.slice(0, branchIndex), branchJob.get(), ...result.slice(branchIndex + 1)];
    job.set({
      result: newResult,
      status: StatusGetters[mode](newResult)
    });
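
    // while still pending, persist the partial result and finish through `processor.end`;
    // once settled, return the job itself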
    if (job.status === JOB_STATUS.PENDING) {
      await job.save({ transaction: processor.transaction });
      return processor.end(node, job);
    }

    return job;
  }
};