From 4a8c9c8d87b7767d7c3b09b04208fad3209e734a Mon Sep 17 00:00:00 2001 From: Junyi Date: Tue, 28 Jun 2022 17:00:19 +0800 Subject: [PATCH] feat(plugin-workflow): add race mode (#542) * feat(plugin-workflow): add race mode * fix(plugin-workflow): fix test cases * fix(plugin-workflow): fix node create action --- packages/core/client/src/locale/zh_CN.ts | 3 + .../core/client/src/workflow/nodes/delay.tsx | 1 + .../client/src/workflow/nodes/parallel.tsx | 12 +- packages/plugins/workflow/src/Processor.ts | 23 +- .../{workflow.test.ts => Plugin.test.ts} | 2 +- .../workflow/src/__tests__/Processor.test.ts | 146 +----- .../__tests__/instructions/parallel.test.ts | 415 ++++++++++++++++++ .../plugins/workflow/src/actions/nodes.ts | 2 + .../workflow/src/instructions/delay.ts | 18 +- .../workflow/src/instructions/parallel.ts | 84 ++-- 10 files changed, 524 insertions(+), 182 deletions(-) rename packages/plugins/workflow/src/__tests__/{workflow.test.ts => Plugin.test.ts} (99%) create mode 100644 packages/plugins/workflow/src/__tests__/instructions/parallel.test.ts diff --git a/packages/core/client/src/locale/zh_CN.ts b/packages/core/client/src/locale/zh_CN.ts index d91de6e5e..4c93e6dbf 100644 --- a/packages/core/client/src/locale/zh_CN.ts +++ b/packages/core/client/src/locale/zh_CN.ts @@ -548,10 +548,13 @@ export default { 'Conditions': '条件配置', 'Parallel branch': '分支', + 'Add branch': '增加分支', 'All succeeded': '全部成功', 'Any succeeded': '任意成功', + 'Any succeeded or failed': '任意成功或失败', 'Continue after all branches succeeded': '全部分支都成功后才能继续', 'Continue after any branch succeeded': '任意分支成功后就继续', + 'Continue after any branch succeeded, or exit after any branch failed': '任意分支成功继续,或失败后退出', 'Delay': '延时', 'Duration': '时长', diff --git a/packages/core/client/src/workflow/nodes/delay.tsx b/packages/core/client/src/workflow/nodes/delay.tsx index 954a4d42b..9af853190 100644 --- a/packages/core/client/src/workflow/nodes/delay.tsx +++ b/packages/core/client/src/workflow/nodes/delay.tsx @@ -11,6 +11,7 @@ export default { title: '{{t("Duration")}}', 'x-decorator': 'FormItem', 'x-component': 'Duration', + default: 60000 }, 'config.endStatus': { type: 'number', diff --git a/packages/core/client/src/workflow/nodes/parallel.tsx b/packages/core/client/src/workflow/nodes/parallel.tsx index 6d9ca4e43..ed6e89707 100644 --- a/packages/core/client/src/workflow/nodes/parallel.tsx +++ b/packages/core/client/src/workflow/nodes/parallel.tsx @@ -46,7 +46,17 @@ export default { ) }, - // { value: 'race', label: '任意退出' }, + { + value: 'race', + label: ( + + {i18n.t('Any succeeded or failed')} + + ) + }, ], default: 'all' } diff --git a/packages/plugins/workflow/src/Processor.ts b/packages/plugins/workflow/src/Processor.ts index 5fa4c8e4e..93c9ccf42 100644 --- a/packages/plugins/workflow/src/Processor.ts +++ b/packages/plugins/workflow/src/Processor.ts @@ -201,11 +201,12 @@ export default class Processor { } // parent node should take over the control - public end(node, job) { + public async end(node, job) { const parentNode = this.findBranchParentNode(node); // no parent, means on main flow if (parentNode) { - return this.recall(parentNode, job); + await this.recall(parentNode, job); + return job; } // really done for all nodes @@ -256,11 +257,23 @@ export default class Processor { return job; } + getBranches(node: FlowNodeModel): FlowNodeModel[] { + return this.nodes + .filter(item => item.upstream === node && item.branchIndex !== null) + .sort((a, b) => a.branchIndex - b.branchIndex); + } + // find the first node in current branch - 
findBranchStartNode(node: FlowNodeModel): FlowNodeModel | null { + findBranchStartNode(node: FlowNodeModel, parent?: FlowNodeModel): FlowNodeModel | null { for (let n = node; n; n = n.upstream) { - if (n.branchIndex !== null) { - return n; + if (!parent) { + if (n.branchIndex !== null) { + return n; + } + } else { + if (n.upstream === parent) { + return n; + } } } return null; diff --git a/packages/plugins/workflow/src/__tests__/workflow.test.ts b/packages/plugins/workflow/src/__tests__/Plugin.test.ts similarity index 99% rename from packages/plugins/workflow/src/__tests__/workflow.test.ts rename to packages/plugins/workflow/src/__tests__/Plugin.test.ts index 27611dcd4..162b8768e 100644 --- a/packages/plugins/workflow/src/__tests__/workflow.test.ts +++ b/packages/plugins/workflow/src/__tests__/Plugin.test.ts @@ -4,7 +4,7 @@ import { getApp } from '.'; -describe('workflow > workflow', () => { +describe('workflow > Plugin', () => { let app: MockServer; let agent; let db: Database; diff --git a/packages/plugins/workflow/src/__tests__/Processor.test.ts b/packages/plugins/workflow/src/__tests__/Processor.test.ts index 510547d96..6d71a6ffb 100644 --- a/packages/plugins/workflow/src/__tests__/Processor.test.ts +++ b/packages/plugins/workflow/src/__tests__/Processor.test.ts @@ -22,7 +22,6 @@ describe('workflow > Processor', () => { PostRepo = db.getCollection('posts').repository; workflow = await WorkflowModel.create({ - title: 'test workflow', enabled: true, type: 'collection', config: { @@ -45,7 +44,6 @@ describe('workflow > Processor', () => { it('execute resolved workflow', async () => { await workflow.createNode({ - title: 'echo', type: 'echo' }); @@ -62,7 +60,6 @@ describe('workflow > Processor', () => { it('workflow with single simple node', async () => { await workflow.createNode({ - title: 'echo', type: 'echo' }); @@ -108,7 +105,6 @@ describe('workflow > Processor', () => { it('workflow with error node', async () => { await workflow.createNode({ - title: 'error', type: 'error' }); @@ -128,12 +124,10 @@ describe('workflow > Processor', () => { describe('manual nodes', () => { it('manual node should suspend execution, and could be manually resume', async () => { const n1 = await workflow.createNode({ - title: 'prompt', type: 'prompt', }); const n2 = await workflow.createNode({ - title: 'echo', type: 'echo', upstreamId: n1.id }); @@ -163,11 +157,9 @@ describe('workflow > Processor', () => { it('manual node should suspend execution, resuming with error should end execution', async () => { const n1 = await workflow.createNode({ - title: 'prompt error', type: 'prompt->error', }); const n2 = await workflow.createNode({ - title: 'echo', type: 'echo', upstreamId: n1.id }); @@ -196,20 +188,17 @@ describe('workflow > Processor', () => { describe('branch: condition', () => { it('condition node link to different downstreams', async () => { const n1 = await workflow.createNode({ - title: 'condition', type: 'condition', // no config means always true }); const n2 = await workflow.createNode({ - title: 'true to echo', type: 'echo', branchIndex: BRANCH_INDEX.ON_TRUE, upstreamId: n1.id }); await workflow.createNode({ - title: 'false to echo', type: 'echo', branchIndex: BRANCH_INDEX.ON_FALSE, upstreamId: n1.id @@ -229,20 +218,17 @@ describe('workflow > Processor', () => { it('suspend downstream in condition branch, then go on', async () => { const n1 = await workflow.createNode({ - title: 'condition', type: 'condition', // no config means always true }); const n2 = await workflow.createNode({ - title: 
'manual', type: 'prompt', branchIndex: BRANCH_INDEX.ON_TRUE, upstreamId: n1.id }); const n3 = await workflow.createNode({ - title: 'echo input value', type: 'echo', upstreamId: n1.id }); @@ -265,20 +251,17 @@ describe('workflow > Processor', () => { it('resume error downstream in condition branch, should reject', async () => { const n1 = await workflow.createNode({ - title: 'condition', type: 'condition', // no config means always true }); const n2 = await workflow.createNode({ - title: 'manual', type: 'prompt->error', branchIndex: BRANCH_INDEX.ON_TRUE, upstreamId: n1.id }); const n3 = await workflow.createNode({ - title: 'echo input value', type: 'echo', upstreamId: n1.id }); @@ -301,132 +284,19 @@ describe('workflow > Processor', () => { }); }); - describe('branch: parallel node', () => { - it('link to single branch', async () => { - const n1 = await workflow.createNode({ - title: 'parallel', - type: 'parallel' - }); - - const n2 = await workflow.createNode({ - title: 'echo1', - type: 'echo', - upstreamId: n1.id, - branchIndex: 0 - }); - - const n3 = await workflow.createNode({ - title: 'echo2', - type: 'echo', - upstreamId: n1.id - }); - - await n1.setDownstream(n3); - - const post = await PostRepo.create({ values: { title: 't1' } }); - - const [execution] = await workflow.getExecutions(); - expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); - const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); - expect(jobs.length).toEqual(3); - }); - - it('link to multipe branches', async () => { - const n1 = await workflow.createNode({ - title: 'parallel', - type: 'parallel' - }); - - const n2 = await workflow.createNode({ - title: 'echo1', - type: 'echo', - upstreamId: n1.id, - branchIndex: 0 - }); - - const n3 = await workflow.createNode({ - title: 'echo2', - type: 'echo', - upstreamId: n1.id, - branchIndex: 1 - }); - - const n4 = await workflow.createNode({ - title: 'echo on end', - type: 'echo', - upstreamId: n1.id - }); - - await n1.setDownstream(n4); - - const post = await PostRepo.create({ values: { title: 't1' } }); - - const [execution] = await workflow.getExecutions(); - expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); - const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); - expect(jobs.length).toEqual(4); - }); - - it('downstream has manual node', async () => { - const n1 = await workflow.createNode({ - title: 'parallel', - type: 'parallel' - }); - - const n2 = await workflow.createNode({ - title: 'prompt', - type: 'prompt', - upstreamId: n1.id, - branchIndex: 0 - }); - - const n3 = await workflow.createNode({ - title: 'echo', - type: 'echo', - upstreamId: n1.id, - branchIndex: 1 - }); - - const n4 = await workflow.createNode({ - title: 'echo on end', - type: 'echo', - upstreamId: n1.id - }); - - await n1.setDownstream(n4); - - const post = await PostRepo.create({ values: { title: 't1' } }); - - const [execution] = await workflow.getExecutions(); - expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); - - const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); - pending.set('result', 123); - const processor = plugin.createProcessor(execution); - await processor.resume(pending); - - expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); - const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); - expect(jobs.length).toEqual(4); - }); - }); - describe('branch: mixed', () => { it('condition branches contains parallel', async () => { const n1 = await workflow.createNode({ - title: 'condition', type: 
'condition' }); const n2 = await workflow.createNode({ - title: 'parallel', type: 'parallel', branchIndex: BRANCH_INDEX.ON_TRUE, upstreamId: n1.id }); const n3 = await workflow.createNode({ - title: 'prompt', type: 'prompt', upstreamId: n2.id, branchIndex: 0 @@ -467,19 +337,16 @@ describe('workflow > Processor', () => { it('parallel branches contains condition', async () => { const n1 = await workflow.createNode({ - title: 'parallel', type: 'parallel' }); const n2 = await workflow.createNode({ - title: 'prompt', type: 'prompt', upstreamId: n1.id, branchIndex: 0 }); const n3 = await workflow.createNode({ - title: 'condition', type: 'condition', upstreamId: n1.id, branchIndex: 1 @@ -502,19 +369,20 @@ describe('workflow > Processor', () => { const post = await PostRepo.create({ values: { title: 't1' } }); - const [execution] = await workflow.getExecutions(); - expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + const [e1] = await workflow.getExecutions(); + expect(e1.status).toEqual(EXECUTION_STATUS.STARTED); - const pendingJobs = await execution.getJobs(); + const pendingJobs = await e1.getJobs(); expect(pendingJobs.length).toBe(4); const pending = pendingJobs.find(item => item.nodeId === n2.id ); pending.set('result', 123); - const processor = plugin.createProcessor(execution); + const processor = plugin.createProcessor(e1); await processor.resume(pending); - expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); - const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + const [e2] = await workflow.getExecutions(); + expect(e2.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await e2.getJobs({ order: [['id', 'ASC']] }); expect(jobs.length).toEqual(5); }); }); diff --git a/packages/plugins/workflow/src/__tests__/instructions/parallel.test.ts b/packages/plugins/workflow/src/__tests__/instructions/parallel.test.ts new file mode 100644 index 000000000..11a10f8c8 --- /dev/null +++ b/packages/plugins/workflow/src/__tests__/instructions/parallel.test.ts @@ -0,0 +1,415 @@ +import { Application } from '@nocobase/server'; +import Database from '@nocobase/database'; +import { getApp, sleep } from '..'; +import { EXECUTION_STATUS } from '../../constants'; + + + +describe('workflow > instructions > parallel', () => { + let app: Application; + let db: Database; + let PostRepo; + let WorkflowModel; + let workflow; + let plugin; + + beforeEach(async () => { + app = await getApp(); + plugin = app.pm.get('@nocobase/plugin-workflow'); + + db = app.db; + WorkflowModel = db.getCollection('workflows').model; + PostRepo = db.getCollection('posts').repository; + + workflow = await WorkflowModel.create({ + enabled: true, + type: 'collection', + config: { + mode: 1, + collection: 'posts' + } + }); + }); + + afterEach(() => app.stop()); + + describe('single all', () => { + it('all resolved', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + const n2 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(3); + }); + + it('some rejected', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + const n2 = await 
workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.REJECTED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(3); + }); + + it('first branch rejected', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.REJECTED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(2); + }); + }); + + describe('single any', () => { + it('first resolved', async () => { + const n1 = await workflow.createNode({ + type: 'parallel', + config: { + mode: 'any' + } + }); + const n2 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(2); + }); + + it('first rejected', async () => { + const n1 = await workflow.createNode({ + type: 'parallel', + config: { + mode: 'any' + } + }); + const n2 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(3); + }); + + it('all rejected', async () => { + const n1 = await workflow.createNode({ + type: 'parallel', + config: { + mode: 'any' + } + }); + const n2 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.REJECTED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(3); + }); + }); + + describe('single race', () => { + it('first resolved', async () => { + const n1 = await workflow.createNode({ + type: 'parallel', + config: { + mode: 'race' + } + }); + const n2 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + 
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(2); + }); + + it('first rejected', async () => { + const n1 = await workflow.createNode({ + type: 'parallel', + config: { + mode: 'race' + } + }); + const n2 = await workflow.createNode({ + type: 'error', + upstreamId: n1.id, + branchIndex: 0 + }); + const n3 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.REJECTED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(2); + }); + }); + + describe('branch and join', () => { + it('link to single branch', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'echo1', + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'echo2', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n3); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(3); + }); + + it('link to multipe branches', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'echo1', + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'echo2', + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const n4 = await workflow.createNode({ + title: 'echo on end', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n4); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(4); + }); + + it('random branch index', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'echo1', + type: 'echo', + upstreamId: n1.id, + branchIndex: 3 + }); + + const n3 = await workflow.createNode({ + title: 'echo2', + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(3); + }); + + it('downstream has manual node', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'prompt', + type: 'prompt', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'echo', + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const n4 = await workflow.createNode({ + title: 'echo on end', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n4); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + 
expect(execution.status).toBe(EXECUTION_STATUS.STARTED); + + const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); + pending.set('result', 123); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); + + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(4); + }); + }); + + describe('nested', () => { + it('nested 2 levels', async () => { + const n1 = await workflow.createNode({ + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + type: 'parallel', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const n4 = await workflow.createNode({ + type: 'echo', + upstreamId: n2.id, + branchIndex: 0 + }); + + const n5 = await workflow.createNode({ + type: 'echo', + upstreamId: n1.id + }); + await n1.setDownstream(n5); + + const post = await PostRepo.create({ values: { title: 't1' } }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toBe(5); + }); + }); +}); diff --git a/packages/plugins/workflow/src/actions/nodes.ts b/packages/plugins/workflow/src/actions/nodes.ts index 9d8bc1771..2bb54db51 100644 --- a/packages/plugins/workflow/src/actions/nodes.ts +++ b/packages/plugins/workflow/src/actions/nodes.ts @@ -79,6 +79,8 @@ export async function create(context: Context, next) { } instance.set('upstream', upstream); + + return instance; }); await next(); diff --git a/packages/plugins/workflow/src/instructions/delay.ts b/packages/plugins/workflow/src/instructions/delay.ts index 3ac1239cb..0355019d6 100644 --- a/packages/plugins/workflow/src/instructions/delay.ts +++ b/packages/plugins/workflow/src/instructions/delay.ts @@ -1,5 +1,5 @@ import Plugin from '..'; -import { JOB_STATUS } from "../constants"; +import { EXECUTION_STATUS, JOB_STATUS } from "../constants"; import ExecutionModel from '../models/Execution'; import JobModel from '../models/Job'; import Processor from '../Processor'; @@ -27,10 +27,16 @@ export default class { }, include: [ { - association: 'execution' + association: 'execution', + attributes: [], + where: { + status: EXECUTION_STATUS.STARTED + }, + required: true }, { association: 'node', + attributes: ['config'], where: { type: 'delay' }, @@ -61,9 +67,11 @@ export default class { } async trigger(job) { - const { execution = await job.getExecution() as ExecutionModel } = job; - const processor = this.plugin.createProcessor(execution); - await processor.resume(job); + const execution = await job.getExecution() as ExecutionModel; + if (execution.status === EXECUTION_STATUS.STARTED) { + const processor = this.plugin.createProcessor(execution); + await processor.resume(job); + } if (this.timers.get(job.id)) { this.timers.delete(job.id); } diff --git a/packages/plugins/workflow/src/instructions/parallel.ts b/packages/plugins/workflow/src/instructions/parallel.ts index 9bb41dd5b..7403003b0 100644 --- a/packages/plugins/workflow/src/instructions/parallel.ts +++ b/packages/plugins/workflow/src/instructions/parallel.ts @@ -9,41 +9,54 @@ export const PARALLEL_MODE = { RACE: 'race' } as const; -const StatusGetters = { - [PARALLEL_MODE.ALL](result) { - if (result.some(j => j && j.status === JOB_STATUS.REJECTED)) { +const Modes = { + [PARALLEL_MODE.ALL]: { + next(previous) { + 
return previous.status !== JOB_STATUS.REJECTED; + }, + getStatus(result) { + if (result.some(status => status != null && status === JOB_STATUS.REJECTED)) { + return JOB_STATUS.REJECTED; + } + if (result.every(status => status != null && status === JOB_STATUS.RESOLVED)) { + return JOB_STATUS.RESOLVED; + } + return JOB_STATUS.PENDING; + } + }, + [PARALLEL_MODE.ANY]: { + next(previous) { + return previous.status !== JOB_STATUS.RESOLVED; + }, + getStatus(result) { + if (result.some(status => status != null && status === JOB_STATUS.RESOLVED)) { + return JOB_STATUS.RESOLVED; + } + if (result.some(status => status != null ? status === JOB_STATUS.PENDING : true)) { + return JOB_STATUS.PENDING; + } return JOB_STATUS.REJECTED; } - if (result.every(j => j && j.status === JOB_STATUS.RESOLVED)) { - return JOB_STATUS.RESOLVED; + }, + [PARALLEL_MODE.RACE]: { + next(previous) { + return previous.status === JOB_STATUS.PENDING; + }, + getStatus(result) { + if (result.some(status => status != null && status === JOB_STATUS.RESOLVED)) { + return JOB_STATUS.RESOLVED; + } + if (result.some(status => status != null && status === JOB_STATUS.REJECTED)) { + return JOB_STATUS.REJECTED; + } + return JOB_STATUS.PENDING; } - return JOB_STATUS.PENDING; - }, - [PARALLEL_MODE.ANY](result) { - return result.some(j => j && j.status === JOB_STATUS.RESOLVED) - ? JOB_STATUS.RESOLVED - : ( - result.some(j => j && j.status === JOB_STATUS.PENDING) - ? JOB_STATUS.PENDING - : JOB_STATUS.REJECTED - ) - }, - [PARALLEL_MODE.RACE](result) { - return result.some(j => j && j.status === JOB_STATUS.RESOLVED) - ? JOB_STATUS.RESOLVED - : ( - result.some(j => j && j.status === JOB_STATUS.REJECTED) - ? JOB_STATUS.REJECTED - : JOB_STATUS.PENDING - ) } }; export default { async run(node: FlowNodeModel, prevJob: JobModel, processor: Processor) { - const branches = processor.nodes - .filter(item => item.upstream === node && item.branchIndex !== null) - .sort((a, b) => a.branchIndex - b.branchIndex); + const branches = processor.getBranches(node); const job = await processor.saveJob({ status: JOB_STATUS.PENDING, @@ -57,7 +70,14 @@ export default { // for users, this is almost equivalent to `Promise.all`, // because of the delay is not significant sensible. // another benifit of this is, it could handle sequenced branches in future. 
-    await branches.reduce((promise: Promise&lt;any&gt;, branch) => promise.then(() => processor.run(branch, job)), Promise.resolve());
+    const { mode = PARALLEL_MODE.ALL } = node.config;
+    await branches.reduce((promise: Promise&lt;any&gt;, branch, i) =>
+      promise.then((previous) => {
+        if (i && !Modes[mode].next(previous)) {
+          return Promise.resolve(previous);
+        }
+        return processor.run(branch, job);
+      }), Promise.resolve());
 
     return processor.end(node, job);
   },
@@ -73,13 +93,15 @@ export default {
 
     // find the index of the node which start the branch
     const jobNode = processor.nodesMap.get(branchJob.nodeId);
-    const { branchIndex } = processor.findBranchStartNode(jobNode);
+    const branchStartNode = processor.findBranchStartNode(jobNode, node);
+    const branches = processor.getBranches(node);
+    const branchIndex = branches.indexOf(branchStartNode);
     const { mode = PARALLEL_MODE.ALL } = node.config || {};
 
-    const newResult = [...result.slice(0, branchIndex), branchJob.get(), ...result.slice(branchIndex + 1)];
+    const newResult = [...result.slice(0, branchIndex), branchJob.status, ...result.slice(branchIndex + 1)];
     job.set({
       result: newResult,
       status: Modes[mode].getStatus(newResult)
    });
 
     if (job.status === JOB_STATUS.PENDING) {
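
Reviewer note on the new modes (illustrative sketch, not part of the patch): the three `mode` values of the parallel instruction differ only in how the per-branch job statuses are folded into the status of the parallel job itself. `all` rejects as soon as any branch rejects and resolves only once every branch has resolved; `any` resolves as soon as any branch resolves and rejects only once every branch has rejected; the new `race` mode lets the first branch to settle, in either direction, decide the outcome. The TypeScript below distills the `Modes[mode].getStatus()` handlers added above into a standalone function. `getParallelStatus`, the inlined `JOB_STATUS` values, and the sample data are illustrative stand-ins, not identifiers from the plugin, which imports its real constants from the workflow `constants` module.

// Per-branch slot: null until that branch has produced a job (mirrors the
// positional `result` array kept on the parallel node's own job).
enum JOB_STATUS {
  PENDING = 0,
  RESOLVED = 1,
  REJECTED = -1,
}

type ParallelMode = 'all' | 'any' | 'race';

function getParallelStatus(mode: ParallelMode, result: (JOB_STATUS | null)[]): JOB_STATUS {
  switch (mode) {
    case 'all':
      // Reject on the first rejected branch; resolve only when every branch resolved.
      if (result.some(s => s === JOB_STATUS.REJECTED)) return JOB_STATUS.REJECTED;
      if (result.every(s => s === JOB_STATUS.RESOLVED)) return JOB_STATUS.RESOLVED;
      return JOB_STATUS.PENDING;
    case 'any':
      // Resolve on the first resolved branch; reject only when every branch rejected.
      if (result.some(s => s === JOB_STATUS.RESOLVED)) return JOB_STATUS.RESOLVED;
      if (result.some(s => s == null || s === JOB_STATUS.PENDING)) return JOB_STATUS.PENDING;
      return JOB_STATUS.REJECTED;
    case 'race':
      // New in this patch: the first branch to settle, either way, decides.
      if (result.some(s => s === JOB_STATUS.RESOLVED)) return JOB_STATUS.RESOLVED;
      if (result.some(s => s === JOB_STATUS.REJECTED)) return JOB_STATUS.REJECTED;
      return JOB_STATUS.PENDING;
  }
}

// Two branches: the first has rejected, the second has not settled yet.
const partial: (JOB_STATUS | null)[] = [JOB_STATUS.REJECTED, null];
console.log(getParallelStatus('all', partial));  // -1 (REJECTED): 'all' fails fast
console.log(getParallelStatus('any', partial));  // 0 (PENDING): still waiting for a success
console.log(getParallelStatus('race', partial)); // -1 (REJECTED): first settled branch wins

Because `resume()` keeps these per-branch statuses in a positional array ordered by branch index, a single branch finishing is enough to recompute the overall status without reloading the other jobs; the `next()` handlers apply the same rules to stop scheduling the remaining branches once the outcome can no longer change.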