feat(plugin-workflow): add race mode (#542)
* feat(plugin-workflow): add race mode
* fix(plugin-workflow): fix test cases
* fix(plugin-workflow): fix node create action
parent 3e8a80a3ae
commit 4a8c9c8d87
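Before the diff, a note on semantics: a parallel node in the new `race` mode settles as soon as any branch settles — the first resolved branch resolves it, and the first rejected branch rejects it. The standalone TypeScript sketch below paraphrases the `Modes[PARALLEL_MODE.RACE].getStatus` aggregation added in the parallel instruction further down; the numeric `JOB_STATUS` values and the `raceStatus` helper are illustrative only, not the plugin's actual exports.

// Standalone sketch of the race-mode status aggregation (paraphrased from the
// parallel instruction changed below). `result` holds one slot per branch;
// null means that branch has not produced a job yet.
const JOB_STATUS = { PENDING: 0, RESOLVED: 1, REJECTED: -1 } as const;
type JobStatus = (typeof JOB_STATUS)[keyof typeof JOB_STATUS];

function raceStatus(result: (JobStatus | null)[]): JobStatus {
  if (result.some(status => status != null && status === JOB_STATUS.RESOLVED)) {
    return JOB_STATUS.RESOLVED; // first successful branch wins the race
  }
  if (result.some(status => status != null && status === JOB_STATUS.REJECTED)) {
    return JOB_STATUS.REJECTED; // first failed branch ends the race as well
  }
  return JOB_STATUS.PENDING;    // nothing has settled yet
}

// e.g. raceStatus([JOB_STATUS.RESOLVED, null]) === JOB_STATUS.RESOLVED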
@@ -548,10 +548,13 @@ export default {
'Conditions': '条件配置',

'Parallel branch': '分支',
'Add branch': '增加分支',
'All succeeded': '全部成功',
'Any succeeded': '任意成功',
'Any succeeded or failed': '任意成功或失败',
'Continue after all branches succeeded': '全部分支都成功后才能继续',
'Continue after any branch succeeded': '任意分支成功后就继续',
'Continue after any branch succeeded, or exit after any branch failed': '任意分支成功继续,或失败后退出',

'Delay': '延时',
'Duration': '时长',
@@ -11,6 +11,7 @@ export default {
title: '{{t("Duration")}}',
'x-decorator': 'FormItem',
'x-component': 'Duration',
default: 60000
},
'config.endStatus': {
type: 'number',
@@ -46,7 +46,17 @@ export default {
</Tooltip>
)
},
// { value: 'race', label: '任意退出' },
{
value: 'race',
label: (
<Tooltip
title={i18n.t('Continue after any branch succeeded, or exit after any branch failed')}
placement="bottom"
>
{i18n.t('Any succeeded or failed')} <QuestionCircleOutlined style={{ color: '#999' }} />
</Tooltip>
)
},
],
default: 'all'
}
@@ -201,11 +201,12 @@ export default class Processor {
}

// parent node should take over the control
public end(node, job) {
public async end(node, job) {
const parentNode = this.findBranchParentNode(node);
// no parent, means on main flow
if (parentNode) {
return this.recall(parentNode, job);
await this.recall(parentNode, job);
return job;
}

// really done for all nodes
@@ -256,12 +257,24 @@ export default class Processor {
return job;
}

getBranches(node: FlowNodeModel): FlowNodeModel[] {
return this.nodes
.filter(item => item.upstream === node && item.branchIndex !== null)
.sort((a, b) => a.branchIndex - b.branchIndex);
}

// find the first node in current branch
findBranchStartNode(node: FlowNodeModel): FlowNodeModel | null {
findBranchStartNode(node: FlowNodeModel, parent?: FlowNodeModel): FlowNodeModel | null {
for (let n = node; n; n = n.upstream) {
if (!parent) {
if (n.branchIndex !== null) {
return n;
}
} else {
if (n.upstream === parent) {
return n;
}
}
}
return null;
}
@@ -4,7 +4,7 @@ import { getApp } from '.';

describe('workflow > workflow', () => {
describe('workflow > Plugin', () => {
let app: MockServer;
let agent;
let db: Database;

@@ -22,7 +22,6 @@ describe('workflow > Processor', () => {
PostRepo = db.getCollection('posts').repository;

workflow = await WorkflowModel.create({
title: 'test workflow',
enabled: true,
type: 'collection',
config: {
@@ -45,7 +44,6 @@ describe('workflow > Processor', () => {

it('execute resolved workflow', async () => {
await workflow.createNode({
title: 'echo',
type: 'echo'
});

@@ -62,7 +60,6 @@ describe('workflow > Processor', () => {

it('workflow with single simple node', async () => {
await workflow.createNode({
title: 'echo',
type: 'echo'
});

@@ -108,7 +105,6 @@ describe('workflow > Processor', () => {

it('workflow with error node', async () => {
await workflow.createNode({
title: 'error',
type: 'error'
});
@@ -128,12 +124,10 @@ describe('workflow > Processor', () => {
describe('manual nodes', () => {
it('manual node should suspend execution, and could be manually resume', async () => {
const n1 = await workflow.createNode({
title: 'prompt',
type: 'prompt',
});

const n2 = await workflow.createNode({
title: 'echo',
type: 'echo',
upstreamId: n1.id
});

@@ -163,11 +157,9 @@ describe('workflow > Processor', () => {

it('manual node should suspend execution, resuming with error should end execution', async () => {
const n1 = await workflow.createNode({
title: 'prompt error',
type: 'prompt->error',
});
const n2 = await workflow.createNode({
title: 'echo',
type: 'echo',
upstreamId: n1.id
});
@@ -196,20 +188,17 @@ describe('workflow > Processor', () => {
describe('branch: condition', () => {
it('condition node link to different downstreams', async () => {
const n1 = await workflow.createNode({
title: 'condition',
type: 'condition',
// no config means always true
});

const n2 = await workflow.createNode({
title: 'true to echo',
type: 'echo',
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

await workflow.createNode({
title: 'false to echo',
type: 'echo',
branchIndex: BRANCH_INDEX.ON_FALSE,
upstreamId: n1.id
@@ -229,20 +218,17 @@ describe('workflow > Processor', () => {

it('suspend downstream in condition branch, then go on', async () => {
const n1 = await workflow.createNode({
title: 'condition',
type: 'condition',
// no config means always true
});

const n2 = await workflow.createNode({
title: 'manual',
type: 'prompt',
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

const n3 = await workflow.createNode({
title: 'echo input value',
type: 'echo',
upstreamId: n1.id
});
@@ -265,20 +251,17 @@ describe('workflow > Processor', () => {

it('resume error downstream in condition branch, should reject', async () => {
const n1 = await workflow.createNode({
title: 'condition',
type: 'condition',
// no config means always true
});

const n2 = await workflow.createNode({
title: 'manual',
type: 'prompt->error',
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

const n3 = await workflow.createNode({
title: 'echo input value',
type: 'echo',
upstreamId: n1.id
});
@@ -301,132 +284,19 @@ describe('workflow > Processor', () => {
});
});

describe('branch: parallel node', () => {
it('link to single branch', async () => {
const n1 = await workflow.createNode({
title: 'parallel',
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'echo1',
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'echo2',
type: 'echo',
upstreamId: n1.id
});

await n1.setDownstream(n3);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toEqual(3);
});
it('link to multipe branches', async () => {
const n1 = await workflow.createNode({
title: 'parallel',
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'echo1',
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'echo2',
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const n4 = await workflow.createNode({
title: 'echo on end',
type: 'echo',
upstreamId: n1.id
});

await n1.setDownstream(n4);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toEqual(4);
});
it('downstream has manual node', async () => {
const n1 = await workflow.createNode({
title: 'parallel',
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'prompt',
type: 'prompt',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'echo',
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const n4 = await workflow.createNode({
title: 'echo on end',
type: 'echo',
upstreamId: n1.id
});

await n1.setDownstream(n4);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toEqual(EXECUTION_STATUS.STARTED);

const [pending] = await execution.getJobs({ where: { nodeId: n2.id } });
pending.set('result', 123);
const processor = plugin.createProcessor(execution);
await processor.resume(pending);

expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toEqual(4);
});
});
describe('branch: mixed', () => {
it('condition branches contains parallel', async () => {
const n1 = await workflow.createNode({
title: 'condition',
type: 'condition'
});

const n2 = await workflow.createNode({
title: 'parallel',
type: 'parallel',
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

const n3 = await workflow.createNode({
title: 'prompt',
type: 'prompt',
upstreamId: n2.id,
branchIndex: 0
@@ -467,19 +337,16 @@ describe('workflow > Processor', () => {

it('parallel branches contains condition', async () => {
const n1 = await workflow.createNode({
title: 'parallel',
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'prompt',
type: 'prompt',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'condition',
type: 'condition',
upstreamId: n1.id,
branchIndex: 1
@@ -502,19 +369,20 @@ describe('workflow > Processor', () => {

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toEqual(EXECUTION_STATUS.STARTED);
const [e1] = await workflow.getExecutions();
expect(e1.status).toEqual(EXECUTION_STATUS.STARTED);

const pendingJobs = await execution.getJobs();
const pendingJobs = await e1.getJobs();
expect(pendingJobs.length).toBe(4);

const pending = pendingJobs.find(item => item.nodeId === n2.id );
pending.set('result', 123);
const processor = plugin.createProcessor(execution);
const processor = plugin.createProcessor(e1);
await processor.resume(pending);

expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
const [e2] = await workflow.getExecutions();
expect(e2.status).toEqual(EXECUTION_STATUS.RESOLVED);
const jobs = await e2.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toEqual(5);
});
});
@@ -0,0 +1,415 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp, sleep } from '..';
import { EXECUTION_STATUS } from '../../constants';

describe('workflow > instructions > parallel', () => {
let app: Application;
let db: Database;
let PostRepo;
let WorkflowModel;
let workflow;
let plugin;

beforeEach(async () => {
app = await getApp();
plugin = app.pm.get('@nocobase/plugin-workflow');

db = app.db;
WorkflowModel = db.getCollection('workflows').model;
PostRepo = db.getCollection('posts').repository;

workflow = await WorkflowModel.create({
enabled: true,
type: 'collection',
config: {
mode: 1,
collection: 'posts'
}
});
});

afterEach(() => app.stop());
describe('single all', () => {
it('all resolved', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});
const n2 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(3);
});

it('some rejected', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});
const n2 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.REJECTED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(3);
});
it('first branch rejected', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});

const n2 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.REJECTED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(2);
});
});
describe('single any', () => {
it('first resolved', async () => {
const n1 = await workflow.createNode({
type: 'parallel',
config: {
mode: 'any'
}
});
const n2 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(2);
});
it('first rejected', async () => {
const n1 = await workflow.createNode({
type: 'parallel',
config: {
mode: 'any'
}
});
const n2 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(3);
});
it('all rejected', async () => {
const n1 = await workflow.createNode({
type: 'parallel',
config: {
mode: 'any'
}
});
const n2 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.REJECTED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(3);
});
});
describe('single race', () => {
it('first resolved', async () => {
const n1 = await workflow.createNode({
type: 'parallel',
config: {
mode: 'race'
}
});
const n2 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(2);
});
it('first rejected', async () => {
const n1 = await workflow.createNode({
type: 'parallel',
config: {
mode: 'race'
}
});
const n2 = await workflow.createNode({
type: 'error',
upstreamId: n1.id,
branchIndex: 0
});
const n3 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.REJECTED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(2);
});
});
describe('branch and join', () => {
it('link to single branch', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'echo1',
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'echo2',
type: 'echo',
upstreamId: n1.id
});

await n1.setDownstream(n3);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(3);
});
it('link to multipe branches', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'echo1',
type: 'echo',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'echo2',
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const n4 = await workflow.createNode({
title: 'echo on end',
type: 'echo',
upstreamId: n1.id
});

await n1.setDownstream(n4);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(4);
});
it('random branch index', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'echo1',
type: 'echo',
upstreamId: n1.id,
branchIndex: 3
});

const n3 = await workflow.createNode({
title: 'echo2',
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(3);
});
it('downstream has manual node', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});

const n2 = await workflow.createNode({
title: 'prompt',
type: 'prompt',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
title: 'echo',
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const n4 = await workflow.createNode({
title: 'echo on end',
type: 'echo',
upstreamId: n1.id
});

await n1.setDownstream(n4);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.STARTED);

const [pending] = await execution.getJobs({ where: { nodeId: n2.id } });
pending.set('result', 123);
const processor = plugin.createProcessor(execution);
await processor.resume(pending);

expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(4);
});
});
describe('nested', () => {
it('nested 2 levels', async () => {
const n1 = await workflow.createNode({
type: 'parallel'
});

const n2 = await workflow.createNode({
type: 'parallel',
upstreamId: n1.id,
branchIndex: 0
});

const n3 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id,
branchIndex: 1
});

const n4 = await workflow.createNode({
type: 'echo',
upstreamId: n2.id,
branchIndex: 0
});

const n5 = await workflow.createNode({
type: 'echo',
upstreamId: n1.id
});
await n1.setDownstream(n5);

const post = await PostRepo.create({ values: { title: 't1' } });

const [execution] = await workflow.getExecutions();
expect(execution.status).toBe(EXECUTION_STATUS.RESOLVED);
const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toBe(5);
});
});
});
@@ -79,6 +79,8 @@ export async function create(context: Context, next) {
}

instance.set('upstream', upstream);

return instance;
});

await next();
@@ -1,5 +1,5 @@
import Plugin from '..';
import { JOB_STATUS } from "../constants";
import { EXECUTION_STATUS, JOB_STATUS } from "../constants";
import ExecutionModel from '../models/Execution';
import JobModel from '../models/Job';
import Processor from '../Processor';

@@ -27,10 +27,16 @@ export default class {
},
include: [
{
association: 'execution'
association: 'execution',
attributes: [],
where: {
status: EXECUTION_STATUS.STARTED
},
required: true
},
{
association: 'node',
attributes: ['config'],
where: {
type: 'delay'
},

@@ -61,9 +67,11 @@ export default class {
}

async trigger(job) {
const { execution = await job.getExecution() as ExecutionModel } = job;
const execution = await job.getExecution() as ExecutionModel;
if (execution.status === EXECUTION_STATUS.STARTED) {
const processor = this.plugin.createProcessor(execution);
await processor.resume(job);
}
if (this.timers.get(job.id)) {
this.timers.delete(job.id);
}
@@ -9,41 +9,54 @@ export const PARALLEL_MODE = {
RACE: 'race'
} as const;

const StatusGetters = {
[PARALLEL_MODE.ALL](result) {
if (result.some(j => j && j.status === JOB_STATUS.REJECTED)) {
const Modes = {
[PARALLEL_MODE.ALL]: {
next(previous) {
return previous.status !== JOB_STATUS.REJECTED;
},
getStatus(result) {
if (result.some(status => status != null && status === JOB_STATUS.REJECTED)) {
return JOB_STATUS.REJECTED;
}
if (result.every(j => j && j.status === JOB_STATUS.RESOLVED)) {
if (result.every(status => status != null && status === JOB_STATUS.RESOLVED)) {
return JOB_STATUS.RESOLVED;
}
return JOB_STATUS.PENDING;
}
},
[PARALLEL_MODE.ANY](result) {
return result.some(j => j && j.status === JOB_STATUS.RESOLVED)
? JOB_STATUS.RESOLVED
: (
result.some(j => j && j.status === JOB_STATUS.PENDING)
? JOB_STATUS.PENDING
: JOB_STATUS.REJECTED
)
[PARALLEL_MODE.ANY]: {
next(previous) {
return previous.status !== JOB_STATUS.RESOLVED;
},
[PARALLEL_MODE.RACE](result) {
return result.some(j => j && j.status === JOB_STATUS.RESOLVED)
? JOB_STATUS.RESOLVED
: (
result.some(j => j && j.status === JOB_STATUS.REJECTED)
? JOB_STATUS.REJECTED
: JOB_STATUS.PENDING
)
getStatus(result) {
if (result.some(status => status != null && status === JOB_STATUS.RESOLVED)) {
return JOB_STATUS.RESOLVED;
}
if (result.some(status => status != null ? status === JOB_STATUS.PENDING : true)) {
return JOB_STATUS.PENDING;
}
return JOB_STATUS.REJECTED;
}
},
[PARALLEL_MODE.RACE]: {
next(previous) {
return previous.status === JOB_STATUS.PENDING;
},
getStatus(result) {
if (result.some(status => status != null && status === JOB_STATUS.RESOLVED)) {
return JOB_STATUS.RESOLVED;
}
if (result.some(status => status != null && status === JOB_STATUS.REJECTED)) {
return JOB_STATUS.REJECTED;
}
return JOB_STATUS.PENDING;
}
}
};
export default {
async run(node: FlowNodeModel, prevJob: JobModel, processor: Processor) {
const branches = processor.nodes
.filter(item => item.upstream === node && item.branchIndex !== null)
.sort((a, b) => a.branchIndex - b.branchIndex);
const branches = processor.getBranches(node);

const job = await processor.saveJob({
status: JOB_STATUS.PENDING,

@@ -57,7 +70,14 @@ export default {
// for users, this is almost equivalent to `Promise.all`,
// because of the delay is not significant sensible.
// another benifit of this is, it could handle sequenced branches in future.
await branches.reduce((promise: Promise<any>, branch) => promise.then(() => processor.run(branch, job)), Promise.resolve());
const { mode = PARALLEL_MODE.ALL } = node.config;
await branches.reduce((promise: Promise<any>, branch, i) =>
promise.then((previous) => {
if (i && !Modes[mode].next(previous)) {
return Promise.resolve(previous);
}
return processor.run(branch, job);
}), Promise.resolve());

return processor.end(node, job);
},
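The hunk above also changes how branches are scheduled: they still start one after another, but the new per-mode `next(previous)` predicate lets the remaining branches be skipped once the outcome is already decided (for `race`, as soon as the previous branch is no longer pending). A standalone sketch of that loop follows; `Mode`, `Job`, and `runBranches` are illustrative names, not the plugin's API.

type Mode = 'all' | 'any' | 'race';
interface Job { status: number }
const JOB_STATUS = { PENDING: 0, RESOLVED: 1, REJECTED: -1 } as const;

// Per-mode predicate: should the next branch still be started,
// given the job the previous branch settled with?
const next: Record<Mode, (previous: Job) => boolean> = {
  all: previous => previous.status !== JOB_STATUS.REJECTED,
  any: previous => previous.status !== JOB_STATUS.RESOLVED,
  race: previous => previous.status === JOB_STATUS.PENDING,
};

async function runBranches(
  branches: unknown[],
  mode: Mode,
  runBranch: (branch: unknown) => Promise<Job>,
): Promise<void> {
  await branches.reduce(
    (promise: Promise<Job | undefined>, branch, i) =>
      promise.then(previous => {
        // After the first branch, stop scheduling once the mode is decided.
        if (i && previous && !next[mode](previous)) {
          return previous;
        }
        return runBranch(branch);
      }),
    Promise.resolve<Job | undefined>(undefined),
  );
}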
@@ -73,13 +93,15 @@ export default {

// find the index of the node which start the branch
const jobNode = processor.nodesMap.get(branchJob.nodeId);
const { branchIndex } = processor.findBranchStartNode(jobNode);
const branchStartNode = processor.findBranchStartNode(jobNode, node);
const branches = processor.getBranches(node);
const branchIndex = branches.indexOf(branchStartNode);
const { mode = PARALLEL_MODE.ALL } = node.config || {};

const newResult = [...result.slice(0, branchIndex), branchJob.get(), ...result.slice(branchIndex + 1)];
const newResult = [...result.slice(0, branchIndex), branchJob.status, ...result.slice(branchIndex + 1)];
job.set({
result: newResult,
status: StatusGetters[mode](newResult)
status: Modes[mode].getStatus(newResult)
});

if (job.status === JOB_STATUS.PENDING) {