diff --git a/packages/plugin-workflow/.npmignore b/packages/plugin-workflow/.npmignore new file mode 100644 index 000000000..461574b2f --- /dev/null +++ b/packages/plugin-workflow/.npmignore @@ -0,0 +1,7 @@ +node_modules +*.log +docs +__tests__ +tsconfig.json +src +.fatherrc.ts \ No newline at end of file diff --git a/packages/plugin-workflow/package.json b/packages/plugin-workflow/package.json new file mode 100644 index 000000000..e44e1e36f --- /dev/null +++ b/packages/plugin-workflow/package.json @@ -0,0 +1,12 @@ +{ + "name": "@nocobase/plugin-workflow", + "version": "0.6.0-alpha.0", + "main": "lib/index.js", + "private": true, + "license": "MIT", + "dependencies": { + }, + "devDependencies": { + }, + "gitHead": "f0b335ac30f29f25c95d7d137655fa64d8d67f1e" +} diff --git a/packages/plugin-workflow/src/__tests__/collections/posts.ts b/packages/plugin-workflow/src/__tests__/collections/posts.ts new file mode 100644 index 000000000..021dd12ae --- /dev/null +++ b/packages/plugin-workflow/src/__tests__/collections/posts.ts @@ -0,0 +1,15 @@ +import { CollectionOptions } from '@nocobase/database'; + +export default { + name: 'posts', + fields: [ + { + type: 'string', + name: 'title', + }, + { + type: 'boolean', + name: 'published', + } + ] +} as CollectionOptions; diff --git a/packages/plugin-workflow/src/__tests__/collections/targets.ts b/packages/plugin-workflow/src/__tests__/collections/targets.ts new file mode 100644 index 000000000..525e091c6 --- /dev/null +++ b/packages/plugin-workflow/src/__tests__/collections/targets.ts @@ -0,0 +1,15 @@ +import { CollectionOptions } from '@nocobase/database'; + +export default { + name: 'targets', + fields: [ + { + type: 'string', + name: 'col1', + }, + { + type: 'string', + name: 'col2', + } + ], +} as CollectionOptions; diff --git a/packages/plugin-workflow/src/__tests__/execution.test.ts b/packages/plugin-workflow/src/__tests__/execution.test.ts new file mode 100644 index 000000000..00ca0d710 --- /dev/null +++ b/packages/plugin-workflow/src/__tests__/execution.test.ts @@ -0,0 +1,512 @@ +import { Application } from '@nocobase/server'; +import Database from '@nocobase/database'; +import { getApp } from '.'; +import { BRANCH_INDEX, EXECUTION_STATUS, JOB_STATUS } from '../constants'; + +jest.setTimeout(300000); + +describe('execution', () => { + let app: Application; + let db: Database; + let PostModel; + let WorkflowModel; + let workflow; + + beforeEach(async () => { + app = await getApp(); + + db = app.db; + WorkflowModel = db.getCollection('workflows').model; + PostModel = db.getCollection('posts').model; + + workflow = await WorkflowModel.create({ + title: 'test workflow', + enabled: true, + type: 'model', + config: { + mode: 1, + collection: 'posts' + } + }); + }); + + afterEach(() => db.close()); + + describe('base', () => { + it('empty workflow without any nodes', async () => { + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.context.data.title).toEqual(post.title); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + }); + + it('execute resolved workflow', async () => { + await workflow.createNode({ + title: 'echo', + type: 'echo' + }); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + expect(execution.start()).rejects.toThrow(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await 
execution.getJobs(); + expect(jobs.length).toEqual(1); + }); + + it('workflow with single simple node', async () => { + await workflow.createNode({ + title: 'echo', + type: 'echo' + }); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.context.data.title).toEqual(post.title); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(1); + const { status, result } = jobs[0].get(); + expect(status).toEqual(JOB_STATUS.RESOLVED); + expect(result).toMatchObject({ data: JSON.parse(JSON.stringify(post.toJSON())) }); + }); + + it('workflow with multiple simple nodes', async () => { + const n1 = await workflow.createNode({ + title: 'echo 1', + type: 'echo' + }); + + const n2 = await workflow.createNode({ + title: 'echo 2', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n2); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.context.data.title).toEqual(post.title); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(2); + const { status, result } = jobs[1].get(); + expect(status).toEqual(JOB_STATUS.RESOLVED); + expect(result).toMatchObject({ data: JSON.parse(JSON.stringify(post.toJSON())) }); + }); + + it('workflow with error node', async () => { + await workflow.createNode({ + title: 'error', + type: 'error' + }); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(1); + const { status, result } = jobs[0].get(); + expect(status).toEqual(JOB_STATUS.REJECTED); + expect(result).toBe('Error: definite error'); + }); + }); + + describe('manual nodes', () => { + it('manual node should suspend execution, and could be manually resume', async () => { + const n1 = await workflow.createNode({ + title: 'prompt', + type: 'prompt', + }); + + const n2 = await workflow.createNode({ + title: 'echo', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n2); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + const [pending] = await execution.getJobs(); + expect(pending.status).toEqual(JOB_STATUS.PENDING); + expect(pending.result).toEqual(null); + + pending.set('result', 123); + await execution.resume(pending); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toEqual(2); + expect(jobs[0].status).toEqual(JOB_STATUS.RESOLVED); + expect(jobs[0].result).toEqual(123); + expect(jobs[1].status).toEqual(JOB_STATUS.RESOLVED); + expect(jobs[1].result).toEqual(123); + }); + + it('manual node should suspend execution, resuming with error should end execution', async () => { + const n1 = await workflow.createNode({ + title: 'prompt error', + type: 'prompt->error', + }); + const n2 = await workflow.createNode({ + title: 'echo', + type: 'echo', + upstreamId: n1.id + }); + await n1.setDownstream(n2); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + 
expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + const [pending] = await execution.getJobs(); + expect(pending.status).toEqual(JOB_STATUS.PENDING); + expect(pending.result).toEqual(null); + + pending.set('result', 123); + await execution.resume(pending); + expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(1); + expect(jobs[0].status).toEqual(JOB_STATUS.REJECTED); + expect(jobs[0].result).toEqual('Error: input failed'); + }); + }); + + describe('branch: condition', () => { + it('condition node link to different downstreams', async () => { + const n1 = await workflow.createNode({ + title: 'condition', + type: 'condition', + // no config means always true + }); + + const n2 = await workflow.createNode({ + title: 'true to echo', + type: 'echo', + branchIndex: BRANCH_INDEX.ON_TRUE, + upstreamId: n1.id + }); + + await workflow.createNode({ + title: 'false to echo', + type: 'echo', + branchIndex: BRANCH_INDEX.ON_FALSE, + upstreamId: n1.id + }); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toEqual(2); + expect(jobs[0].nodeId).toEqual(n1.id); + expect(jobs[1].nodeId).toEqual(n2.id); + expect(jobs[1].result).toEqual(true); + }); + + it('suspend downstream in condition branch, then go on', async () => { + const n1 = await workflow.createNode({ + title: 'condition', + type: 'condition', + // no config means always true + }); + + const n2 = await workflow.createNode({ + title: 'manual', + type: 'prompt', + branchIndex: BRANCH_INDEX.ON_TRUE, + upstreamId: n1.id + }); + + const n3 = await workflow.createNode({ + title: 'echo input value', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n3); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + + const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); + pending.set('result', 123); + await execution.resume(pending); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(3); + }); + + it('resume error downstream in condition branch, should reject', async () => { + const n1 = await workflow.createNode({ + title: 'condition', + type: 'condition', + // no config means always true + }); + + const n2 = await workflow.createNode({ + title: 'manual', + type: 'prompt->error', + branchIndex: BRANCH_INDEX.ON_TRUE, + upstreamId: n1.id + }); + + const n3 = await workflow.createNode({ + title: 'echo input value', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n3); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + + const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); + pending.set('result', 123); + await execution.resume(pending); + expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(2); + }); + }); + + describe('branch: parallel node', () => { + it('link to single branch', async () => { + const n1 = await workflow.createNode({ + title: 'parallel', + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 
'echo1', + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'echo2', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n3); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toEqual(3); + }); + + it('link to multipe branches', async () => { + const n1 = await workflow.createNode({ + title: 'parallel', + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'echo1', + type: 'echo', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'echo2', + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const n4 = await workflow.createNode({ + title: 'echo on end', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n4); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toEqual(4); + }); + + it('downstream has manual node', async () => { + const n1 = await workflow.createNode({ + title: 'parallel', + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'prompt', + type: 'prompt', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'echo', + type: 'echo', + upstreamId: n1.id, + branchIndex: 1 + }); + + const n4 = await workflow.createNode({ + title: 'echo on end', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n4); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + + const [pending] = await execution.getJobs({ nodeId: n2.id }); + pending.set('result', 123); + await execution.resume(pending); + + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toEqual(4); + }); + }); + + describe('branch: mixed', () => { + it('condition branches contains parallel', async () => { + const n1 = await workflow.createNode({ + title: 'condition', + type: 'condition' + }); + + const n2 = await workflow.createNode({ + title: 'parallel', + type: 'parallel', + branchIndex: BRANCH_INDEX.ON_TRUE, + upstreamId: n1.id + }); + + const n3 = await workflow.createNode({ + title: 'prompt', + type: 'prompt', + upstreamId: n2.id, + branchIndex: 0 + }); + + const n4 = await workflow.createNode({ + title: 'parallel echo', + type: 'echo', + upstreamId: n2.id, + branchIndex: 1 + }); + + const n5 = await workflow.createNode({ + title: 'last echo', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n5); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + + const pendingJobs = await execution.getJobs(); + expect(pendingJobs.length).toBe(4); + + const pending = pendingJobs.find(item => item.nodeId === n3.id ); + pending.set('result', 123); + await execution.resume(pending); + + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 
'ASC']] }); + expect(jobs.length).toEqual(5); + }); + + it('parallel branches contains condition', async () => { + const n1 = await workflow.createNode({ + title: 'parallel', + type: 'parallel' + }); + + const n2 = await workflow.createNode({ + title: 'prompt', + type: 'prompt', + upstreamId: n1.id, + branchIndex: 0 + }); + + const n3 = await workflow.createNode({ + title: 'condition', + type: 'condition', + upstreamId: n1.id, + branchIndex: 1 + }); + + const n4 = await workflow.createNode({ + title: 'condition echo', + type: 'echo', + upstreamId: n3.id, + branchIndex: BRANCH_INDEX.ON_TRUE + }); + + const n5 = await workflow.createNode({ + title: 'last echo', + type: 'echo', + upstreamId: n1.id + }); + + await n1.setDownstream(n5); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.STARTED); + + const pendingJobs = await execution.getJobs(); + expect(pendingJobs.length).toBe(4); + + const pending = pendingJobs.find(item => item.nodeId === n2.id ); + pending.set('result', 123); + await execution.resume(pending); + + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); + expect(jobs.length).toEqual(5); + }); + }); +}); diff --git a/packages/plugin-workflow/src/__tests__/index.ts b/packages/plugin-workflow/src/__tests__/index.ts new file mode 100644 index 000000000..65b026537 --- /dev/null +++ b/packages/plugin-workflow/src/__tests__/index.ts @@ -0,0 +1,61 @@ +import path from 'path'; +import { MockServer, mockServer } from '@nocobase/test'; + +import plugin from '../server'; +import { registerInstruction } from '../instructions'; +import { JOB_STATUS } from '../constants'; + +export function sleep(ms: number) { + return new Promise(resolve => { + setTimeout(resolve, ms); + }); +} + +export async function getApp(options = {}): Promise { + const app = mockServer(options); + + app.plugin(plugin); + + // for test only + registerInstruction('echo', { + run(this, { result }, execution) { + return { + status: JOB_STATUS.RESOLVED, + result + }; + } + }); + + registerInstruction('error', { + run(this, input, execution) { + throw new Error('definite error'); + } + }); + + registerInstruction('prompt->error', { + run(this, input, execution) { + return { + status: JOB_STATUS.PENDING + }; + }, + resume(this, input, execution) { + throw new Error('input failed'); + } + }); + + await app.load(); + + await app.db.import({ + directory: path.resolve(__dirname, './collections') + }); + + try { + await app.db.sync(); + } catch (error) { + console.error(error); + } + // TODO: need a better life cycle event than manually trigger + await app.emitAsync('beforeStart'); + + return app; +} diff --git a/packages/plugin-workflow/src/__tests__/instructions/condition.test.ts b/packages/plugin-workflow/src/__tests__/instructions/condition.test.ts new file mode 100644 index 000000000..5fbdcbb17 --- /dev/null +++ b/packages/plugin-workflow/src/__tests__/instructions/condition.test.ts @@ -0,0 +1,119 @@ +import { Application } from '@nocobase/server'; +import Database from '@nocobase/database'; +import { getApp } from '..'; +import { EXECUTION_STATUS, BRANCH_INDEX } from '../../constants'; + + + +describe('workflow > instructions > condition', () => { + let app: Application; + let db: Database; + let PostModel; + let WorkflowModel; + let workflow; + + beforeEach(async () => { + app = await getApp(); + + db = app.db; + WorkflowModel = 
db.getCollection('workflows').model; + PostModel = db.getCollection('posts').model; + + workflow = await WorkflowModel.create({ + title: 'test workflow', + enabled: true, + type: 'model', + config: { + mode: 1, + collection: 'posts' + } + }); + }); + + afterEach(() => db.close()); + + describe('config.rejectOnFalse', () => { + + }); + + describe('single calculation', () => { + it('calculation to true downstream', async () => { + + const n1 = await workflow.createNode({ + title: 'condition', + type: 'condition', + config: { + // (1 === 1): true + calculation: { + calculator: 'equal', + operands: [{ value: 1 }, { value: 1 }] + } + } + }); + + const n2 = await workflow.createNode({ + title: 'true to echo', + type: 'echo', + branchIndex: BRANCH_INDEX.ON_TRUE, + upstreamId: n1.id + }); + + const n3 = await workflow.createNode({ + title: 'false to echo', + type: 'echo', + branchIndex: BRANCH_INDEX.ON_FALSE, + upstreamId: n1.id + }); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(2); + expect(jobs[1].result).toEqual(true); + }); + + it('calculation to false downstream', async () => { + const n1 = await workflow.createNode({ + title: 'condition', + type: 'condition', + config: { + // (0 === 1): false + calculation: { + calculator: 'equal', + operands: [{ value: 0 }, { value: 1 }] + } + } + }); + + await workflow.createNode({ + title: 'true to echo', + type: 'echo', + branchIndex: BRANCH_INDEX.ON_TRUE, + upstreamId: n1.id + }); + + await workflow.createNode({ + title: 'false to echo', + type: 'echo', + branchIndex: BRANCH_INDEX.ON_FALSE, + upstreamId: n1.id + }); + + const post = await PostModel.create({ title: 't1' }); + + const [execution] = await workflow.getExecutions(); + expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); + + const jobs = await execution.getJobs(); + expect(jobs.length).toEqual(2); + expect(jobs[1].result).toEqual(false); + }); + }); + + describe('group calculation', () => { + + }); +}); diff --git a/packages/plugin-workflow/src/collections/executions.ts b/packages/plugin-workflow/src/collections/executions.ts new file mode 100644 index 000000000..1053e6657 --- /dev/null +++ b/packages/plugin-workflow/src/collections/executions.ts @@ -0,0 +1,33 @@ +import { CollectionOptions } from '@nocobase/database'; + +export default { + name: 'executions', + model: 'ExecutionModel', + title: '执行流程', + fields: [ + { + interface: 'linkTo', + type: 'belongsTo', + name: 'workflow', + title: '所属工作流' + }, + { + interface: 'linkTo', + type: 'hasMany', + name: 'jobs', + title: '流程记录' + }, + { + interface: 'json', + type: 'jsonb', + name: 'context', + title: '上下文数据' + }, + { + interface: 'select', + type: 'integer', + name: 'status', + title: '状态' + } + ] +} as CollectionOptions; diff --git a/packages/plugin-workflow/src/collections/flow_nodes.ts b/packages/plugin-workflow/src/collections/flow_nodes.ts new file mode 100644 index 000000000..0fb32fbbf --- /dev/null +++ b/packages/plugin-workflow/src/collections/flow_nodes.ts @@ -0,0 +1,78 @@ +import { CollectionOptions } from '@nocobase/database'; + +export default { + name: 'flow_nodes', + // model: 'FlowNodeModel', + title: 'Workflow Nodes', + fields: [ + { + interface: 'string', + type: 'string', + name: 'title', + title: '名称', + component: { + showInTable: true, + showInDetail: true, + showInForm: true, + }, + }, + // which workflow 
belongs to + { + interface: 'linkTo', + name: 'workflow', + type: 'belongsTo', + }, + { + interface: 'linkTo', + name: 'upstream', + type: 'belongsTo', + target: 'flow_nodes' + }, + { + interface: 'linkTo', + name: 'branches', + type: 'hasMany', + target: 'flow_nodes', + sourceKey: 'id', + foreignKey: 'upstream_id', + }, + // only works when upstream node is branching type, like condition and parallel. + // put here because the design of flow-links model is not really necessary for now. + // or it should be put into flow-links model. + { + interface: 'select', + name: 'branchIndex', + type: 'integer', + title: 'branch index' + }, + // for reasons: + // 1. redirect type node to solve cycle flow. + // 2. recognize as true next node after branches. + { + interface: 'linkTo', + name: 'downstream', + type: 'belongsTo', + target: 'flow_nodes' + }, + { + interface: 'select', + type: 'string', + name: 'type', + title: '类型', + // TODO: data for test only now + dataSource: [ + { label: '数据处理', value: 'data' }, + { label: '数据查询', value: 'query' }, + { label: '等待人工输入', value: 'prompt' }, + { label: '条件判断', value: 'condition' }, + ] + }, + { + interface: 'json', + type: 'jsonb', + name: 'config', + title: '配置', + defaultValue: {} + } + ] +} as CollectionOptions; diff --git a/packages/plugin-workflow/src/collections/jobs.ts b/packages/plugin-workflow/src/collections/jobs.ts new file mode 100644 index 000000000..18485ca63 --- /dev/null +++ b/packages/plugin-workflow/src/collections/jobs.ts @@ -0,0 +1,48 @@ +import { CollectionOptions } from '@nocobase/database'; + +export default { + name: 'jobs', + title: '流程记录', + fields: [ + { + interface: 'linkTo', + type: 'belongsTo', + name: 'execution', + title: '所属流程' + }, + { + interface: 'linkTo', + type: 'belongsTo', + name: 'node', + target: 'flow_nodes', + title: '所属节点' + }, + { + interface: 'linkTo', + type: 'belongsTo', + name: 'upstream', + target: 'jobs', + title: '上游记录' + }, + // pending / resolved / rejected + { + interface: 'status', + type: 'integer', + name: 'status', + title: '处理状态' + }, + { + interface: 'json', + type: 'jsonb', + name: 'result', + title: '处理结果' + }, + // TODO: possibly need node snapshot in case if node has been changed + // { + // interface: 'json', + // type: 'jsonb', + // name: 'nodeSnapshot', + // title: 'node snapshot' + // } + ] +} as CollectionOptions; diff --git a/packages/plugin-workflow/src/collections/workflows.ts b/packages/plugin-workflow/src/collections/workflows.ts new file mode 100644 index 000000000..02deea2cb --- /dev/null +++ b/packages/plugin-workflow/src/collections/workflows.ts @@ -0,0 +1,56 @@ +import { CollectionOptions } from '@nocobase/database'; + +export default { + name: 'workflows', + model: 'WorkflowModel', + title: '自动化', + fields: [ + { + interface: 'string', + type: 'string', + name: 'title', + title: '自动化名称', + required: true + }, + { + interface: 'boolean', + type: 'boolean', + name: 'enabled', + title: '启用' + }, + { + interface: 'textarea', + type: 'text', + name: 'description', + title: '描述' + }, + { + interface: 'select', + type: 'string', + title: '触发方式', + name: 'type', + required: true + }, + { + interface: 'json', + type: 'jsonb', + title: '触发配置', + name: 'config', + required: true + }, + { + interface: 'linkTo', + type: 'hasMany', + name: 'nodes', + target: 'flow_nodes', + title: '流程节点' + }, + { + interface: 'linkTo', + type: 'hasMany', + name: 'executions', + target: 'executions', + title: '触发执行' + } + ] +} as CollectionOptions; diff --git a/packages/plugin-workflow/src/constants.ts 
b/packages/plugin-workflow/src/constants.ts new file mode 100644 index 000000000..8cc682187 --- /dev/null +++ b/packages/plugin-workflow/src/constants.ts @@ -0,0 +1,19 @@ +export const EXECUTION_STATUS = { + STARTED: 0, + RESOLVED: 1, + REJECTED: -1, + CANCELLED: -2 +}; + +export const JOB_STATUS = { + PENDING: 0, + RESOLVED: 1, + REJECTED: -1, + CANCELLED: -2 +}; + +export const BRANCH_INDEX = { + DEFAULT: null, + ON_TRUE: 1, + ON_FALSE: 0 +}; diff --git a/packages/plugin-workflow/src/instructions/condition.ts b/packages/plugin-workflow/src/instructions/condition.ts new file mode 100644 index 000000000..b38df688c --- /dev/null +++ b/packages/plugin-workflow/src/instructions/condition.ts @@ -0,0 +1,110 @@ +// config: { +// not: false, +// group: { +// type: 'and', +// calculations: [ +// { +// calculator: 'time.equal', +// operands: [{ type: 'context', options: { path: 'time' } }, { type: 'fn', options: { name: 'newDate', args: [] } }] +// }, +// { +// calculator: 'value.equal', +// operands: [{ type: 'job.result', options: { id: 213, path: '' } }, { type: 'constant', value: { a: 1 } }] +// } +// ] +// } +// } + +import Sequelize = require('sequelize'); +import { getValue, Operand } from "../utils/getter"; +import { getCalculator } from "../utils/calculators"; +import { JOB_STATUS } from "../constants"; + +type BaseCalculation = { + not?: boolean; +}; + +type SingleCalculation = BaseCalculation & { + calculation: string; + operands?: Operand[]; +}; + +type GroupCalculationOptions = { + type: 'and' | 'or'; + calculations: Calculation[] +}; + +type GroupCalculation = BaseCalculation & { + group: GroupCalculationOptions +}; + +// TODO(type) +type Calculation = SingleCalculation | GroupCalculation; + +function calculate(config, input, execution) { + if (!config) { + return true; + } + + const { not, group } = config; + let result; + if (group) { + const method = group.type === 'and' ? 'every' : 'some'; + result = group.calculations[method](calculation => calculate(calculation, input, execution)); + } else { + const args = config.operands.map(operand => getValue(operand, input, execution)); + const fn = getCalculator(config.calculator); + if (!fn) { + throw new Error(`no calculator function registered for "${config.calculator}"`); + } + result = fn(...args); + } + + return not ? !result : result; +} + + +export default { + async run(this, prevJob, execution) { + // TODO(optimize): loading of jobs could be reduced and turned into incrementally in execution + // const jobs = await execution.getJobs(); + const { calculation } = this.config || {}; + const result = calculate(calculation, prevJob, execution); + + if (!result && this.config.rejectOnFalse) { + return { + status: JOB_STATUS.REJECTED, + result + }; + } + + const job = { + status: JOB_STATUS.RESOLVED, + result, + // TODO(optimize): try unify the building of job + nodeId: this.id, + upstreamId: prevJob instanceof Sequelize.Model ? 
prevJob.get('id') : null + }; + + const branchNode = execution.nodes + .find(item => item.upstream === this && Boolean(item.branchIndex) === result); + + if (!branchNode) { + return job; + } + + const savedJob = await execution.saveJob(job); + + return execution.exec(branchNode, savedJob); + }, + + async resume(this, branchJob, execution) { + if (branchJob.status === JOB_STATUS.RESOLVED) { + // return to continue with this.downstream + return branchJob; + } + + // pass control to the upper scope by ending the current scope + return execution.end(this, branchJob); + } +} diff --git a/packages/plugin-workflow/src/instructions/index.ts b/packages/plugin-workflow/src/instructions/index.ts new file mode 100644 index 000000000..65a97e483 --- /dev/null +++ b/packages/plugin-workflow/src/instructions/index.ts @@ -0,0 +1,44 @@ +import ExecutionModel from "../models/Execution"; +import FlowNodeModel from "../models/FlowNode"; + +import prompt from './prompt'; +import condition from './condition'; +import parallel from './parallel'; + +export interface Job { + status: number; + result: unknown; + [key: string]: unknown; +} + +export type InstructionResult = Job | Promise<Job>; + +// what should an instruction do? +// - based on the input and context, do any calculation or system call (io), and produce a result or stay pending. +export interface Instruction { + run( + this: FlowNodeModel, + // what should the input be? + // - just the previous node's output result, for convenience? + input: any, + // what should the context be? + // - the workflow execution object (containing context data) + execution: ExecutionModel + ): InstructionResult; + // for a branching node to resume once a manually triggered job in its sub-branch has been settled + resume?(this: FlowNodeModel, input: any, execution: ExecutionModel): InstructionResult; +} + +const registry = new Map(); + +export function getInstruction(key: string): Instruction { + return registry.get(key); +} + +export function registerInstruction(key: string, instruction: any) { + registry.set(key, instruction); +} + +registerInstruction('prompt', prompt); +registerInstruction('condition', condition); +registerInstruction('parallel', parallel); diff --git a/packages/plugin-workflow/src/instructions/parallel.ts b/packages/plugin-workflow/src/instructions/parallel.ts new file mode 100644 index 000000000..9c3a775f4 --- /dev/null +++ b/packages/plugin-workflow/src/instructions/parallel.ts @@ -0,0 +1,92 @@ +import { JOB_STATUS } from "../constants"; +import ExecutionModel from "../models/Execution"; +import FlowNodeModel from "../models/FlowNode"; +import JobModel from "../models/Job"; + +export const PARALLEL_MODE = { + ALL: 'all', + ANY: 'any', + RACE: 'race' +} as const; + +const StatusGetters = { + [PARALLEL_MODE.ALL](result) { + if (result.some(j => j && j.status === JOB_STATUS.REJECTED)) { + return JOB_STATUS.REJECTED; + } + if (result.every(j => j && j.status === JOB_STATUS.RESOLVED)) { + return JOB_STATUS.RESOLVED; + } + return JOB_STATUS.PENDING; + }, + [PARALLEL_MODE.ANY](result) { + return result.some(j => j && j.status === JOB_STATUS.RESOLVED) + ? JOB_STATUS.RESOLVED + : ( + result.some(j => j && j.status === JOB_STATUS.PENDING) + ? JOB_STATUS.PENDING + : JOB_STATUS.REJECTED + ) + }, + [PARALLEL_MODE.RACE](result) { + return result.some(j => j && j.status === JOB_STATUS.RESOLVED) + ? JOB_STATUS.RESOLVED + : ( + result.some(j => j && j.status === JOB_STATUS.REJECTED) + ?
JOB_STATUS.REJECTED + : JOB_STATUS.PENDING + ) + } +}; + +export default { + async run(this: FlowNodeModel, prevJob: JobModel, execution: ExecutionModel) { + const branches = execution.nodes + .filter(item => item.upstream === this && item.branchIndex !== null) + .sort((a, b) => a.branchIndex - b.branchIndex); + + const job = await execution.saveJob({ + status: JOB_STATUS.PENDING, + result: Array(branches.length).fill(null), + nodeId: this.id, + upstreamId: prevJob?.id ?? null + }); + + // NOTE: + // use `reduce` but not `Promise.all` here to avoid racing manupulating db. + // for users, this is almost equivalent to `Promise.all`, + // because of the delay is not significant sensible. + // another better aspect of this is, it could handle sequenced branches in future. + await branches.reduce((promise: Promise, branch) => promise.then(() => execution.exec(branch, job)), Promise.resolve()); + + return execution.end(this, job); + }, + + async resume(this, branchJob, execution: ExecutionModel) { + const job = execution.findBranchParentJob(branchJob, this); + + const { result, status } = job; + // if parallel has been done (resolved / rejected), do not care newly executed branch jobs. + if (status !== JOB_STATUS.PENDING) { + return null; + } + + // find the index of the node which start the branch + const jobNode = execution.nodesMap.get(branchJob.nodeId); + const { branchIndex } = execution.findBranchStartNode(jobNode); + const { mode = PARALLEL_MODE.ALL } = this.config || {}; + + const newResult = [...result.slice(0, branchIndex), branchJob.get(), ...result.slice(branchIndex + 1)]; + job.set({ + result: newResult, + status: StatusGetters[mode](newResult) + }); + + if (job.status === JOB_STATUS.PENDING) { + await job.save({ transaction: execution.transaction }); + return execution.end(this, job); + } + + return job; + } +} diff --git a/packages/plugin-workflow/src/instructions/prompt.ts b/packages/plugin-workflow/src/instructions/prompt.ts new file mode 100644 index 000000000..ec3455eb4 --- /dev/null +++ b/packages/plugin-workflow/src/instructions/prompt.ts @@ -0,0 +1,14 @@ +import { JOB_STATUS } from "../constants"; + +export default { + run(this, input, execution) { + return { + status: JOB_STATUS.PENDING + }; + }, + + resume(this, job, execution) { + job.set('status', JOB_STATUS.RESOLVED); + return job; + } +} diff --git a/packages/plugin-workflow/src/models/Execution.ts b/packages/plugin-workflow/src/models/Execution.ts new file mode 100644 index 000000000..26dc50dc1 --- /dev/null +++ b/packages/plugin-workflow/src/models/Execution.ts @@ -0,0 +1,254 @@ +import { + Model, + BelongsToGetAssociationMixin, + Optional, + HasManyGetAssociationsMixin, + Transaction +} from 'sequelize'; + +import Database from '@nocobase/database'; + +import { EXECUTION_STATUS, JOB_STATUS } from '../constants'; +import { getInstruction } from '../instructions'; +import WorkflowModel from './Workflow'; +import FlowNodeModel from './FlowNode'; +import JobModel from './Job'; + +interface ExecutionAttributes { + id: number; + title: string; + context: any; + status: number; +} + +interface ExecutionCreationAttributes extends Optional {} + +export interface ExecutionOptions { + transaction?: Transaction; +} + +export default class ExecutionModel + extends Model + implements ExecutionAttributes { + + declare static readonly database: Database; + + declare id: number; + declare title: string; + declare context: any; + declare status: number; + + declare createdAt: Date; + declare updatedAt: Date; + + declare workflow?: 
WorkflowModel; + declare getWorkflow: BelongsToGetAssociationMixin; + + declare jobs?: JobModel[]; + declare getJobs: HasManyGetAssociationsMixin; + + options: ExecutionOptions; + transaction: Transaction; + + nodes: Array = []; + nodesMap = new Map(); + jobsMap = new Map(); + + static StatusMap = { + [JOB_STATUS.PENDING]: EXECUTION_STATUS.STARTED, + [JOB_STATUS.RESOLVED]: EXECUTION_STATUS.RESOLVED, + [JOB_STATUS.REJECTED]: EXECUTION_STATUS.REJECTED, + [JOB_STATUS.CANCELLED]: EXECUTION_STATUS.CANCELLED, + }; + + // make dual linked nodes list then cache + makeNodes(nodes = []) { + this.nodes = nodes; + + nodes.forEach(node => { + this.nodesMap.set(node.id, node); + }); + + nodes.forEach(node => { + if (node.upstreamId) { + node.upstream = this.nodesMap.get(node.upstreamId); + } + + if (node.downstreamId) { + node.downstream = this.nodesMap.get(node.downstreamId); + } + }); + } + + makeJobs(jobs: Array) { + jobs.forEach(job => { + this.jobsMap.set(job.id, job); + }); + } + + async prepare(options) { + if (this.status !== EXECUTION_STATUS.STARTED) { + throw new Error(`execution was ended with status ${this.status}`); + } + + this.options = options || {}; + const { transaction = await (this.constructor).database.sequelize.transaction() } = this.options; + this.transaction = transaction; + + if (!this.workflow) { + this.workflow = await this.getWorkflow({ transaction }); + } + + const nodes = await this.workflow.getNodes({ transaction }); + + this.makeNodes(nodes); + + const jobs = await this.getJobs({ transaction }); + + this.makeJobs(jobs); + } + + async start(options: ExecutionOptions) { + await this.prepare(options); + if (this.nodes.length) { + const head = this.nodes.find(item => !item.upstream); + await this.exec(head, { result: this.context }); + } else { + await this.exit(null); + } + await this.commit(); + } + + async resume(job: JobModel, options: ExecutionOptions) { + await this.prepare(options); + const node = this.nodesMap.get(job.nodeId); + await this.recall(node, job); + await this.commit(); + } + + private async commit() { + if (!this.options || !this.options.transaction) { + await this.transaction.commit(); + } + } + + private async run(instruction, node: FlowNodeModel, prevJob) { + let job; + try { + // call instruction to get result and status + job = await instruction.call(node, prevJob, this); + if (!job) { + return null; + } + } catch (err) { + // for uncaught error, set to rejected + job = { + result: err instanceof Error ? err.toString() : err, + status: JOB_STATUS.REJECTED + }; + // if previous job is from resuming + if (prevJob && prevJob.nodeId === node.id) { + prevJob.set(job); + job = prevJob; + } + } + + let savedJob: JobModel; + // TODO(optimize): many checking of resuming or new could be improved + // could be implemented separately in exec() / resume() + if (job instanceof Model) { + savedJob = await job.save({ transaction: this.transaction }) as JobModel; + } else { + const upstreamId = prevJob instanceof Model ? prevJob.get('id') : null; + savedJob = await this.saveJob({ + nodeId: node.id, + upstreamId, + ...job + }); + } + + if (savedJob.get('status') === JOB_STATUS.RESOLVED && node.downstream) { + // run next node + return this.exec(node.downstream, savedJob); + } + + // all nodes in scope have been executed + return this.end(node, savedJob); + } + + async exec(node, input?) 
{ + const { run } = getInstruction(node.type); + + return this.run(run, node, input); + } + + // parent node should take over the control + end(node, job) { + const parentNode = this.findBranchParentNode(node); + // no parent, means on main flow + if (parentNode) { + return this.recall(parentNode, job); + } + + // really done for all nodes + // * should mark execution as done with last job status + return this.exit(job); + } + + async recall(node, job) { + const { resume } = getInstruction(node.type); + if (!resume) { + return Promise.reject(new Error('`resume` should be implemented because the node made branch')); + } + + return this.run(resume, node, job); + } + + async exit(job: JobModel | null) { + const status = job ? ExecutionModel.StatusMap[job.status] : EXECUTION_STATUS.RESOLVED; + await this.update({ status }, { transaction: this.transaction }); + return null; + } + + // TODO(optimize) + async saveJob(payload) { + const { database } = this.constructor; + const { model } = database.getCollection('jobs'); + const [result] = await model.upsert({ + ...payload, + executionId: this.id + }, { transaction: this.transaction }) as [JobModel, boolean | null]; + this.jobsMap.set(result.id, result); + + return result; + } + + // find the first node in current branch + findBranchStartNode(node: FlowNodeModel): FlowNodeModel | null { + for (let n = node; n; n = n.upstream) { + if (n.branchIndex !== null) { + return n; + } + } + return null; + } + + // find the node start current branch + findBranchParentNode(node: FlowNodeModel): FlowNodeModel | null { + for (let n = node; n; n = n.upstream) { + if (n.branchIndex !== null) { + return n.upstream; + } + } + return null; + } + + findBranchParentJob(job: JobModel, node: FlowNodeModel): JobModel | null { + for (let j = job; j; j = this.jobsMap.get(j.upstreamId)) { + if (j.nodeId === node.id) { + return j; + } + } + return null; + } +} diff --git a/packages/plugin-workflow/src/models/FlowNode.ts b/packages/plugin-workflow/src/models/FlowNode.ts new file mode 100644 index 000000000..4b9bf8db2 --- /dev/null +++ b/packages/plugin-workflow/src/models/FlowNode.ts @@ -0,0 +1,19 @@ +import { Model, BelongsToGetAssociationMixin } from 'sequelize'; +import WorkflowModel from './Workflow'; + +export default class FlowNodeModel extends Model { + declare id: number; + declare title: string; + declare branchIndex: null | number; + declare type: string; + declare config: any; + + declare createdAt: Date; + declare updatedAt: Date; + + declare upstream: FlowNodeModel; + declare downstream: FlowNodeModel; + + declare workflow?: WorkflowModel; + declare getWorkflow: BelongsToGetAssociationMixin; +} \ No newline at end of file diff --git a/packages/plugin-workflow/src/models/Job.ts b/packages/plugin-workflow/src/models/Job.ts new file mode 100644 index 000000000..fc92306a4 --- /dev/null +++ b/packages/plugin-workflow/src/models/Job.ts @@ -0,0 +1,18 @@ +import { Model, BelongsToGetAssociationMixin } from 'sequelize'; +import FlowNodeModel from './FlowNode'; + +export default class JobModel extends Model { + declare id: number; + declare status: number; + declare result: any; + + declare createdAt: Date; + declare updatedAt: Date; + + declare upstreamId: number; + declare upstream: JobModel; + + declare nodeId: number; + declare node?: FlowNodeModel; + declare getNode: BelongsToGetAssociationMixin; +} \ No newline at end of file diff --git a/packages/plugin-workflow/src/models/Workflow.ts b/packages/plugin-workflow/src/models/Workflow.ts new file mode 100644 index 
000000000..0af20d334 --- /dev/null +++ b/packages/plugin-workflow/src/models/Workflow.ts @@ -0,0 +1,76 @@ +import { Model, HasManyGetAssociationsMixin, HasManyCreateAssociationMixin } from 'sequelize'; + +import Database from '@nocobase/database'; + +import { get as getTrigger } from '../triggers'; +import { EXECUTION_STATUS } from '../constants'; +import ExecutionModel from './Execution'; +import FlowNodeModel from './FlowNode'; + +export default class WorkflowModel extends Model { + declare static database: Database; + + declare id: number; + declare title: string; + declare enabled: boolean; + declare description?: string; + declare type: string; + declare config: any; + + declare createdAt: Date; + declare updatedAt: Date; + + declare nodes: FlowNodeModel[]; + declare getNodes: HasManyGetAssociationsMixin; + declare createNode: HasManyCreateAssociationMixin; + + declare executions: ExecutionModel[]; + declare getExecutions: HasManyGetAssociationsMixin; + declare createExecution: HasManyCreateAssociationMixin; + + static async mount() { + const collection = this.database.getCollection('workflows'); + const workflows = await collection.repository.find({ + filter: { enabled: true } + }); + + workflows.forEach((workflow: WorkflowModel) => { + workflow.toggle(); + }); + + this.addHook('afterCreate', (model: WorkflowModel) => model.toggle()); + this.addHook('afterUpdate', (model: WorkflowModel) => model.toggle()); + this.addHook('afterDestroy', (model: WorkflowModel) => model.toggle(false)); + } + + getHookId() { + return `workflow-${this.get('id')}`; + } + + async toggle(enable?: boolean) { + const type = this.get('type'); + const { on, off } = getTrigger(type); + if (typeof enable !== 'undefined' ? enable : this.get('enabled')) { + on.call(this, this.start.bind(this)); + } else { + off.call(this); + } + } + + async start(context: Object, options) { + // `null` means not to trigger + if (context === null) { + return; + } + + const execution = await this.createExecution({ + context, + status: EXECUTION_STATUS.STARTED + }); + + execution.workflow = this; + + await execution.start(options); + return execution; + } +} diff --git a/packages/plugin-workflow/src/server.ts b/packages/plugin-workflow/src/server.ts new file mode 100644 index 000000000..6a8c4ce83 --- /dev/null +++ b/packages/plugin-workflow/src/server.ts @@ -0,0 +1,59 @@ +import path from 'path'; + +import WorkflowModel from './models/Workflow'; +import ExecutionModel from './models/Execution'; + +export default { + name: 'workflow', + async load(options = {}) { + const { db } = this.app; + + db.registerModels({ + WorkflowModel, + ExecutionModel, + }); + + await db.import({ + directory: path.resolve(__dirname, 'collections'), + }); + + // [Life Cycle]: + // * load all workflows in db + // * add all hooks for enabled workflows + // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks + this.app.on('beforeStart', async () => { + const { model } = db.getCollection('workflows'); + await model.mount(); + }) + + // [Life Cycle]: initialize all necessary seed data + this.app.on('db.init', async () => { + + }); + + // const [Automation, AutomationJob] = database.getModels(['automations', 'automations_jobs']); + + // Automation.addHook('afterCreate', async (model: AutomationModel) => { + // model.get('enabled') && await model.loadJobs(); + // }); + + // Automation.addHook('afterUpdate', async (model: AutomationModel) => { + // if (!model.changed('enabled' as any)) { + // return; + // } + // model.get('enabled') ? 
await model.loadJobs() : await model.cancelJobs(); + // }); + + // Automation.addHook('beforeDestroy', async (model: AutomationModel) => { + // await model.cancelJobs(); + // }); + + // AutomationJob.addHook('afterCreate', async (model: AutomationJobModel) => { + // await model.bootstrap(); + // }); + + // AutomationJob.addHook('beforeDestroy', async (model: AutomationJobModel) => { + // await model.cancel(); + // }); + } +} diff --git a/packages/plugin-workflow/src/triggers/index.ts b/packages/plugin-workflow/src/triggers/index.ts new file mode 100644 index 000000000..a48982e81 --- /dev/null +++ b/packages/plugin-workflow/src/triggers/index.ts @@ -0,0 +1,20 @@ +import WorkflowModel from '../models/Workflow'; +import modelTrigger from './model'; + +export interface Trigger { + name: string; + on(this: WorkflowModel, callback: Function): void; + off(this: WorkflowModel): void; +} + +const triggers = new Map(); + +export function register(type: string, trigger: Trigger): void { + triggers.set(type, trigger); +} + +export function get(type: string): Trigger | undefined { + return triggers.get(type); +} + +register(modelTrigger.name, modelTrigger); diff --git a/packages/plugin-workflow/src/triggers/model.ts b/packages/plugin-workflow/src/triggers/model.ts new file mode 100644 index 000000000..2d848e4ea --- /dev/null +++ b/packages/plugin-workflow/src/triggers/model.ts @@ -0,0 +1,44 @@ +import WorkflowModel from "../models/Workflow"; + +export interface ModelChangeTriggerConfig { + collection: string; + // TODO: ICondition + filter: any; +} + +const MODE_BITMAP = { + CREATE: 1, + UPDATE: 2, + DESTROY: 4 +}; + +const MODE_BITMAP_EVENTS = new Map(); +MODE_BITMAP_EVENTS.set(MODE_BITMAP.CREATE, 'afterCreate'); +MODE_BITMAP_EVENTS.set(MODE_BITMAP.UPDATE, 'afterUpdate'); +MODE_BITMAP_EVENTS.set(MODE_BITMAP.DESTROY, 'afterDestroy'); + +export default { + name: 'model', + on(this: WorkflowModel, callback: Function) { + const { database } = this.constructor; + const { collection, mode } = this.config; + const { model } = database.getCollection(collection); + const handler = (data: any, options) => callback({ data }, options); + // TODO: duplication when mode change should be considered + for (let [key, event] of MODE_BITMAP_EVENTS.entries()) { + if (mode & key) { + model.addHook(event, this.getHookId(), handler); + } + } + }, + off(this: WorkflowModel) { + const { database } = this.constructor; + const { collection, mode } = this.config; + const { model } = database.getCollection(collection); + for (let [key, event] of MODE_BITMAP_EVENTS.entries()) { + if (mode & key) { + model.removeHook(event, this.getHookId()); + } + } + } +} \ No newline at end of file diff --git a/packages/plugin-workflow/src/utils/calculators.ts b/packages/plugin-workflow/src/utils/calculators.ts new file mode 100644 index 000000000..33cfd16fe --- /dev/null +++ b/packages/plugin-workflow/src/utils/calculators.ts @@ -0,0 +1,52 @@ +type Calculator = (...args: any[]) => any; + +const calculators = new Map(); + +export function getCalculator(type: string): Calculator { + return calculators.get(type); +} + +export function registerCalculator(type: string, fn: Calculator) { + calculators.set(type, fn); +} + +export function registerCalculators(calculators) { + Object.keys(calculators).forEach(key => { + registerCalculator(key, calculators[key]); + }); +} + +function equal(a, b) { + return a === b; +} + +function gt(a, b) { + return a > b; +} + +function gte(a, b) { + return a >= b; +} + +function lt(a, b) { + return a < b; +} + 
+function lte(a, b) { + return a <= b; +} + +// TODO: add more common calculators + +registerCalculators({ + equal, + gt, + gte, + lt, + lte, + '===': equal, + '>': gt, + '>=': gte, + '<': lt, + '<=': lte +}); diff --git a/packages/plugin-workflow/src/utils/getter.ts b/packages/plugin-workflow/src/utils/getter.ts new file mode 100644 index 000000000..13b8d746c --- /dev/null +++ b/packages/plugin-workflow/src/utils/getter.ts @@ -0,0 +1,55 @@ +import { get } from 'lodash'; + +import ExecutionModel from '../models/Execution'; + +export type OperandType = 'context' | 'input' | 'job'; + +export type ObjectGetterOptions = { + path?: string +}; + +export type JobGetterOptions = ObjectGetterOptions & { + id: number +}; + +export type ConstantOperand = { + type?: 'constant'; + value: any +}; + +export type ContextOperand = { + type: 'context'; + options: ObjectGetterOptions; +}; + +export type InputOperand = { + type: 'input'; + options: ObjectGetterOptions; +}; + +export type JobOperand = { + type: 'job'; + options: JobGetterOptions; +}; + +export type Operand = ContextOperand | InputOperand | JobOperand | ConstantOperand; + +// TODO: other instructions may also use this method, could be moved to utils. +export function getValue(operand: Operand, input: any, execution: ExecutionModel) { + switch (operand.type) { + // from execution context + case 'context': + return get(execution, operand.options.path); + // from input from last job or manual + case 'input': + return get(input, operand.options.path); + // from job in execution + case 'job': + // assume jobs have been fetched from execution before + const job = execution.jobsMap.get(operand.options.id); + return get(job, operand.options.path); + // constant + default: + return operand.value; + } +}
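
Note on extensibility (not part of the diff above): `registerInstruction()` exported from `src/instructions` is how the test helper in `src/__tests__/index.ts` injects its `echo`, `error` and `prompt->error` node types, so a custom node type can be wired up the same way. The sketch below is a hypothetical example of that pattern — the `log` key and its `config.value` field are invented for illustration; only `registerInstruction`, `JOB_STATUS`, the `run()` contract and the model classes come from the code in this diff.

```ts
import { registerInstruction } from './instructions';
import { JOB_STATUS } from './constants';
import FlowNodeModel from './models/FlowNode';
import ExecutionModel from './models/Execution';

// Hypothetical 'log' node type: logs a value taken from the node's own config
// (falling back to the previous job's result) and resolves with it, so the
// value is passed on to downstream nodes just like the test-only 'echo' type.
registerInstruction('log', {
  run(this: FlowNodeModel, prevJob: any, execution: ExecutionModel) {
    const value = this.config && this.config.value !== undefined
      ? this.config.value
      : prevJob && prevJob.result;
    console.log(`[execution ${execution.id}] node ${this.id} (${this.title}):`, value);
    return {
      status: JOB_STATUS.RESOLVED,
      result: value,
    };
  },
});
```

With that registered, a node created as `workflow.createNode({ type: 'log', title: 'log', config: { value: 'hello' } })` should produce a resolved job whose `result` is `'hello'`, matching the behaviour the tests exercise with `echo`.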