From ecf82208eb07b5cfb09dc1b898f751448c597616 Mon Sep 17 00:00:00 2001 From: Junyi Date: Mon, 20 Jun 2022 23:29:21 +0800 Subject: [PATCH] refactor(plugin-workflow): abstract to classes (#515) * refactor(plugin-workflow): abstract to classes * fix(plugin-workflow): add indexes and fix cases * test(plugin-workflow): skip schedule cases --- .../core/database/src/collection-importer.ts | 32 +- packages/core/utils/src/index.ts | 1 + packages/core/utils/src/requireModule.ts | 11 + packages/plugins/workflow/src/Plugin.ts | 176 +++++++++++ packages/plugins/workflow/src/Processor.ts | 294 ++++++++++++++++++ .../{execution.test.ts => Processor.test.ts} | 27 +- .../plugins/workflow/src/__tests__/index.ts | 62 ++-- .../src/__tests__/triggers/schedule.test.ts | 47 ++- .../workflow/src/__tests__/workflow.test.ts | 2 +- .../plugins/workflow/src/calculators/index.ts | 10 +- .../workflow/src/collections/executions.ts | 1 - .../plugins/workflow/src/collections/jobs.ts | 31 +- .../workflow/src/collections/workflows.ts | 153 ++++----- packages/plugins/workflow/src/index.ts | 5 +- .../workflow/src/instructions/calculation.ts | 6 +- .../workflow/src/instructions/condition.ts | 24 +- .../workflow/src/instructions/create.ts | 6 +- .../workflow/src/instructions/destroy.ts | 6 +- .../workflow/src/instructions/index.ts | 41 ++- .../workflow/src/instructions/parallel.ts | 28 +- .../workflow/src/instructions/prompt.ts | 4 +- .../workflow/src/instructions/query.ts | 9 +- .../workflow/src/instructions/update.ts | 10 +- .../plugins/workflow/src/models/Execution.ts | 278 +---------------- .../plugins/workflow/src/models/Workflow.ts | 76 +---- packages/plugins/workflow/src/server.ts | 96 ------ .../workflow/src/triggers/collection.ts | 34 +- .../plugins/workflow/src/triggers/index.ts | 26 +- .../plugins/workflow/src/triggers/schedule.ts | 56 ++-- 29 files changed, 804 insertions(+), 748 deletions(-) create mode 100644 packages/core/utils/src/requireModule.ts create mode 100644 packages/plugins/workflow/src/Plugin.ts create mode 100644 packages/plugins/workflow/src/Processor.ts rename packages/plugins/workflow/src/__tests__/{execution.test.ts => Processor.test.ts} (94%) delete mode 100644 packages/plugins/workflow/src/server.ts diff --git a/packages/core/database/src/collection-importer.ts b/packages/core/database/src/collection-importer.ts index 11de9b46e..2dcae4e45 100644 --- a/packages/core/database/src/collection-importer.ts +++ b/packages/core/database/src/collection-importer.ts @@ -1,20 +1,10 @@ -import * as fs from 'fs'; -import lodash from 'lodash'; import path from 'path'; +import { readdir } from 'fs/promises'; +import { isPlainObject } from 'lodash'; +import { requireModule } from '@nocobase/utils'; export type ImportFileExtension = 'js' | 'ts' | 'json'; -async function requireModule(module: any) { - if (typeof module === 'string') { - module = require(module); - } - - if (typeof module !== 'object') { - return module; - } - return module.__esModule ? 
module.default : module; -} - export class ImporterReader { directory: string; extensions: Set; @@ -30,11 +20,10 @@ export class ImporterReader { } async read() { - const modules = ( - await fs.promises.readdir(this.directory, { - encoding: 'utf-8', - }) - ) + const files = await readdir(this.directory, { + encoding: 'utf-8', + }); + const modules = files .filter((fileName) => { if (fileName.endsWith('.d.ts')) { return false; @@ -42,8 +31,11 @@ export class ImporterReader { const ext = path.parse(fileName).ext.replace('.', ''); return this.extensions.has(ext); }) - .map(async (fileName) => await requireModule(path.join(this.directory, fileName))); + .map((fileName) => { + const mod = requireModule(path.join(this.directory, fileName)); + return typeof mod === 'function' ? mod() : mod; + }); - return (await Promise.all(modules)).filter((module) => lodash.isPlainObject(module)); + return (await Promise.all(modules)).filter((module) => isPlainObject(module)); } } diff --git a/packages/core/utils/src/index.ts b/packages/core/utils/src/index.ts index cdf6e00b5..3982d5b96 100644 --- a/packages/core/utils/src/index.ts +++ b/packages/core/utils/src/index.ts @@ -2,4 +2,5 @@ export * from './merge'; export * from './mixin'; export * from './mixin/AsyncEmitter'; export * from './registry'; +export * from './requireModule'; export * from './uid'; diff --git a/packages/core/utils/src/requireModule.ts b/packages/core/utils/src/requireModule.ts new file mode 100644 index 000000000..2769840d1 --- /dev/null +++ b/packages/core/utils/src/requireModule.ts @@ -0,0 +1,11 @@ +export function requireModule(module: any) { + if (typeof module === 'string') { + module = require(module); + } + if (typeof module !== 'object') { + return module; + } + return module.__esModule ? 
module.default : module; +} + +export default requireModule; diff --git a/packages/plugins/workflow/src/Plugin.ts b/packages/plugins/workflow/src/Plugin.ts new file mode 100644 index 000000000..d3918f4f7 --- /dev/null +++ b/packages/plugins/workflow/src/Plugin.ts @@ -0,0 +1,176 @@ +import path from 'path'; + +import { Plugin } from '@nocobase/server'; +import { Op, Transactionable } from '@nocobase/database'; +import { Registry } from '@nocobase/utils'; + +import initActions from './actions'; +import initTriggers, { Trigger } from './triggers'; +import initInstructions, { Instruction } from './instructions'; +import Processor from './Processor'; +import WorkflowModel from './models/Workflow'; +import ExecutionModel from './models/Execution'; +import { EXECUTION_STATUS } from './constants'; + + + +async function setCurrent(instance: WorkflowModel, options) { + const updates: { enabled?: boolean, current?: boolean } = {}; + + if (!instance.changed('enabled')) { + return; + } + + if (instance.enabled) { + instance.set('current', true); + updates.enabled = false; + } + + if (instance.current) { + // NOTE: set to `null` but not `false` will not violate the unique index + updates.current = null; + const previous = await (instance.constructor).findOne({ + where: { + key: instance.key, + current: true + } + }); + + if (previous) { + await previous.update(updates, { + transaction: options.transaction + }); + } + } +} + +export default class WorkflowPlugin extends Plugin { + instructions: Registry = new Registry(); + triggers: Registry = new Registry(); + + getName(): string { + return this.getPackageName(__dirname); + } + + async load() { + const { db, options } = this; + + await db.import({ + directory: path.resolve(__dirname, 'collections'), + }); + + initActions(this); + initTriggers(this, options.triggers); + initInstructions(this, options.instructions); + + db.on('workflows.beforeSave', setCurrent); + db.on('workflows.afterSave', (model: WorkflowModel) => this.toggle(model)); + db.on('workflows.afterDestroy', (model: WorkflowModel) => this.toggle(model, false)); + + // [Life Cycle]: + // * load all workflows in db + // * add all hooks for enabled workflows + // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks + this.app.on('beforeStart', async () => { + const collection = db.getCollection('workflows'); + const workflows = await collection.repository.find({ + filter: { enabled: true }, + }); + + workflows.forEach((workflow: WorkflowModel) => { + this.toggle(workflow); + }); + }); + // [Life Cycle]: initialize all necessary seed data + // this.app.on('db.init', async () => {}); + } + + toggle(workflow: WorkflowModel, enable?: boolean) { + const type = workflow.get('type'); + const trigger = this.triggers.get(type); + if (typeof enable !== 'undefined' ? enable : workflow.get('enabled')) { + // NOTE: remove previous listener if config updated + const prev = workflow.previous(); + if (prev.config) { + trigger.off({ ...workflow.get(), ...prev }); + } + trigger.on(workflow); + } else { + trigger.off(workflow); + } + } + + async trigger(workflow, context: Object, options: Transactionable = {}) { + // `null` means not to trigger + if (context === null) { + return; + } + + let transaction = null; + + if (workflow.useTransaction) { + // @ts-ignore + transaction = options.transaction && !options.transaction.finished + ? 
options.transaction + : await (workflow.constructor).database.sequelize.transaction(); + + const existed = await workflow.countExecutions({ + where: { + transaction: transaction.id + }, + transaction + }); + + if (existed) { + console.warn(`workflow ${workflow.id} has already been triggered in same execution (${transaction.id}), and newly triggering will be skipped.`); + return; + } + } + + const execution = await workflow.createExecution({ + context, + key: workflow.key, + status: EXECUTION_STATUS.STARTED, + useTransaction: workflow.useTransaction, + transaction: transaction.id + }, { transaction }); + + const executed = await workflow.countExecutions({ transaction }); + + // NOTE: not to trigger afterUpdate hook here + await workflow.update({ executed }, { transaction, hooks: false }); + + const allExecuted = await (execution.constructor).count({ + where: { + key: workflow.key + }, + transaction + }); + await (workflow.constructor).update({ + allExecuted + }, { + where: { + key: workflow.key + }, + individualHooks: true, + transaction + }); + + execution.workflow = workflow; + + const processor = this.createProcessor(execution, { transaction }); + + await processor.start(); + + // @ts-ignore + if (transaction && (!options.transaction || options.transaction.finished)) { + await transaction.commit(); + } + + return execution; + } + + createProcessor(execution: ExecutionModel, options = {}): Processor { + return new Processor(execution, { ...options, plugin: this }); + } +} diff --git a/packages/plugins/workflow/src/Processor.ts b/packages/plugins/workflow/src/Processor.ts new file mode 100644 index 000000000..6f103c3aa --- /dev/null +++ b/packages/plugins/workflow/src/Processor.ts @@ -0,0 +1,294 @@ +import { Transaction, Transactionable } from 'sequelize'; +import parse from 'json-templates'; + +import { Model } from "@nocobase/database"; + +import Plugin from '.'; +import ExecutionModel from './models/Execution'; +import JobModel from './models/Job'; +import FlowNodeModel from './models/FlowNode'; +import calculators from './calculators'; +import { EXECUTION_STATUS, JOB_STATUS } from './constants'; + + + +export interface ProcessorOptions extends Transactionable { + plugin: Plugin +} + + + +export default class Processor { + static StatusMap = { + [JOB_STATUS.PENDING]: EXECUTION_STATUS.STARTED, + [JOB_STATUS.RESOLVED]: EXECUTION_STATUS.RESOLVED, + [JOB_STATUS.REJECTED]: EXECUTION_STATUS.REJECTED, + [JOB_STATUS.CANCELLED]: EXECUTION_STATUS.CANCELLED, + }; + + transaction: Transaction; + + nodes: FlowNodeModel[] = []; + nodesMap = new Map(); + jobsMap = new Map(); + jobsMapByNodeId: { [key: number]: any } = {}; + + constructor(public execution: ExecutionModel, private options: ProcessorOptions) { + } + + // make dual linked nodes list then cache + private makeNodes(nodes = []) { + this.nodes = nodes; + + nodes.forEach((node) => { + this.nodesMap.set(node.id, node); + }); + + nodes.forEach((node) => { + if (node.upstreamId) { + node.upstream = this.nodesMap.get(node.upstreamId); + } + + if (node.downstreamId) { + node.downstream = this.nodesMap.get(node.downstreamId); + } + }); + } + + private makeJobs(jobs: Array) { + jobs.forEach((job) => { + this.jobsMap.set(job.id, job); + // TODO: should consider cycle, and from previous job + this.jobsMapByNodeId[job.nodeId] = job.result; + }); + } + + private async getTransaction() { + if (!this.execution.useTransaction) { + return; + } + + const { options } = this; + + const { sequelize } = (this.execution.constructor).database; + + // @ts-ignore + 
const transaction = options.transaction && !options.transaction.finished + ? options.transaction + : await sequelize.transaction(); + + // @ts-ignore + if (this.execution.transaction !== transaction.id) { + + // @ts-ignore + await this.execution.update({ transaction: transaction.id }, { transaction }); + } + return transaction; + } + + async prepare(commit?: boolean) { + const transaction = await this.getTransaction(); + this.transaction = transaction; + + const { execution } = this; + if (!execution.workflow) { + execution.workflow = await execution.getWorkflow({ transaction }); + } + + const nodes = await execution.workflow.getNodes({ transaction }); + + this.makeNodes(nodes); + + const jobs = await execution.getJobs({ + order: [['id', 'ASC']], + transaction, + }); + + this.makeJobs(jobs); + + if (commit) { + await this.commit(); + } + } + + public async start() { + const { execution } = this; + if (execution.status !== EXECUTION_STATUS.STARTED) { + throw new Error(`execution was ended with status ${execution.status}`); + } + await this.prepare(); + if (this.nodes.length) { + const head = this.nodes.find(item => !item.upstream); + await this.run(head, { result: execution.context }); + } else { + await this.exit(null); + } + await this.commit(); + } + + public async resume(job: JobModel) { + const { execution } = this; + if (execution.status !== EXECUTION_STATUS.STARTED) { + throw new Error(`execution was ended with status ${execution.status}`); + } + await this.prepare(); + const node = this.nodesMap.get(job.nodeId); + await this.recall(node, job); + await this.commit(); + } + + private async commit() { + // @ts-ignore + if (this.transaction && (!this.options.transaction || this.options.transaction.finished)) { + await this.transaction.commit(); + } + } + + private async exec(instruction: Function, node: FlowNodeModel, prevJob) { + let job; + try { + // call instruction to get result and status + job = await instruction.call(node, prevJob, this); + if (!job) { + return null; + } + } catch (err) { + // for uncaught error, set to rejected + job = { + result: err instanceof Error + ? { message: err.message, stack: process.env.NODE_ENV === 'production' ? [] : err.stack } + : err, + status: JOB_STATUS.REJECTED, + }; + // if previous job is from resuming + if (prevJob && prevJob.nodeId === node.id) { + prevJob.set(job); + job = prevJob; + } + } + + let savedJob; + // TODO(optimize): many checking of resuming or new could be improved + // could be implemented separately in exec() / resume() + if (job instanceof Model) { + savedJob = (await job.save({ transaction: this.transaction })) as unknown as JobModel; + } else { + const upstreamId = prevJob instanceof Model ? prevJob.get('id') : null; + savedJob = await this.saveJob({ + nodeId: node.id, + upstreamId, + ...job, + }); + } + + if (savedJob.status === JOB_STATUS.RESOLVED && node.downstream) { + // run next node + return this.run(node.downstream, savedJob); + } + + // all nodes in scope have been executed + return this.end(node, savedJob); + } + + public async run(node, input?) 
{ + const { instructions } = this.options.plugin; + const { run } = instructions.get(node.type); + if (typeof run !== 'function') { + return Promise.reject(new Error('`run` should be implemented for customized execution of the node')); + } + + return this.exec(run, node, input); + } + + // parent node should take over the control + public end(node, job) { + const parentNode = this.findBranchParentNode(node); + // no parent, means on main flow + if (parentNode) { + return this.recall(parentNode, job); + } + + // really done for all nodes + // * should mark execution as done with last job status + return this.exit(job); + } + + async recall(node, job) { + const { instructions } = this.options.plugin; + const { resume } = instructions.get(node.type); + if (typeof resume !== 'function') { + return Promise.reject(new Error('`resume` should be implemented')); + } + + return this.exec(resume, node, job); + } + + async exit(job: JobModel | null) { + const status = job ? (this.constructor).StatusMap[job.status] : EXECUTION_STATUS.RESOLVED; + await this.execution.update({ status }, { transaction: this.transaction }); + return null; + } + + // TODO(optimize) + async saveJob(payload) { + const { database } = this.execution.constructor; + const { model } = database.getCollection('jobs'); + const [job] = (await model.upsert( + { + ...payload, + executionId: this.execution.id, + }, + { transaction: this.transaction }, + )) as unknown as [JobModel, boolean | null]; + this.jobsMap.set(job.id, job); + this.jobsMapByNodeId[job.nodeId] = job.result; + + return job; + } + + // find the first node in current branch + findBranchStartNode(node: FlowNodeModel): FlowNodeModel | null { + for (let n = node; n; n = n.upstream) { + if (n.branchIndex !== null) { + return n; + } + } + return null; + } + + // find the node start current branch + findBranchParentNode(node: FlowNodeModel): FlowNodeModel | null { + for (let n = node; n; n = n.upstream) { + if (n.branchIndex !== null) { + return n.upstream; + } + } + return null; + } + + findBranchParentJob(job: JobModel, node: FlowNodeModel): JobModel | null { + for (let j = job; j; j = this.jobsMap.get(j.upstreamId)) { + if (j.nodeId === node.id) { + return j; + } + } + return null; + } + + public getParsedValue(value, node?) 
{ + const injectedFns = {}; + const scope = { + execution: this.execution, + node + }; + for (let [name, fn] of calculators.getEntities()) { + injectedFns[name] = fn.bind(scope); + } + + return parse(value)({ + $context: this.execution.context, + $jobsMapByNodeId: this.jobsMapByNodeId, + $fn: injectedFns + }); + } +} diff --git a/packages/plugins/workflow/src/__tests__/execution.test.ts b/packages/plugins/workflow/src/__tests__/Processor.test.ts similarity index 94% rename from packages/plugins/workflow/src/__tests__/execution.test.ts rename to packages/plugins/workflow/src/__tests__/Processor.test.ts index f0f4e4f4a..510547d96 100644 --- a/packages/plugins/workflow/src/__tests__/execution.test.ts +++ b/packages/plugins/workflow/src/__tests__/Processor.test.ts @@ -5,15 +5,17 @@ import { BRANCH_INDEX, EXECUTION_STATUS, JOB_STATUS } from '../constants'; -describe('execution', () => { +describe('workflow > Processor', () => { let app: Application; let db: Database; let PostRepo; let WorkflowModel; let workflow; + let plugin; beforeEach(async () => { app = await getApp(); + plugin = app.pm.get('@nocobase/plugin-workflow'); db = app.db; WorkflowModel = db.getCollection('workflows').model; @@ -52,7 +54,7 @@ describe('execution', () => { const [execution] = await workflow.getExecutions(); expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); - expect(execution.start()).rejects.toThrow(); + // expect(execution.start()).rejects.toThrow(); expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); const jobs = await execution.getJobs(); expect(jobs.length).toEqual(1); @@ -147,7 +149,8 @@ describe('execution', () => { expect(pending.result).toEqual(null); pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); @@ -179,7 +182,8 @@ describe('execution', () => { expect(pending.result).toEqual(null); pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED); const jobs = await execution.getJobs(); @@ -252,7 +256,8 @@ describe('execution', () => { const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); const jobs = await execution.getJobs(); expect(jobs.length).toEqual(3); @@ -287,7 +292,8 @@ describe('execution', () => { const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED); const jobs = await execution.getJobs(); @@ -396,7 +402,8 @@ describe('execution', () => { const [pending] = await execution.getJobs({ where: { nodeId: n2.id } }); pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); @@ -450,7 +457,8 @@ describe('execution', () => { const pending = pendingJobs.find(item => item.nodeId === n3.id ); 
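      // NOTE: a minimal sketch, assuming a caller already holds a transaction;
      // ProcessorOptions extends Transactionable, so it can be handed over roughly as:
      //   const processor = plugin.createProcessor(execution, { transaction });
      //   await processor.resume(pending);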
pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); @@ -502,7 +510,8 @@ describe('execution', () => { const pending = pendingJobs.find(item => item.nodeId === n2.id ); pending.set('result', 123); - await execution.resume(pending); + const processor = plugin.createProcessor(execution); + await processor.resume(pending); expect(execution.status).toEqual(EXECUTION_STATUS.RESOLVED); const jobs = await execution.getJobs({ order: [['id', 'ASC']] }); diff --git a/packages/plugins/workflow/src/__tests__/index.ts b/packages/plugins/workflow/src/__tests__/index.ts index 3a0792c94..34b2195e1 100644 --- a/packages/plugins/workflow/src/__tests__/index.ts +++ b/packages/plugins/workflow/src/__tests__/index.ts @@ -1,8 +1,7 @@ import path from 'path'; import { MockServer, mockServer } from '@nocobase/test'; -import plugin from '../server'; -import instructions from '../instructions'; +import Plugin from '..'; import { JOB_STATUS } from '../constants'; import calculators from '../calculators'; @@ -15,40 +14,35 @@ export function sleep(ms: number) { export async function getApp(options = {}): Promise { const app = mockServer(options); - app.plugin(plugin); - - // for test only - if (!instructions.get('echo')) { - instructions.register('echo', { - run(this, { result }, execution) { - return { - status: JOB_STATUS.RESOLVED, - result - }; - } - }); - } - - if (!instructions.get('error')) { - instructions.register('error', { - run(this, input, execution) { - throw new Error('definite error'); - } - }); - } - - if (!instructions.get('prompt->error')) { - instructions.register('prompt->error', { - run(this, input, execution) { - return { - status: JOB_STATUS.PENDING - }; + app.plugin(Plugin, { + instructions: { + echo: { + run({ result }, execution) { + return { + status: JOB_STATUS.RESOLVED, + result + }; + } }, - resume(this, input, execution) { - throw new Error('input failed'); + + error: { + run(input, execution) { + throw new Error('definite error'); + } + }, + + 'prompt->error': { + run(this, input, execution) { + return { + status: JOB_STATUS.PENDING + }; + }, + resume(this, input, execution) { + throw new Error('input failed'); + } } - }); - } + } + }); if (!calculators.get('no1')) { calculators.register('no1', () => 1); diff --git a/packages/plugins/workflow/src/__tests__/triggers/schedule.test.ts b/packages/plugins/workflow/src/__tests__/triggers/schedule.test.ts index fbed74530..c0149aee7 100644 --- a/packages/plugins/workflow/src/__tests__/triggers/schedule.test.ts +++ b/packages/plugins/workflow/src/__tests__/triggers/schedule.test.ts @@ -1,7 +1,6 @@ import { Application } from '@nocobase/server'; import Database from '@nocobase/database'; import { getApp, sleep } from '..'; -import { EXECUTION_STATUS } from '../../constants'; @@ -10,16 +9,19 @@ describe.skip('workflow > triggers > schedule', () => { let db: Database; let PostRepo; let WorkflowModel; + let WorkflowRepo; beforeEach(async () => { app = await getApp(); db = app.db; - WorkflowModel = db.getCollection('workflows').model; + const workflow = db.getCollection('workflows'); + WorkflowModel = workflow.model; + WorkflowRepo = workflow.repository; PostRepo = db.getCollection('posts').repository; }); - afterEach(() => app.stop()); + afterEach(() => app.destroy()); describe('constant mode', () => { it('no repeat 
configurated', async () => { @@ -127,22 +129,33 @@ describe.skip('workflow > triggers > schedule', () => { now.setSeconds(now.getSeconds() + 2); now.setMilliseconds(0); - const w1 = await WorkflowModel.create({ - enabled: true, - type: 'schedule', - config: { - mode: 0, - repeat: `${now.getSeconds()} * * * * *`, - } + let w1, w2; + await db.sequelize.transaction(async (transaction) => { + w1 = await WorkflowRepo.create({ + values: { + enabled: true, + type: 'schedule', + config: { + mode: 0, + repeat: `${now.getSeconds()} * * * * *`, + } + }, + transaction + }); }); - const w2 = await WorkflowModel.create({ - enabled: true, - type: 'schedule', - config: { - mode: 0, - repeat: `${now.getSeconds()} * * * * *`, - } + await db.sequelize.transaction(async (transaction) => { + w2 = await WorkflowRepo.create({ + values: { + enabled: true, + type: 'schedule', + config: { + mode: 0, + repeat: `${now.getSeconds()} * * * * *`, + } + }, + transaction + }); }); await sleep(3000); diff --git a/packages/plugins/workflow/src/__tests__/workflow.test.ts b/packages/plugins/workflow/src/__tests__/workflow.test.ts index bf4883b1a..57c543818 100644 --- a/packages/plugins/workflow/src/__tests__/workflow.test.ts +++ b/packages/plugins/workflow/src/__tests__/workflow.test.ts @@ -79,7 +79,7 @@ describe('workflow > workflow', () => { }); expect(w1next.enabled).toBe(false); - expect(w1next.current).toBe(false); + expect(w1next.current).toBe(null); expect(w2next.enabled).toBe(true); expect(w2next.current).toBe(true); diff --git a/packages/plugins/workflow/src/calculators/index.ts b/packages/plugins/workflow/src/calculators/index.ts index 6be9323d1..a8f3e33e5 100644 --- a/packages/plugins/workflow/src/calculators/index.ts +++ b/packages/plugins/workflow/src/calculators/index.ts @@ -1,8 +1,8 @@ import { get as getWithPath } from 'lodash'; import { Registry } from "@nocobase/utils"; -import ExecutionModel from '../models/Execution'; import JobModel from '../models/Job'; +import Processor from '../Processor'; export const calculators = new Registry(); @@ -64,12 +64,12 @@ function get(object, path?: string | Array) { // this method could only be used in executing nodes. // because type of 'job' need loaded jobs in runtime execution. // or the execution should be prepared first. 
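// For illustration, operand shapes as consumed by the switch below (names and
// values here are assumptions, e.g. the 'no1' calculator registered in tests):
//   { type: '$context', options: { path: 'data.title' } }               // from execution context
//   { type: '$jobsMapByNodeId', options: { nodeId: 1, path: 'id' } }    // from a finished node's result
//   { type: '$calculation', options: { calculator: 'no1', operands: [] } }
// any other type falls through and is treated as a constant.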
-export function calculate(operand: Operand, lastJob: JobModel, execution: ExecutionModel) { +export function calculate(operand: Operand, lastJob: JobModel, processor: Processor) { switch (operand.type) { // @Deprecated // from execution context case '$context': - return get(execution.context, operand.options.path); + return get(processor.execution.context, operand.options.path); // @Deprecated // from last job (or input job) @@ -80,7 +80,7 @@ export function calculate(operand: Operand, lastJob: JobModel, execution: Execut // from job in execution case '$jobsMapByNodeId': // assume jobs have been fetched from execution before - const job = execution.jobsMapByNodeId[operand.options.nodeId]; + const job = processor.jobsMapByNodeId[operand.options.nodeId]; return job && get(job, operand.options.path); case '$calculation': @@ -88,7 +88,7 @@ export function calculate(operand: Operand, lastJob: JobModel, execution: Execut if (!fn) { throw new Error(`no calculator function registered for "${operand.options.calculator}"`); } - return fn(...operand.options.operands.map(item => calculate(item, lastJob, execution))); + return fn(...operand.options.operands.map(item => calculate(item, lastJob, processor))); // constant default: diff --git a/packages/plugins/workflow/src/collections/executions.ts b/packages/plugins/workflow/src/collections/executions.ts index dbf17dbea..95d9b9527 100644 --- a/packages/plugins/workflow/src/collections/executions.ts +++ b/packages/plugins/workflow/src/collections/executions.ts @@ -2,7 +2,6 @@ import { CollectionOptions } from '@nocobase/database'; export default { name: 'executions', - model: 'ExecutionModel', fields: [ { type: 'belongsTo', diff --git a/packages/plugins/workflow/src/collections/jobs.ts b/packages/plugins/workflow/src/collections/jobs.ts index 18485ca63..70966e977 100644 --- a/packages/plugins/workflow/src/collections/jobs.ts +++ b/packages/plugins/workflow/src/collections/jobs.ts @@ -2,47 +2,28 @@ import { CollectionOptions } from '@nocobase/database'; export default { name: 'jobs', - title: '流程记录', fields: [ { - interface: 'linkTo', type: 'belongsTo', - name: 'execution', - title: '所属流程' + name: 'execution' }, { - interface: 'linkTo', type: 'belongsTo', name: 'node', - target: 'flow_nodes', - title: '所属节点' + target: 'flow_nodes' }, { - interface: 'linkTo', type: 'belongsTo', name: 'upstream', - target: 'jobs', - title: '上游记录' + target: 'jobs' }, - // pending / resolved / rejected { - interface: 'status', type: 'integer', - name: 'status', - title: '处理状态' + name: 'status' }, { - interface: 'json', type: 'jsonb', - name: 'result', - title: '处理结果' - }, - // TODO: possibly need node snapshot in case if node has been changed - // { - // interface: 'json', - // type: 'jsonb', - // name: 'nodeSnapshot', - // title: 'node snapshot' - // } + name: 'result' + } ] } as CollectionOptions; diff --git a/packages/plugins/workflow/src/collections/workflows.ts b/packages/plugins/workflow/src/collections/workflows.ts index 8b1f9f70e..37281abdc 100644 --- a/packages/plugins/workflow/src/collections/workflows.ts +++ b/packages/plugins/workflow/src/collections/workflows.ts @@ -1,75 +1,82 @@ import { CollectionOptions } from '@nocobase/database'; -export default { - name: 'workflows', - model: 'WorkflowModel', - fields: [ - { - name: 'key', - type: 'uid' - }, - { - type: 'string', - name: 'title', - required: true - }, - { - type: 'boolean', - name: 'enabled', - defaultValue: false - }, - { - type: 'text', - name: 'description' - }, - { - type: 'string', - name: 'type', - 
required: true - }, - { - type: 'jsonb', - name: 'config', - required: true, - defaultValue: {} - }, - { - type: 'boolean', - name: 'useTransaction', - defaultValue: true - }, - { - type: 'hasMany', - name: 'nodes', - target: 'flow_nodes' - }, - { - type: 'hasMany', - name: 'executions' - }, - { - type: 'integer', - name: 'executed', - defaultValue: 0 - }, - { - type: 'integer', - name: 'allExecuted', - defaultValue: 0 - }, - { - type: 'boolean', - name: 'current', - defaultValue: false - }, - { - type: 'hasMany', - name: 'revisions', - target: 'workflows', - foreignKey: 'key', - sourceKey: 'key', - // NOTE: no constraints needed here because tricky self-referencing - constraints: false - } - ] -} as CollectionOptions; +export default function () { + return { + name: 'workflows', + fields: [ + { + name: 'key', + type: 'uid' + }, + { + type: 'string', + name: 'title', + required: true + }, + { + type: 'boolean', + name: 'enabled', + defaultValue: false + }, + { + type: 'text', + name: 'description' + }, + { + type: 'string', + name: 'type', + required: true + }, + { + type: 'jsonb', + name: 'config', + required: true, + defaultValue: {} + }, + { + type: 'boolean', + name: 'useTransaction', + defaultValue: true + }, + { + type: 'hasMany', + name: 'nodes', + target: 'flow_nodes' + }, + { + type: 'hasMany', + name: 'executions' + }, + { + type: 'integer', + name: 'executed', + defaultValue: 0 + }, + { + type: 'integer', + name: 'allExecuted', + defaultValue: 0 + }, + { + type: 'boolean', + name: 'current' + }, + { + type: 'hasMany', + name: 'revisions', + target: 'workflows', + foreignKey: 'key', + sourceKey: 'key', + // NOTE: no constraints needed here because tricky self-referencing + constraints: false + } + ], + // NOTE: use unique index for avoiding deadlock in mysql when setCurrent + indexes: [ + { + unique: true, + fields: ['key', 'current'] + } + ] + } as CollectionOptions; +} diff --git a/packages/plugins/workflow/src/index.ts b/packages/plugins/workflow/src/index.ts index bc6109ec9..c774f4411 100644 --- a/packages/plugins/workflow/src/index.ts +++ b/packages/plugins/workflow/src/index.ts @@ -1,7 +1,6 @@ export * from './calculators'; export * from './constants'; export * from './instructions'; -export { default } from './server'; export * from './triggers'; - - +export * from './Processor'; +export { default } from './Plugin'; diff --git a/packages/plugins/workflow/src/instructions/calculation.ts b/packages/plugins/workflow/src/instructions/calculation.ts index 3654b8dbd..21b5dfb19 100644 --- a/packages/plugins/workflow/src/instructions/calculation.ts +++ b/packages/plugins/workflow/src/instructions/calculation.ts @@ -30,14 +30,14 @@ import { calculate } from "../calculators"; // } export default { - async run(this: FlowNodeModel, prevJob, execution) { + async run(this: FlowNodeModel, prevJob, processor) { const { calculation } = this.config || {}; const result = calculation ? 
calculate({ type: '$calculation', - options: execution.getParsedValue(calculation) - }, prevJob, execution) + options: processor.getParsedValue(calculation) + }, prevJob, processor) : null; return { diff --git a/packages/plugins/workflow/src/instructions/condition.ts b/packages/plugins/workflow/src/instructions/condition.ts index c95aca873..d8fb4ad87 100644 --- a/packages/plugins/workflow/src/instructions/condition.ts +++ b/packages/plugins/workflow/src/instructions/condition.ts @@ -49,7 +49,7 @@ type Calculation = SingleCalculation | GroupCalculation; // ] // } // } -function logicCalculate(calculation, input, execution) { +function logicCalculate(calculation, input, processor) { if (!calculation) { return true; } @@ -58,9 +58,9 @@ function logicCalculate(calculation, input, execution) { let result; if (group) { const method = group.type === 'and' ? 'every' : 'some'; - result = group.calculations[method](item => logicCalculate(item, input, execution)); + result = group.calculations[method](item => logicCalculate(item, input, processor)); } else { - const args = calculation.operands.map(operand => calculate(operand, input, execution)); + const args = calculation.operands.map(operand => calculate(operand, input, processor)); const fn = calculators.get(calculation.calculator); if (!fn) { throw new Error(`no calculator function registered for "${calculation.calculator}"`); @@ -73,11 +73,11 @@ function logicCalculate(calculation, input, execution) { export default { - async run(this, prevJob, execution) { - // TODO(optimize): loading of jobs could be reduced and turned into incrementally in execution - // const jobs = await execution.getJobs(); + async run(this, prevJob, processor) { + // TODO(optimize): loading of jobs could be reduced and turned into incrementally in processor + // const jobs = await processor.getJobs(); const { calculation, rejectOnFalse } = this.config || {}; - const result = logicCalculate(calculation, prevJob, execution); + const result = logicCalculate(calculation, prevJob, processor); if (!result && rejectOnFalse) { return { @@ -94,25 +94,25 @@ export default { upstreamId: prevJob && prevJob.id || null }; - const branchNode = execution.nodes + const branchNode = processor.nodes .find(item => item.upstream === this && Boolean(item.branchIndex) === result); if (!branchNode) { return job; } - const savedJob = await execution.saveJob(job); + const savedJob = await processor.saveJob(job); - return execution.run(branchNode, savedJob); + return processor.run(branchNode, savedJob); }, - async resume(this, branchJob, execution) { + async resume(this, branchJob, processor) { if (branchJob.status === JOB_STATUS.RESOLVED) { // return to continue this.downstream return branchJob; } // pass control to upper scope by ending current scope - return execution.end(this, branchJob); + return processor.end(this, branchJob); } }; diff --git a/packages/plugins/workflow/src/instructions/create.ts b/packages/plugins/workflow/src/instructions/create.ts index 93eb77754..c631e0669 100644 --- a/packages/plugins/workflow/src/instructions/create.ts +++ b/packages/plugins/workflow/src/instructions/create.ts @@ -2,17 +2,17 @@ import { JOB_STATUS } from "../constants"; import FlowNodeModel from "../models/FlowNode"; export default { - async run(this: FlowNodeModel, input, execution) { + async run(this: FlowNodeModel, input, processor) { const { collection, params = {} } = this.config; const repo = (this.constructor).database.getRepository(collection); - const options = execution.getParsedValue(params); 
+ const options = processor.getParsedValue(params); const result = await repo.create({ ...options, - transaction: execution.tx + transaction: processor.transaction }); return { diff --git a/packages/plugins/workflow/src/instructions/destroy.ts b/packages/plugins/workflow/src/instructions/destroy.ts index 040b6fd69..8d224566d 100644 --- a/packages/plugins/workflow/src/instructions/destroy.ts +++ b/packages/plugins/workflow/src/instructions/destroy.ts @@ -2,17 +2,17 @@ import { JOB_STATUS } from "../constants"; import FlowNodeModel from "../models/FlowNode"; export default { - async run(this: FlowNodeModel, input, execution) { + async run(this: FlowNodeModel, input, processor) { const { collection, params = {} } = this.config; const repo = (this.constructor).database.getRepository(collection); - const options = execution.getParsedValue(params); + const options = processor.getParsedValue(params); const result = await repo.destroy({ ...options, - transaction: execution.tx + transaction: processor.transaction }); return { diff --git a/packages/plugins/workflow/src/instructions/index.ts b/packages/plugins/workflow/src/instructions/index.ts index e3e1ed7a9..4f953c7ad 100644 --- a/packages/plugins/workflow/src/instructions/index.ts +++ b/packages/plugins/workflow/src/instructions/index.ts @@ -1,8 +1,8 @@ -import { Registry } from '@nocobase/utils'; - -import ExecutionModel from '../models/Execution'; import FlowNodeModel from '../models/FlowNode'; +import Plugin from '..'; +import Processor from '../Processor'; + import prompt from './prompt'; import calculation from './calculation'; import condition from './condition'; @@ -12,11 +12,11 @@ import create from './create'; import update from './update'; import destroy from './destroy'; -export interface Job { +export type Job = { status: number; result?: unknown; [key: string]: unknown; -} +} | null; export type InstructionResult = Job | Promise; @@ -30,26 +30,33 @@ export interface Instruction { input: any, // what should context to be? // - could be the workflow execution object (containing context data) - execution: ExecutionModel + processor: Processor ): InstructionResult; // for start node in main flow (or branch) to resume when manual sub branch triggered resume?( this: FlowNodeModel, input: any, - execution: ExecutionModel + processor: Processor ): InstructionResult } -export const instructions = new Registry(); +export default function( + plugin, + more: { [key: string]: T | { new(p: Plugin): T } } = {} +) { + const { instructions } = plugin; -instructions.register('prompt', prompt); -instructions.register('calculation', calculation); -instructions.register('condition', condition); -instructions.register('parallel', parallel); -instructions.register('query', query); -instructions.register('create', create); -instructions.register('update', update); -instructions.register('destroy', destroy); + instructions.register('prompt', prompt); + instructions.register('calculation', calculation); + instructions.register('condition', condition); + instructions.register('parallel', parallel); + instructions.register('query', query); + instructions.register('create', create); + instructions.register('update', update); + instructions.register('destroy', destroy); -export default instructions; + for (const [name, instruction] of Object.entries(more)) { + instructions.register(name, typeof instruction === 'function' ? 
new instruction(plugin) : instruction); + } +} diff --git a/packages/plugins/workflow/src/instructions/parallel.ts b/packages/plugins/workflow/src/instructions/parallel.ts index c81ab6b54..547ca330f 100644 --- a/packages/plugins/workflow/src/instructions/parallel.ts +++ b/packages/plugins/workflow/src/instructions/parallel.ts @@ -1,7 +1,7 @@ -import { JOB_STATUS } from "../constants"; -import ExecutionModel from "../models/Execution"; import FlowNodeModel from "../models/FlowNode"; import JobModel from "../models/Job"; +import Processor from "../Processor"; +import { JOB_STATUS } from "../constants"; export const PARALLEL_MODE = { ALL: 'all', @@ -40,12 +40,12 @@ const StatusGetters = { }; export default { - async run(this: FlowNodeModel, prevJob: JobModel, execution: ExecutionModel) { - const branches = execution.nodes + async run(this: FlowNodeModel, prevJob: JobModel, processor: Processor) { + const branches = processor.nodes .filter(item => item.upstream === this && item.branchIndex !== null) .sort((a, b) => a.branchIndex - b.branchIndex); - const job = await execution.saveJob({ + const job = await processor.saveJob({ status: JOB_STATUS.PENDING, result: Array(branches.length).fill(null), nodeId: this.id, @@ -56,14 +56,14 @@ export default { // use `reduce` but not `Promise.all` here to avoid racing manupulating db. // for users, this is almost equivalent to `Promise.all`, // because of the delay is not significant sensible. - // another better aspect of this is, it could handle sequenced branches in future. - await branches.reduce((promise: Promise, branch) => promise.then(() => execution.run(branch, job)), Promise.resolve()); + // another benifit of this is, it could handle sequenced branches in future. + await branches.reduce((promise: Promise, branch) => promise.then(() => processor.run(branch, job)), Promise.resolve()); - return execution.end(this, job); + return processor.end(this, job); }, - async resume(this, branchJob, execution: ExecutionModel) { - const job = execution.findBranchParentJob(branchJob, this); + async resume(this, branchJob, processor: Processor) { + const job = processor.findBranchParentJob(branchJob, this); const { result, status } = job; // if parallel has been done (resolved / rejected), do not care newly executed branch jobs. 
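    // For illustration (shapes assumed from run() above): `result` starts as
    // Array(branches.length).fill(null), and each returning branch job only
    // overwrites its own slot, e.g. the branch with branchIndex 1 turns
    // [null, null] into [null, branchJob.get()] via `newResult` below.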
@@ -72,8 +72,8 @@ export default { } // find the index of the node which start the branch - const jobNode = execution.nodesMap.get(branchJob.nodeId); - const { branchIndex } = execution.findBranchStartNode(jobNode); + const jobNode = processor.nodesMap.get(branchJob.nodeId); + const { branchIndex } = processor.findBranchStartNode(jobNode); const { mode = PARALLEL_MODE.ALL } = this.config || {}; const newResult = [...result.slice(0, branchIndex), branchJob.get(), ...result.slice(branchIndex + 1)]; @@ -83,8 +83,8 @@ export default { }); if (job.status === JOB_STATUS.PENDING) { - await job.save({ transaction: execution.tx }); - return execution.end(this, job); + await job.save({ transaction: processor.transaction }); + return processor.end(this, job); } return job; diff --git a/packages/plugins/workflow/src/instructions/prompt.ts b/packages/plugins/workflow/src/instructions/prompt.ts index 70161531e..005965fdf 100644 --- a/packages/plugins/workflow/src/instructions/prompt.ts +++ b/packages/plugins/workflow/src/instructions/prompt.ts @@ -1,13 +1,13 @@ import { JOB_STATUS } from "../constants"; export default { - run(this, input, execution) { + run(this, input, processor) { return { status: JOB_STATUS.PENDING }; }, - resume(this, job, execution) { + resume(this, job, processor) { job.set('status', JOB_STATUS.RESOLVED); return job; } diff --git a/packages/plugins/workflow/src/instructions/query.ts b/packages/plugins/workflow/src/instructions/query.ts index e123e2697..37d63cee9 100644 --- a/packages/plugins/workflow/src/instructions/query.ts +++ b/packages/plugins/workflow/src/instructions/query.ts @@ -1,8 +1,9 @@ -import { JOB_STATUS } from "../constants"; import FlowNodeModel from "../models/FlowNode"; +import Processor from "../Processor"; +import { JOB_STATUS } from "../constants"; export default { - async run(this: FlowNodeModel, input, execution) { + async run(this: FlowNodeModel, input, processor: Processor) { const { collection, multiple, @@ -10,10 +11,10 @@ export default { } = this.config; const repo = (this.constructor).database.getRepository(collection); - const options = execution.getParsedValue(params); + const options = processor.getParsedValue(params); const result = await (multiple ? 
repo.find : repo.findOne).call(repo, { ...options, - transaction: execution.tx + transaction: processor.transaction }); // NOTE: `toJSON()` to avoid getting undefined value from Proxied model instance (#380) diff --git a/packages/plugins/workflow/src/instructions/update.ts b/packages/plugins/workflow/src/instructions/update.ts index d49faa7c4..4aed59232 100644 --- a/packages/plugins/workflow/src/instructions/update.ts +++ b/packages/plugins/workflow/src/instructions/update.ts @@ -1,9 +1,9 @@ -import { JOB_STATUS } from "../constants"; -import ExecutionModel from "../models/Execution"; import FlowNodeModel from "../models/FlowNode"; +import Processor from "../Processor"; +import { JOB_STATUS } from "../constants"; export default { - async run(this: FlowNodeModel, input, execution: ExecutionModel) { + async run(this: FlowNodeModel, input, processor: Processor) { const { collection, multiple = false, @@ -11,10 +11,10 @@ export default { } = this.config; const repo = (this.constructor).database.getRepository(collection); - const options = execution.getParsedValue(params); + const options = processor.getParsedValue(params); const result = await repo.update({ ...options, - transaction: execution.tx + transaction: processor.transaction }); return { diff --git a/packages/plugins/workflow/src/models/Execution.ts b/packages/plugins/workflow/src/models/Execution.ts index 017cba957..86821d5d7 100644 --- a/packages/plugins/workflow/src/models/Execution.ts +++ b/packages/plugins/workflow/src/models/Execution.ts @@ -1,16 +1,9 @@ import { Database, Model } from '@nocobase/database'; -import parse from 'json-templates'; -import { BelongsToGetAssociationMixin, HasManyGetAssociationsMixin, Transaction } from 'sequelize'; -import { EXECUTION_STATUS, JOB_STATUS } from '../constants'; -import instructions from '../instructions'; +import { BelongsToGetAssociationMixin, HasManyGetAssociationsMixin } from 'sequelize'; import WorkflowModel from './Workflow'; -import FlowNodeModel from './FlowNode'; import JobModel from './Job'; -import calculators from '../calculators'; -export interface ExecutionOptions { - transaction?: Transaction; -} + export default class ExecutionModel extends Model { declare static readonly database: Database; @@ -33,271 +26,4 @@ export default class ExecutionModel extends Model { declare jobs?: JobModel[]; declare getJobs: HasManyGetAssociationsMixin; - - options: ExecutionOptions; - - tx: Transaction; - - nodes: Array = []; - nodesMap = new Map(); - jobsMap = new Map(); - jobsMapByNodeId: { [key: number]: any } = {}; - - static StatusMap = { - [JOB_STATUS.PENDING]: EXECUTION_STATUS.STARTED, - [JOB_STATUS.RESOLVED]: EXECUTION_STATUS.RESOLVED, - [JOB_STATUS.REJECTED]: EXECUTION_STATUS.REJECTED, - [JOB_STATUS.CANCELLED]: EXECUTION_STATUS.CANCELLED, - }; - - // make dual linked nodes list then cache - makeNodes(nodes = []) { - this.nodes = nodes; - - nodes.forEach((node) => { - this.nodesMap.set(node.id, node); - }); - - nodes.forEach((node) => { - if (node.upstreamId) { - node.upstream = this.nodesMap.get(node.upstreamId); - } - - if (node.downstreamId) { - node.downstream = this.nodesMap.get(node.downstreamId); - } - }); - } - - makeJobs(jobs: Array) { - jobs.forEach((job) => { - this.jobsMap.set(job.id, job); - // TODO: should consider cycle, and from previous job - this.jobsMapByNodeId[job.nodeId] = job.result; - }); - } - - async getTransaction() { - const { sequelize } = (this.constructor).database; - - if (!this.useTransaction) { - return undefined; - } - - const { options } = this; - - 
// @ts-ignore - const transaction = options.transaction && !options.transaction.finished - ? options.transaction - : sequelize.transaction(); - - // @ts-ignore - if (this.transaction !== transaction.id) { - // @ts-ignore - await this.update({ transaction: transaction.id }, { transaction }); - } - return transaction; - } - - async prepare(options, commit = false) { - this.options = options || {}; - const transaction = await this.getTransaction(); - this.tx = transaction; - - if (!this.workflow) { - this.workflow = await this.getWorkflow({ transaction }); - } - - const nodes = await this.workflow.getNodes({ transaction }); - - this.makeNodes(nodes); - - const jobs = await this.getJobs({ - order: [['id', 'ASC']], - transaction, - }); - - this.makeJobs(jobs); - - if (commit) { - await this.commit(); - } - } - - public async start(options: ExecutionOptions) { - if (this.status !== EXECUTION_STATUS.STARTED) { - throw new Error(`execution was ended with status ${this.status}`); - } - await this.prepare(options); - if (this.nodes.length) { - const head = this.nodes.find(item => !item.upstream); - await this.run(head, { result: this.context }); - } else { - await this.exit(null); - } - await this.commit(); - } - - public async resume(job: JobModel, options: ExecutionOptions) { - if (this.status !== EXECUTION_STATUS.STARTED) { - throw new Error(`execution was ended with status ${this.status}`); - } - await this.prepare(options); - const node = this.nodesMap.get(job.nodeId); - await this.recall(node, job); - await this.commit(); - } - - private async commit() { - // @ts-ignore - if (this.tx && (!this.options.transaction || this.options.transaction.finished)) { - await this.tx.commit(); - } - } - - private async exec(instruction: Function, node: FlowNodeModel, prevJob) { - let job; - try { - // call instruction to get result and status - job = await instruction.call(node, prevJob, this); - if (!job) { - return null; - } - } catch (err) { - // for uncaught error, set to rejected - job = { - result: err instanceof Error - ? { message: err.message, stack: process.env.NODE_ENV === 'production' ? [] : err.stack } - : err, - status: JOB_STATUS.REJECTED, - }; - // if previous job is from resuming - if (prevJob && prevJob.nodeId === node.id) { - prevJob.set(job); - job = prevJob; - } - } - - let savedJob; - // TODO(optimize): many checking of resuming or new could be improved - // could be implemented separately in exec() / resume() - if (job instanceof Model) { - savedJob = (await job.save({ transaction: this.tx })) as unknown as JobModel; - } else { - const upstreamId = prevJob instanceof Model ? prevJob.get('id') : null; - savedJob = await this.saveJob({ - nodeId: node.id, - upstreamId, - ...job, - }); - } - - if (savedJob.status === JOB_STATUS.RESOLVED && node.downstream) { - // run next node - return this.run(node.downstream, savedJob); - } - - // all nodes in scope have been executed - return this.end(node, savedJob); - } - - public async run(node, input?) 
{ - const { run } = instructions.get(node.type); - if (typeof run !== 'function') { - return Promise.reject(new Error('`run` should be implemented for customized execution of the node')); - } - - return this.exec(run, node, input); - } - - // parent node should take over the control - public end(node, job) { - const parentNode = this.findBranchParentNode(node); - // no parent, means on main flow - if (parentNode) { - return this.recall(parentNode, job); - } - - // really done for all nodes - // * should mark execution as done with last job status - return this.exit(job); - } - - async recall(node, job) { - const { resume } = instructions.get(node.type); - if (typeof resume !== 'function') { - return Promise.reject(new Error('`resume` should be implemented because the node made branch')); - } - - return this.exec(resume, node, job); - } - - async exit(job: JobModel | null) { - const status = job ? ExecutionModel.StatusMap[job.status] : EXECUTION_STATUS.RESOLVED; - await this.update({ status }, { transaction: this.tx }); - return null; - } - - // TODO(optimize) - async saveJob(payload) { - const { database } = this.constructor; - const { model } = database.getCollection('jobs'); - const [job] = (await model.upsert( - { - ...payload, - executionId: this.id, - }, - { transaction: this.tx }, - )) as unknown as [JobModel, boolean | null]; - this.jobsMap.set(job.id, job); - this.jobsMapByNodeId[job.nodeId] = job.result; - - return job; - } - - // find the first node in current branch - findBranchStartNode(node: FlowNodeModel): FlowNodeModel | null { - for (let n = node; n; n = n.upstream) { - if (n.branchIndex !== null) { - return n; - } - } - return null; - } - - // find the node start current branch - findBranchParentNode(node: FlowNodeModel): FlowNodeModel | null { - for (let n = node; n; n = n.upstream) { - if (n.branchIndex !== null) { - return n.upstream; - } - } - return null; - } - - findBranchParentJob(job: JobModel, node: FlowNodeModel): JobModel | null { - for (let j = job; j; j = this.jobsMap.get(j.upstreamId)) { - if (j.nodeId === node.id) { - return j; - } - } - return null; - } - - public getParsedValue(value, node?) 
{ - const injectedFns = {}; - const scope = { - execution: this, - node - }; - for (let [name, fn] of calculators.getEntities()) { - injectedFns[name] = fn.bind(scope); - } - - return parse(value)({ - $context: this.context, - $jobsMapByNodeId: this.jobsMapByNodeId, - $fn: injectedFns - }); - } } diff --git a/packages/plugins/workflow/src/models/Workflow.ts b/packages/plugins/workflow/src/models/Workflow.ts index 845c77b8b..3aca769de 100644 --- a/packages/plugins/workflow/src/models/Workflow.ts +++ b/packages/plugins/workflow/src/models/Workflow.ts @@ -1,10 +1,10 @@ -import { Database, Model } from '@nocobase/database'; +import { Database, Model, Op } from '@nocobase/database'; import { HasManyCountAssociationsMixin, HasManyCreateAssociationMixin, HasManyGetAssociationsMixin, Transactionable } from 'sequelize'; -import { EXECUTION_STATUS } from '../constants'; import ExecutionModel from './Execution'; import FlowNodeModel from './FlowNode'; + export default class WorkflowModel extends Model { declare static database: Database; @@ -30,76 +30,4 @@ export default class WorkflowModel extends Model { declare countExecutions: HasManyCountAssociationsMixin; declare getExecutions: HasManyGetAssociationsMixin; declare createExecution: HasManyCreateAssociationMixin; - - getTransaction(options) { - if (!this.useTransaction) { - return null; - } - - return options.transaction && !options.transaction.finished - ? options.transaction - : (this.constructor).database.sequelize.transaction(); - } - - trigger = async (context: Object, options = {}) => { - // `null` means not to trigger - if (context === null) { - return; - } - - const transaction = await this.getTransaction(options); - - if (this.useTransaction) { - const existed = await this.countExecutions({ - where: { - transaction: transaction.id - }, - transaction - }); - - if (existed) { - console.warn(`workflow ${this.id} has already been triggered in same execution (${transaction.id}), and newly triggering will be skipped.`); - return; - } - } - - const execution = await this.createExecution({ - context, - key: this.key, - status: EXECUTION_STATUS.STARTED, - useTransaction: this.useTransaction, - transaction: transaction.id - }, { transaction }); - - const executed = await this.countExecutions({ transaction }); - - // NOTE: not to trigger afterUpdate hook here - await this.update({ executed }, { transaction, hooks: false }); - - const allExecuted = await (execution.constructor).count({ - where: { - key: this.key - }, - transaction - }); - await (this.constructor).update({ - allExecuted - }, { - where: { - key: this.key - }, - transaction - }); - - execution.workflow = this; - - await execution.start({ transaction }); - - // @ts-ignore - if (transaction && (!options.transaction || options.transaction.finished)) { - await transaction.commit(); - } - - return execution; - } } diff --git a/packages/plugins/workflow/src/server.ts b/packages/plugins/workflow/src/server.ts deleted file mode 100644 index ac6481af9..000000000 --- a/packages/plugins/workflow/src/server.ts +++ /dev/null @@ -1,96 +0,0 @@ -import path from 'path'; - -import { Plugin } from '@nocobase/server'; -import { Op } from '@nocobase/database'; - -import WorkflowModel from './models/Workflow'; -import ExecutionModel from './models/Execution'; -import initActions from './actions'; -import initTriggers, { Trigger } from './triggers'; -import { Registry } from '@nocobase/utils'; - - - -export default class extends Plugin { - triggers: Registry = new Registry(); - - getName(): string { - 
return this.getPackageName(__dirname); - } - - async load(options = {}) { - const { db } = this.app; - - db.registerModels({ - WorkflowModel, - ExecutionModel, - }); - - await db.import({ - directory: path.resolve(__dirname, 'collections'), - }); - - initActions(this); - - initTriggers(this); - - db.on('workflows.beforeSave', this.setCurrent); - db.on('workflows.afterSave', (model: WorkflowModel) => this.toggle(model)); - db.on('workflows.afterDestroy', (model: WorkflowModel) => this.toggle(model, false)); - - // [Life Cycle]: - // * load all workflows in db - // * add all hooks for enabled workflows - // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks - this.app.on('beforeStart', async () => { - const collection = db.getCollection('workflows'); - const workflows = await collection.repository.find({ - filter: { enabled: true }, - }); - - workflows.forEach((workflow: WorkflowModel) => { - this.toggle(workflow); - }); - }); - // [Life Cycle]: initialize all necessary seed data - // this.app.on('db.init', async () => {}); - } - - setCurrent = async (workflow: WorkflowModel, options) => { - const others: { enabled?: boolean, current?: boolean } = {}; - - if (workflow.enabled) { - workflow.set('current', true); - others.enabled = false; - } - - if (workflow.current) { - others.current = false; - await (workflow.constructor).update(others, { - where: { - key: workflow.key, - id: { - [Op.ne]: workflow.id - } - }, - individualHooks: true, - transaction: options.transaction - }); - } - } - - toggle(workflow: WorkflowModel, enable?: boolean) { - const type = workflow.get('type'); - const trigger = this.triggers.get(type); - if (typeof enable !== 'undefined' ? enable : workflow.get('enabled')) { - // NOTE: remove previous listener if config updated - const prev = workflow.previous(); - if (prev.config) { - trigger.off({ ...workflow.get(), ...prev }); - } - trigger.on(workflow); - } else { - trigger.off(workflow); - } - } -} diff --git a/packages/plugins/workflow/src/triggers/collection.ts b/packages/plugins/workflow/src/triggers/collection.ts index 1b5a4cd1d..f98eeda3b 100644 --- a/packages/plugins/workflow/src/triggers/collection.ts +++ b/packages/plugins/workflow/src/triggers/collection.ts @@ -1,5 +1,5 @@ import { Model } from "@nocobase/database"; -import { Trigger } from "."; +import Plugin, { Trigger } from ".."; import WorkflowModel from "../models/Workflow"; export interface CollectionChangeTriggerConfig { @@ -27,8 +27,8 @@ function getHookId(workflow, type) { } // async function, should return promise -async function handler(this: WorkflowModel, data: Model, options) { - const { collection, condition, changed } = this.config; +async function handler(this: CollectionTrigger, workflow: WorkflowModel, data: Model, options) { + const { collection, condition, changed } = workflow.config; // NOTE: if no configured fields changed, do not trigger if (changed && changed.length && changed.every(name => !data.changed(name))) { // TODO: temp comment out @@ -38,7 +38,7 @@ async function handler(this: WorkflowModel, data: Model, options) { if (condition && condition.$and?.length) { // TODO: change to map filter format to calculation format // const calculation = toCalculation(condition); - const { repository, model } = (this.constructor).database.getCollection(collection); + const { repository, model } = (data.constructor).database.getCollection(collection); const { transaction } = options; const count = await repository.count({ filter: { @@ -55,21 +55,18 @@ async 
function handler(this: WorkflowModel, data: Model, options) { } } - return this.trigger({ data: data.get() }, options); + return this.plugin.trigger(workflow, { data: data.get() }, { + transaction: options.transaction + }); } -export default class CollectionTrigger implements Trigger { - db; - +export default class CollectionTrigger extends Trigger { events = new Map(); - constructor({ app }) { - this.db = app.db; - } - on(workflow: WorkflowModel) { + const { db } = this.plugin.app; const { collection, mode } = workflow.config; - const Collection = this.db.getCollection(collection); + const Collection = db.getCollection(collection); if (!Collection) { return; } @@ -79,14 +76,14 @@ export default class CollectionTrigger implements Trigger { const name = getHookId(workflow, event); if (mode & key) { if (!this.events.has(name)) { - const listener = handler.bind(workflow); + const listener = handler.bind(this, workflow); this.events.set(name, listener); - this.db.on(event, listener); + db.on(event, listener); } } else { const listener = this.events.get(name); if (listener) { - this.db.off(event, listener); + db.off(event, listener); this.events.delete(name); } } @@ -94,8 +91,9 @@ export default class CollectionTrigger implements Trigger { } off(workflow: WorkflowModel) { + const { db } = this.plugin.app; const { collection, mode } = workflow.config; - const Collection = this.db.getCollection(collection); + const Collection = db.getCollection(collection); if (!Collection) { return; } @@ -105,7 +103,7 @@ export default class CollectionTrigger implements Trigger { if (mode & key) { const listener = this.events.get(name); if (listener) { - this.db.off(event, listener); + db.off(event, listener); this.events.delete(name); } } diff --git a/packages/plugins/workflow/src/triggers/index.ts b/packages/plugins/workflow/src/triggers/index.ts index 501e54668..69b83af8f 100644 --- a/packages/plugins/workflow/src/triggers/index.ts +++ b/packages/plugins/workflow/src/triggers/index.ts @@ -1,14 +1,24 @@ +import path from 'path'; +import { requireModule } from '@nocobase/utils'; + +import Plugin from '..'; import WorkflowModel from '../models/Workflow'; -import Collection from './collection'; -import Schedule from './schedule'; -export interface Trigger { - on(workflow: WorkflowModel): void; - off(workflow: WorkflowModel): void; + + +export abstract class Trigger { + constructor(public readonly plugin: Plugin) {} + abstract on(workflow: WorkflowModel): void; + abstract off(workflow: WorkflowModel): void; } -export default function(plugin) { +export default function(plugin, more: { [key: string]: { new(p: Plugin): T } } = {}) { const { triggers } = plugin; - triggers.register('collection', new Collection(plugin)); - triggers.register('schedule', new Schedule(plugin)); + + triggers.register('collection', new (requireModule(path.join(__dirname, 'collection')))(plugin)); + triggers.register('schedule', new (requireModule(path.join(__dirname, 'schedule')))(plugin)); + + for (const [name, TClass] of Object.entries(more)) { + triggers.register(name, new TClass(plugin)); + } } diff --git a/packages/plugins/workflow/src/triggers/schedule.ts b/packages/plugins/workflow/src/triggers/schedule.ts index 2b1e712aa..4d870668b 100644 --- a/packages/plugins/workflow/src/triggers/schedule.ts +++ b/packages/plugins/workflow/src/triggers/schedule.ts @@ -1,6 +1,6 @@ import parser from 'cron-parser'; -import { literal, Op } from 'sequelize'; -import { Trigger } from '.'; +import { literal, Op, where, fn } from 'sequelize'; +import 
Plugin, { Trigger } from '..'; export type ScheduleOnField = string | { field: string; @@ -67,7 +67,7 @@ ScheduleModes.set(SCHEDULE_MODE.CONSTANT, { return; } } - return workflow.trigger({ date }); + return this.plugin.trigger(workflow, { date }); } }); @@ -113,13 +113,13 @@ function getHookId(workflow, type) { const DialectTimestampFnMap: { [key: string]: Function } = { postgres(col) { - return `extract(epoch from "${col}")`; + return `CAST(FLOOR(extract(epoch from "${col}")) AS INTEGER)`; }, mysql(col) { - return `UNIX_TIMESTAMP(${col})`; + return `CAST(FLOOR(UNIX_TIMESTAMP(\`${col}\`)) AS SIGNED INTEGER)`; }, sqlite(col) { - return `unixepoch(${col})`; + return `CAST(FLOOR(unixepoch(${col})) AS INTEGER)`; } }; DialectTimestampFnMap.mariadb = DialectTimestampFnMap.mysql; @@ -160,7 +160,7 @@ ScheduleModes.set(SCHEDULE_MODE.COLLECTION_FIELD, { this.setCache(workflow); }; this.events.set(name, listener); - this.db.on(`${collection}.afterSave`, listener); + this.plugin.app.db.on(`${collection}.afterSave`, listener); } }, @@ -171,7 +171,7 @@ ScheduleModes.set(SCHEDULE_MODE.COLLECTION_FIELD, { if (this.events.has(name)) { const listener = this.events.get(name); this.events.delete(name); - this.db.off(`${collection}.afterSave`, listener); + this.plugin.app.db.off(`${collection}.afterSave`, listener); } }, @@ -189,17 +189,22 @@ ScheduleModes.set(SCHEDULE_MODE.COLLECTION_FIELD, { const conditions: any[] = [starts, ends].filter(item => Boolean(Object.keys(item).length)); // when repeat is number, means repeat after startsOn // (now - startsOn) % repeat <= cacheCycle - const tsFn = DialectTimestampFnMap[this.db.options.dialect]; + const { db } = this.plugin.app; + const tsFn = DialectTimestampFnMap[db.options.dialect]; if (repeat && typeof repeat === 'number' && repeat > this.cacheCycle && tsFn ) { const uts = now.getTime(); - conditions.push(literal(`mod(${uts} - ${tsFn(startsOn.field)} * 1000, ${repeat}) < ${this.cacheCycle}`)); + conditions.push(where( + fn('MOD', literal(`${Math.round(uts / 1000)} - ${tsFn(startsOn.field)}`), Math.round(repeat / 1000)), + { [Op.lt]: Math.round(this.cacheCycle / 1000) } + )); + // conditions.push(literal(`mod(${uts} - ${tsFn(startsOn.field)} * 1000, ${repeat}) < ${this.cacheCycle}`)); } - const { model } = this.db.getCollection(collection); + const { model } = db.getCollection(collection); const count = await model.count({ where: { [Op.and]: conditions } }); @@ -239,10 +244,13 @@ ScheduleModes.set(SCHEDULE_MODE.COLLECTION_FIELD, { } }); - const tsFn = DialectTimestampFnMap[this.db.options.dialect]; + const tsFn = DialectTimestampFnMap[this.plugin.app.db.options.dialect]; if (typeof repeat === 'number' && tsFn) { - const uts = timestamp; - conditions.push(literal(`mod(${uts} - floor(${tsFn(startsOn.field)}) * 1000, ${repeat}) = 0`)); + conditions.push(where( + fn('MOD', literal(`${Math.round(timestamp / 1000)} - ${tsFn(startsOn.field)}`), Math.round(repeat / 1000)), + { [Op.eq]: 0 } + )); + // conditions.push(literal(`MOD(CAST(${timestamp} AS BIGINT) - CAST((FLOOR(${tsFn(startsOn.field)}) AS BIGINT) * 1000), ${repeat}) = 0`)); } switch (typeof endsOn) { @@ -266,7 +274,7 @@ ScheduleModes.set(SCHEDULE_MODE.COLLECTION_FIELD, { } } - const { model } = this.db.getCollection(collection); + const { model } = this.plugin.app.db.getCollection(collection); const instances = await model.findAll({ where: { [Op.and]: conditions @@ -278,7 +286,7 @@ ScheduleModes.set(SCHEDULE_MODE.COLLECTION_FIELD, { } instances.forEach(item => { - workflow.trigger({ + 
this.plugin.trigger(workflow, { date, data: item.get() }); @@ -317,7 +325,7 @@ function nextInCycle(this: ScheduleTrigger, workflow, now: Date): boolean { return false; } -export default class ScheduleTrigger implements Trigger { +export default class ScheduleTrigger extends Trigger { static CacheRules = [ // ({ enabled }) => enabled, ({ config, allExecuted }) => config.limit ? allExecuted < config.limit : true, @@ -353,8 +361,6 @@ export default class ScheduleTrigger implements Trigger { } ]; - public readonly db; - events = new Map(); private timer: NodeJS.Timeout = null; @@ -366,10 +372,10 @@ export default class ScheduleTrigger implements Trigger { // caching workflows in range, default to 1min cacheCycle: number = 60_000; - constructor({ app }) { - this.db = app.db; + constructor(plugin: Plugin) { + super(plugin); - app.on('beforeStop', () => { + plugin.app.on('beforeStop', () => { if (this.timer) { clearInterval(this.timer); } @@ -413,7 +419,7 @@ export default class ScheduleTrigger implements Trigger { async onTick(now) { // NOTE: trigger workflows in sequence when sqlite due to only one transaction - const isSqlite = this.db.options.dialect === 'sqlite'; + const isSqlite = this.plugin.app.db.options.dialect === 'sqlite'; return Array.from(this.cache.values()).reduce((prev, workflow) => { if (!this.shouldTrigger(workflow, now)) { @@ -428,14 +434,14 @@ export default class ScheduleTrigger implements Trigger { } async reload() { - const WorkflowModel = this.db.getCollection('workflows').model; + const WorkflowModel = this.plugin.app.db.getCollection('workflows').model; const workflows = await WorkflowModel.findAll({ where: { enabled: true, type: 'schedule' }, include: [ { association: 'executions', attributes: ['id', 'createdAt'], - seperate: true, + separate: true, limit: 1, order: [['createdAt', 'DESC']], }
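
A note on the rewritten repeat conditions in the schedule trigger hunks above: both MOD expressions now compare whole seconds on both sides (the dialect timestamp helpers floor and cast the column to an integer epoch, and the JavaScript values are divided by 1000), instead of mixing a millisecond literal with a dialect-specific epoch expression as the commented-out literals did. A minimal sketch of the same cache-window check in plain TypeScript, with made-up numbers (a daily repeat and the default 60 s cache cycle) chosen only for illustration:

    // Mirrors the condition built with fn('MOD', ...) for the cache query:
    //   MOD(now_s - starts_s, repeat_s) < cacheCycle_s
    const repeat = 86_400_000;     // hypothetical daily repeat, in ms, as stored in workflow config
    const cacheCycle = 60_000;     // default 1 min cache cycle, in ms
    const startsOn = Date.parse('2022-06-01T00:00:00Z');   // hypothetical row value
    const now = Date.parse('2022-06-20T00:00:30Z');        // hypothetical tick time

    const inCacheWindow =
      (Math.round(now / 1000) - Math.round(startsOn / 1000)) % Math.round(repeat / 1000)
        < Math.round(cacheCycle / 1000);
    // 1_641_630 % 86_400 = 30, and 30 < 60, so this row would enter the cache

Rows passing this coarse check are only cached; the stricter MOD(...) = 0 condition shown further down is what selects the rows actually triggered on a given tick.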
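
More generally, the refactor above turns each trigger into a subclass of the new abstract Trigger class: triggers receive the plugin instance in their constructor and start executions through this.plugin.trigger() instead of calling workflow.trigger() directly. The following is a minimal sketch of a custom trigger written against that contract; the class name, the 'customEvent' application event, and the registration key are invented for illustration and are not part of this patch, and the relative imports assume the file would sit under src/triggers/:

    import Plugin, { Trigger } from '..';
    import WorkflowModel from '../models/Workflow';

    export default class AppEventTrigger extends Trigger {
      // one listener per workflow id, so off() detaches exactly what on() attached
      private listeners = new Map<number, (payload: any) => void>();

      on(workflow: WorkflowModel) {
        const listener = (payload: any) => {
          // hand the event to the plugin, which creates and starts the execution
          this.plugin.trigger(workflow, { data: payload });
        };
        this.listeners.set(workflow.id, listener);
        this.plugin.app.on('customEvent', listener);   // hypothetical application event
      }

      off(workflow: WorkflowModel) {
        const listener = this.listeners.get(workflow.id);
        if (listener) {
          this.plugin.app.off('customEvent', listener);
          this.listeners.delete(workflow.id);
        }
      }
    }

Such a class could then be passed through the new `more` parameter of the triggers initializer (for example as `{ appEvent: AppEventTrigger }`), which registers it with `new TClass(plugin)` alongside the built-in collection and schedule triggers.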