chore(plugin-workflow): migrate from 0.5 to 0.6

mytharcher 2022-01-28 00:25:26 +08:00
parent 4249047318
commit e592d03f18
21 changed files with 236 additions and 140 deletions

View File

@@ -1,16 +1,12 @@
 {
   "name": "@nocobase/plugin-workflow",
-  "version": "0.5.0-alpha.37",
+  "version": "0.6.0-alpha.0",
   "main": "lib/index.js",
   "private": true,
   "license": "MIT",
   "dependencies": {
-    "@nocobase/server": "^0.5.0-alpha.37",
-    "json-templates": "^4.1.0",
-    "node-schedule": "^2.0.0"
   },
   "devDependencies": {
-    "@types/node-schedule": "^1.3.1"
   },
   "gitHead": "f0b335ac30f29f25c95d7d137655fa64d8d67f1e"
 }

View File

@@ -1,4 +1,4 @@
-import { TableOptions } from '@nocobase/database';
+import { CollectionOptions } from '@nocobase/database';
 
 export default {
   name: 'posts',
@@ -12,4 +12,4 @@ export default {
       name: 'published',
     }
   ]
-} as TableOptions;
+} as CollectionOptions;

View File

@@ -1,4 +1,4 @@
-import { TableOptions } from '@nocobase/database';
+import { CollectionOptions } from '@nocobase/database';
 
 export default {
   name: 'targets',
@@ -12,4 +12,4 @@ export default {
       name: 'col2',
     }
   ],
-} as TableOptions;
+} as CollectionOptions;

View File

@@ -1,7 +1,7 @@
 import { Application } from '@nocobase/server';
 import Database from '@nocobase/database';
 import { getApp } from '.';
-import { EXECUTION_STATUS, JOB_STATUS, LINK_TYPE } from '../constants';
+import { BRANCH_INDEX, EXECUTION_STATUS, JOB_STATUS } from '../constants';
 
 jest.setTimeout(300000);
 
@@ -10,22 +10,28 @@ describe('execution', () => {
   let db: Database;
   let PostModel;
   let WorkflowModel;
+  let WorkflowRepository;
   let workflow;
 
   beforeEach(async () => {
     app = await getApp();
     db = app.db;
-    WorkflowModel = db.getModel('workflows');
-    PostModel = db.getModel('posts');
-    // Target = db.getModel('targets');
-    workflow = await WorkflowModel.create({
-      title: 'test workflow',
-      enabled: true,
-      type: 'afterCreate',
-      config: {
-        collection: 'posts'
+    WorkflowRepository = db.getCollection('workflows').repository;
+    WorkflowModel = db.getCollection('workflows').model;
+    PostModel = db.getCollection('posts').model;
+    // TODO(question): why the hooks of creating workflow won't run by using `WorkflowModel.create()`?
+    // maybe the model is not the original defined one which hooks have been added.
+    // @see database/../collections.ts@L99: `this.model = class extends M {};`
+    workflow = await WorkflowRepository.create({
+      values: {
+        title: 'condition workflow',
+        enabled: true,
+        type: 'afterCreate',
+        config: {
+          collection: 'posts'
+        }
       }
     });
   });
 
@@ -86,7 +92,7 @@ describe('execution', () => {
     const n2 = await workflow.createNode({
       title: 'echo 2',
       type: 'echo',
-      upstream_id: n1.id
+      upstreamId: n1.id
     });
 
     await n1.setDownstream(n2);
@@ -133,7 +139,7 @@ describe('execution', () => {
     const n2 = await workflow.createNode({
       title: 'echo',
       type: 'echo',
-      upstream_id: n1.id
+      upstreamId: n1.id
     });
 
     await n1.setDownstream(n2);
@@ -166,7 +172,7 @@ describe('execution', () => {
     const n2 = await workflow.createNode({
       title: 'echo',
       type: 'echo',
-      upstream_id: n1.id
+      upstreamId: n1.id
    });
 
     await n1.setDownstream(n2);
@@ -200,15 +206,15 @@ describe('execution', () => {
     const n2 = await workflow.createNode({
       title: 'true to echo',
       type: 'echo',
-      linkType: LINK_TYPE.ON_TRUE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_TRUE,
+      upstreamId: n1.id
     });
 
     await workflow.createNode({
       title: 'false to echo',
       type: 'echo',
-      linkType: LINK_TYPE.ON_FALSE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_FALSE,
+      upstreamId: n1.id
     });
 
     const post = await PostModel.create({ title: 't1' });
@@ -218,8 +224,8 @@ describe('execution', () => {
     const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
     expect(jobs.length).toEqual(2);
-    expect(jobs[0].node_id).toEqual(n1.id);
-    expect(jobs[1].node_id).toEqual(n2.id);
+    expect(jobs[0].nodeId).toEqual(n1.id);
+    expect(jobs[1].nodeId).toEqual(n2.id);
     expect(jobs[1].result).toEqual(true);
   });
@@ -233,14 +239,14 @@ describe('execution', () => {
     const n2 = await workflow.createNode({
       title: 'manual',
       type: 'prompt',
-      linkType: LINK_TYPE.ON_TRUE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_TRUE,
+      upstreamId: n1.id
     });
 
     const n3 = await workflow.createNode({
       title: 'echo input value',
       type: 'echo',
-      upstream_id: n1.id
+      upstreamId: n1.id
     });
 
     await n1.setDownstream(n3);
@@ -250,7 +256,7 @@ describe('execution', () => {
     const [execution] = await workflow.getExecutions();
     expect(execution.status).toEqual(EXECUTION_STATUS.STARTED);
 
-    const [pending] = await execution.getJobs({ node_id: n2.id });
+    const [pending] = await execution.getJobs({ nodeId: n2.id });
     pending.set('result', 123);
     await execution.resume(pending);
@@ -268,14 +274,14 @@ describe('execution', () => {
     const n2 = await workflow.createNode({
       title: 'manual',
       type: 'prompt->error',
-      linkType: LINK_TYPE.ON_TRUE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_TRUE,
+      upstreamId: n1.id
     });
 
     const n3 = await workflow.createNode({
       title: 'echo input value',
       type: 'echo',
-      upstream_id: n1.id
+      upstreamId: n1.id
     });
 
     await n1.setDownstream(n3);
@@ -285,7 +291,7 @@ describe('execution', () => {
     const [execution] = await workflow.getExecutions();
     expect(execution.status).toEqual(EXECUTION_STATUS.STARTED);
 
-    const [pending] = await execution.getJobs({ node_id: n2.id });
+    const [pending] = await execution.getJobs({ nodeId: n2.id });
     pending.set('result', 123);
     await execution.resume(pending);
     expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED);
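
Not part of the commit, but for orientation: the test setup above switches from `db.getModel('workflows')` plus `Model.create()` to the 0.6 collection/repository API so that hooks registered on the collection's model actually fire. A minimal sketch of the new call shape, assuming `db` is an initialized 0.6 Database with these collections already imported:

// Sketch only — mirrors the beforeEach() above.
const { repository: WorkflowRepository } = db.getCollection('workflows');

// 0.6 repositories take the attributes under a `values` key instead of passing them directly.
const workflow = await WorkflowRepository.create({
  values: {
    title: 'test workflow',
    enabled: true,
    type: 'afterCreate',
    config: { collection: 'posts' }
  }
});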

View File

@@ -39,7 +39,7 @@ export async function getApp(options = {}): Promise<MockServer> {
   await app.load();
 
-  app.db.import({
+  await app.db.import({
     directory: path.resolve(__dirname, './collections')
   });

View File

@@ -1,7 +1,7 @@
 import { Application } from '@nocobase/server';
 import Database from '@nocobase/database';
 import { getApp } from '..';
-import { EXECUTION_STATUS, JOB_STATUS, LINK_TYPE } from '../../constants';
+import { EXECUTION_STATUS, BRANCH_INDEX } from '../../constants';
 
@@ -10,21 +10,25 @@ describe('workflow > instructions > condition', () => {
   let db: Database;
   let PostModel;
   let WorkflowModel;
+  let WorkflowRepository;
   let workflow;
 
   beforeEach(async () => {
     app = await getApp();
     db = app.db;
-    WorkflowModel = db.getModel('workflows');
-    PostModel = db.getModel('posts');
+    WorkflowRepository = db.getCollection('workflows').repository;
+    WorkflowModel = db.getCollection('workflows').model;
+    PostModel = db.getCollection('posts').model;
 
-    workflow = await WorkflowModel.create({
-      title: 'condition workflow',
-      enabled: true,
-      type: 'afterCreate',
-      config: {
-        collection: 'posts'
+    workflow = await WorkflowRepository.create({
+      values: {
+        title: 'condition workflow',
+        enabled: true,
+        type: 'afterCreate',
+        config: {
+          collection: 'posts'
+        }
       }
     });
   });
 
@@ -53,15 +57,15 @@ describe('workflow > instructions > condition', () => {
     const n2 = await workflow.createNode({
       title: 'true to echo',
       type: 'echo',
-      linkType: LINK_TYPE.ON_TRUE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_TRUE,
+      upstreamId: n1.id
     });
 
     const n3 = await workflow.createNode({
       title: 'false to echo',
       type: 'echo',
-      linkType: LINK_TYPE.ON_FALSE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_FALSE,
+      upstreamId: n1.id
     });
 
     const post = await PostModel.create({ title: 't1' });
@@ -99,15 +103,15 @@ describe('workflow > instructions > condition', () => {
     await workflow.createNode({
       title: 'true to echo',
       type: 'echo',
-      linkType: LINK_TYPE.ON_TRUE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_TRUE,
+      upstreamId: n1.id
     });
 
     await workflow.createNode({
       title: 'false to echo',
       type: 'echo',
-      linkType: LINK_TYPE.ON_FALSE,
-      upstream_id: n1.id
+      branchIndex: BRANCH_INDEX.ON_FALSE,
+      upstreamId: n1.id
     });
 
     const post = await PostModel.create({ title: 't1' });

View File

@@ -1,4 +1,4 @@
-import { TableOptions } from '@nocobase/database';
+import { CollectionOptions } from '@nocobase/database';
 
 export default {
   name: 'executions',
@@ -30,4 +30,4 @@ export default {
       title: '状态'
     }
   ]
-} as TableOptions;
+} as CollectionOptions;

View File

@@ -1,5 +1,4 @@
-import { TableOptions } from '@nocobase/database';
-import { LINK_TYPE } from '../constants';
+import { CollectionOptions } from '@nocobase/database';
 
 export default {
   name: 'flow_nodes',
@@ -40,18 +39,11 @@ export default {
     // only works when upstream node is branching type, like condition and parallel.
     // put here because the design of flow-links model is not really necessary for now.
     // or it should be put into flow-links model.
-    // if keeps 1:n relactionship, cannot support cycle flow.
     {
       interface: 'select',
-      name: 'linkType',
-      type: 'smallint',
-      title: 'Link Type',
-      dataSource: [
-        { label: 'Default', value: LINK_TYPE.DEFAULT },
-        { label: 'Branched, on true', value: LINK_TYPE.ON_TRUE },
-        { label: 'Branched, on false', value: LINK_TYPE.ON_FALSE },
-        { label: 'Branched, no limit', value: LINK_TYPE.NO_LIMIT }
-      ]
+      name: 'branchIndex',
+      type: 'integer',
+      title: 'branch index'
     },
     // for reasons:
     // 1. redirect type node to solve cycle flow.
@@ -83,4 +75,4 @@ export default {
       defaultValue: {}
     }
   ]
-} as TableOptions;
+} as CollectionOptions;

View File

@@ -1,4 +1,4 @@
-import { TableOptions } from '@nocobase/database';
+import { CollectionOptions } from '@nocobase/database';
 
 export default {
   name: 'jobs',
@@ -45,4 +45,4 @@ export default {
     //   title: 'node snapshot'
     // }
   ]
-} as TableOptions;
+} as CollectionOptions;

View File

@@ -1,4 +1,4 @@
-import { TableOptions } from '@nocobase/database';
+import { CollectionOptions } from '@nocobase/database';
 
 export default {
   name: 'workflows',
@@ -53,4 +53,4 @@ export default {
       title: '触发执行'
     }
   ]
-} as TableOptions;
+} as CollectionOptions;

View File

@@ -12,9 +12,8 @@ export const JOB_STATUS = {
   CANCELLED: -2
 };
 
-export const LINK_TYPE = {
+export const BRANCH_INDEX = {
   DEFAULT: null,
   ON_TRUE: 1,
-  ON_FALSE: 0,
-  NO_LIMIT: -1
+  ON_FALSE: 0
 };

View File

@@ -82,12 +82,12 @@ export default {
       status: JOB_STATUS.RESOLVED,
       result,
       // TODO(optimize): try unify the building of job
-      node_id: this.id,
-      upstream_id: prevJob instanceof Sequelize.Model ? prevJob.get('id') : null
+      nodeId: this.id,
+      upstreamId: prevJob instanceof Sequelize.Model ? prevJob.get('id') : null
     };
 
     const branchNode = execution.nodes
-      .find(item => item.upstream === this && item.linkType === Number(result));
+      .find(item => item.upstream === this && Boolean(item.branchIndex) === result);
 
     if (!branchNode) {
       return job;
View File

@@ -1,7 +1,5 @@
-// something like template for type of nodes
-
-import { ModelCtor, Model } from "@nocobase/database";
-import { ExecutionModel } from "../models/Execution";
+import ExecutionModel from "../models/Execution";
+import FlowNodeModel from "../models/FlowNode";
 
 import prompt from './prompt';
 import condition from './condition';
@@ -19,13 +17,13 @@ export type InstructionResult = Job | Promise<Job>;
 //   - base on input and context, do any calculations or system call (io), and produce a result or pending.
 export interface Instruction {
   run(
-    this: ModelCtor<Model>,
+    this: FlowNodeModel,
     // what should input to be?
     // - just use previously output result for convenience?
     input: any,
     // what should context to be?
     // - could be the workflow execution object (containing context data)
-    execution: ModelCtor<ExecutionModel>
+    execution: ExecutionModel
   ): InstructionResult;
 
   // for start node in main flow (or branch) to resume when manual sub branch triggered
   resume?(): InstructionResult
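
For context, a hypothetical example (not from this commit) of what an object implementing the retyped interface looks like — roughly the shape of the `echo` instruction the tests rely on, assuming `JOB_STATUS` comes from '../constants' and that a plain job payload is an acceptable return value:

import { JOB_STATUS } from '../constants';

// Hypothetical minimal instruction: resolve immediately and pass the previous result through.
const echo: Instruction = {
  run(input, execution) {
    return {
      status: JOB_STATUS.RESOLVED,
      result: input?.result
    };
  }
};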

View File

@@ -1,13 +1,50 @@
-import Sequelize from 'sequelize';
-import { Model, ModelCtor } from '@nocobase/database';
+import {
+  Model,
+  BelongsToGetAssociationMixin,
+  Optional,
+  HasManyGetAssociationsMixin
+} from 'sequelize';
+import Database from '@nocobase/database';
 
 import { EXECUTION_STATUS, JOB_STATUS } from '../constants';
 import { getInstruction } from '../instructions';
+import WorkflowModel from './Workflow';
+import FlowNodeModel from './FlowNode';
+import JobModel from './Job';
 
-export class ExecutionModel extends Model {
-  nodes: Array<any> = [];
-  nodesMap = new Map();
-  jobsMap = new Map();
+interface ExecutionAttributes {
+  id: number;
+  title: string;
+  context: any;
+  status: number;
+}
+
+interface ExecutionCreationAttributes extends Optional<ExecutionAttributes, 'id'> {}
+
+export default class ExecutionModel
+  extends Model<ExecutionAttributes, ExecutionCreationAttributes>
+  implements ExecutionAttributes {
+
+  declare readonly database: Database;
+
+  declare id: number;
+  declare title: string;
+  declare context: any;
+  declare status: number;
+
+  declare createdAt: Date;
+  declare updatedAt: Date;
+
+  declare workflow?: WorkflowModel;
+  declare getWorkflow: BelongsToGetAssociationMixin<WorkflowModel>;
+
+  declare jobs?: JobModel[];
+  declare getJobs: HasManyGetAssociationsMixin<JobModel>;
+
+  nodes: Array<FlowNodeModel> = [];
+  nodesMap = new Map<number, FlowNodeModel>();
+  jobsMap = new Map<number, JobModel>();
 
   // make dual linked nodes list then cache
   makeNodes(nodes = []) {
@@ -18,17 +55,17 @@ export class ExecutionModel extends Model {
     });
 
     nodes.forEach(node => {
-      if (node.upstream_id) {
-        node.upstream = this.nodesMap.get(node.upstream_id);
+      if (node.upstreamId) {
+        node.upstream = this.nodesMap.get(node.upstreamId);
       }
 
-      if (node.downstream_id) {
-        node.downstream = this.nodesMap.get(node.downstream_id);
+      if (node.downstreamId) {
+        node.downstream = this.nodesMap.get(node.downstreamId);
       }
     });
   }
 
-  makeJobs(jobs: Array<ModelCtor<Model>>) {
+  makeJobs(jobs: Array<JobModel>) {
     jobs.forEach(job => {
       this.jobsMap.set(job.id, job);
     });
@@ -55,7 +92,7 @@ export class ExecutionModel extends Model {
   async start(options) {
     await this.prepare();
     if (!this.nodes.length) {
-      return this.exit(null);
+      return this.exit();
     }
     const head = this.nodes.find(item => !item.upstream);
     return this.exec(head, { result: this.context });
@@ -63,7 +100,7 @@ export class ExecutionModel extends Model {
   async resume(job, options) {
     await this.prepare();
 
-    const node = this.nodesMap.get(job.node_id);
+    const node = this.nodesMap.get(job.nodeId);
     return this.recall(node, job);
   }
@@ -79,7 +116,7 @@ export class ExecutionModel extends Model {
       status: JOB_STATUS.REJECTED
     };
     // if previous job is from resuming
-    if (prevJob && prevJob.node_id === node.id) {
+    if (prevJob && prevJob.nodeId === node.id) {
       prevJob.set(job);
       job = prevJob;
     }
@@ -88,13 +125,13 @@ export class ExecutionModel extends Model {
     let savedJob;
     // TODO(optimize): many checking of resuming or new could be improved
     // could be implemented separately in exec() / resume()
-    if (job instanceof Sequelize.Model) {
+    if (job instanceof Model) {
       savedJob = await job.save();
     } else {
-      const upstream_id = prevJob instanceof Sequelize.Model ? prevJob.get('id') : null;
+      const upstreamId = prevJob instanceof Model ? prevJob.get('id') : null;
       savedJob = await this.saveJob({
-        node_id: node.id,
-        upstream_id,
+        nodeId: node.id,
+        upstreamId,
         ...job
       });
     }
@@ -136,7 +173,7 @@ export class ExecutionModel extends Model {
       return this.run(resume, node, job);
     }
 
-  async exit(job) {
+  async exit(job?: JobModel) {
     const executionStatusMap = {
       [JOB_STATUS.PENDING]: EXECUTION_STATUS.STARTED,
       [JOB_STATUS.RESOLVED]: EXECUTION_STATUS.RESOLVED,
@@ -150,29 +187,30 @@ export class ExecutionModel extends Model {
   // TODO(optimize)
   async saveJob(payload) {
-    const JobModel = this.database.getModel('jobs');
-    const [result] = await JobModel.upsert({
+    // @ts-ignore
+    const { database } = this.constructor;
+    const { model } = database.getCollection('jobs');
+    const [result] = await model.upsert({
       ...payload,
-      execution_id: this.id
-    });
+      executionId: this.id
+    }) as [JobModel, boolean | null];
     this.jobsMap.set(result.id, result);
     return result;
   }
 
-  findBranchParentNode(node): any {
+  findBranchParentNode(node: FlowNodeModel): FlowNodeModel | null {
     for (let n = node; n; n = n.upstream) {
-      if (n.linkType !== null) {
+      if (n.branchIndex !== null) {
        return n.upstream;
      }
    }
    return null;
  }
 
-  findBranchParentJob(job, node) {
-    for (let j = job; j; j = this.jobsMap.get(j.upstream_id)) {
-      if (j.node_id === node.id) {
+  findBranchParentJob(job: JobModel, node: FlowNodeModel): JobModel | null {
+    for (let j = job; j; j = this.jobsMap.get(j.upstreamId)) {
+      if (j.nodeId === node.id) {
         return j;
       }
     }
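
To make the renamed keys concrete (illustration only, with simplified plain-object nodes rather than FlowNodeModel instances): makeNodes() wires the loaded nodes into a doubly linked list via `upstreamId`/`downstreamId`, and start() then walks from the only node that has no upstream.

// Sketch of the linking performed by makeNodes().
const nodes: any[] = [
  { id: 1, upstreamId: null, downstreamId: 2 },
  { id: 2, upstreamId: 1, downstreamId: null },
];

const nodesMap = new Map(nodes.map(node => [node.id, node]));

nodes.forEach(node => {
  if (node.upstreamId) {
    node.upstream = nodesMap.get(node.upstreamId);
  }
  if (node.downstreamId) {
    node.downstream = nodesMap.get(node.downstreamId);
  }
});

// start() picks the head as the only node without an upstream.
const head = nodes.find(item => !item.upstream);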

View File

@@ -0,0 +1,19 @@
+import { Model, BelongsToGetAssociationMixin } from 'sequelize';
+import WorkflowModel from './Workflow';
+
+export default class FlowNodeModel extends Model {
+  declare id: number;
+  declare title: string;
+  declare branchIndex: null | number;
+  declare type: string;
+  declare config: any;
+
+  declare createdAt: Date;
+  declare updatedAt: Date;
+
+  declare upstream: FlowNodeModel;
+  declare downstream: FlowNodeModel;
+
+  declare workflow?: WorkflowModel;
+  declare getWorkflow: BelongsToGetAssociationMixin<WorkflowModel>;
+}

View File

@@ -0,0 +1,18 @@
+import { Model, BelongsToGetAssociationMixin } from 'sequelize';
+import FlowNodeModel from './FlowNode';
+
+export default class JobModel extends Model {
+  declare id: number;
+  declare status: number;
+  declare result: any;
+
+  declare createdAt: Date;
+  declare updatedAt: Date;
+
+  declare upstreamId: number;
+  declare upstream: JobModel;
+
+  declare nodeId: number;
+  declare node?: FlowNodeModel;
+  declare getNode: BelongsToGetAssociationMixin<FlowNodeModel>;
+}

View File

@@ -1,15 +1,41 @@
-import { Model } from '@nocobase/database';
+import { Model, HasManyGetAssociationsMixin, HasManyCreateAssociationMixin } from 'sequelize';
+import Database from '@nocobase/database';
 
 import { get as getTrigger } from '../triggers';
 import { EXECUTION_STATUS } from '../constants';
+import ExecutionModel from './Execution';
+import FlowNodeModel from './FlowNode';
 
-export class WorkflowModel extends Model {
+export default class WorkflowModel extends Model {
+  declare static database: Database;
+
+  declare id: number;
+  declare title: string;
+  declare enabled: boolean;
+  declare description?: string;
+  declare type: string;
+  declare config: any;
+
+  declare createdAt: Date;
+  declare updatedAt: Date;
+
+  declare nodes: FlowNodeModel[];
+  declare getNodes: HasManyGetAssociationsMixin<FlowNodeModel>;
+  declare createNode: HasManyCreateAssociationMixin<FlowNodeModel>;
+
+  declare executions: ExecutionModel[];
+  declare getExecutions: HasManyGetAssociationsMixin<ExecutionModel>;
+  declare createExecution: HasManyCreateAssociationMixin<ExecutionModel>;
+
   static async mount() {
-    const workflows = await this.findAll({
-      where: { enabled: true }
+    const collection = this.database.getCollection('workflows');
+    const workflows = await collection.repository.find({
+      filter: { enabled: true }
     });
 
     workflows.forEach(workflow => {
+      // @ts-ignore
       workflow.mount();
     });
@@ -42,10 +68,10 @@ export class WorkflowModel extends Model {
       context,
       status: EXECUTION_STATUS.STARTED
     });
 
-    execution.setDataValue('workflow', this);
     execution.workflow = this;
-    await execution.start(null, null, options);
+    await execution.start(options);
 
     return execution;
   }
 }
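
The mount() change above moves the query from `Model.findAll({ where })` to the 0.6 repository API. A small sketch of the equivalent standalone call, assuming `db` is the plugin's Database instance:

// Sketch only: querying enabled workflows through the collection repository (0.6 style).
const collection = db.getCollection('workflows');
const enabledWorkflows = await collection.repository.find({
  filter: { enabled: true } // repositories use `filter` where Sequelize models used `where`
});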

View File

@@ -1,21 +1,19 @@
 import path from 'path';
 
-import { registerModels } from '@nocobase/database';
-
-import { WorkflowModel } from './models/Workflow';
-import { ExecutionModel } from './models/Execution';
+import WorkflowModel from './models/Workflow';
+import ExecutionModel from './models/Execution';
 
 export default {
   name: 'workflow',
 
   async load(options = {}) {
     const { db } = this.app;
 
-    registerModels({
+    db.registerModels({
       WorkflowModel,
       ExecutionModel,
     });
 
-    db.import({
+    await db.import({
       directory: path.resolve(__dirname, 'collections'),
     });
@@ -24,8 +22,8 @@ export default {
     // * add all hooks for enabled workflows
     // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
     this.app.on('beforeStart', async () => {
-      const Workflow = db.getModel('workflows');
-      await Workflow.mount();
+      const { model } = db.getCollection('workflows');
+      await model.mount();
     })
 
     // [Life Cycle]: initialize all necessary seed data

View File

@@ -1,10 +1,14 @@
+import WorkflowModel from "../models/Workflow";
+
 export interface IDataChangeTriggerConfig {
   collection: string;
   // TODO: ICondition
   filter: any;
 }
 
-export function afterCreate(config: IDataChangeTriggerConfig, callback: Function) {
-  const Model = this.database.getModel(config.collection);
-  Model.addHook('afterCreate', `workflow-${this.get('id')}`, (data: typeof Model, options) => callback({ data }, options));
+export function afterCreate(this: WorkflowModel, config: IDataChangeTriggerConfig, callback: Function) {
+  // @ts-ignore
+  const { database } = this.constructor;
+  const { model } = database.getCollection(config.collection);
+  model.addHook('afterCreate', `workflow-${this.get('id')}`, (data: any, options) => callback({ data }, options));
 }
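
What the trigger binding above does at the Sequelize level, sketched with an assumed workflow id of 1 and target collection 'posts' (`database` and `callback` stand for the values in scope inside afterCreate()): it registers a named afterCreate hook on the collection's model, so the same name can later key its removal via Sequelize's removeHook when the workflow is disabled.

// Sketch only.
const { model } = database.getCollection('posts');

model.addHook('afterCreate', 'workflow-1', (data, options) => {
  // hand the newly created record to the workflow engine
  callback({ data }, options);
});

// presumably on disable, the same name keys the removal:
model.removeHook('afterCreate', 'workflow-1');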

View File

@@ -1,9 +1,8 @@
-import { ModelCtor } from '@nocobase/database';
-import { WorkflowModel } from '../models/Workflow';
+import WorkflowModel from '../models/Workflow';
 import * as dataChangeTriggers from './data-change';
 
 export interface ITrigger {
-  (this: ModelCtor<WorkflowModel>, config: any): void
+  (this: WorkflowModel, config: any): void
 }
 
 const triggers = new Map<string, ITrigger>();

View File

@@ -1,7 +1,6 @@
 import { get } from 'lodash';
-import { ModelCtor } from '@nocobase/database';
-import { ExecutionModel } from '../models/Execution';
+import ExecutionModel from '../models/Execution';
 
 export type OperandType = 'context' | 'input' | 'job';
@@ -36,7 +35,7 @@ export type JobOperand = {
 export type Operand = ContextOperand | InputOperand | JobOperand | ConstantOperand;
 
 // TODO: other instructions may also use this method, could be moved to utils.
-export function getValue(operand: Operand, input: any, execution: ModelCtor<ExecutionModel>) {
+export function getValue(operand: Operand, input: any, execution: ExecutionModel) {
   switch (operand.type) {
     // from execution context
     case 'context':
@@ -47,7 +46,7 @@ export function getValue(operand: Operand, input: any, execution: ModelCtor<ExecutionModel>) {
     // from job in execution
     case 'job':
       // assume jobs have been fetched from execution before
-      const job = execution.jobs.find(item => item.id === operand.options.id);
+      const job = execution.jobsMap.get(operand.options.id);
       return get(job, operand.options.path);
     // constant
     default: