chore(plugin-workflow): migrate from 0.5 to 0.6
parent 4249047318
commit e592d03f18
@@ -1,16 +1,12 @@
{
"name": "@nocobase/plugin-workflow",
"version": "0.5.0-alpha.37",
"version": "0.6.0-alpha.0",
"main": "lib/index.js",
"private": true,
"license": "MIT",
"dependencies": {
"@nocobase/server": "^0.5.0-alpha.37",
"json-templates": "^4.1.0",
"node-schedule": "^2.0.0"
},
"devDependencies": {
"@types/node-schedule": "^1.3.1"
},
"gitHead": "f0b335ac30f29f25c95d7d137655fa64d8d67f1e"
}
@@ -1,4 +1,4 @@
import { TableOptions } from '@nocobase/database';
import { CollectionOptions } from '@nocobase/database';

export default {
name: 'posts',
@@ -12,4 +12,4 @@ export default {
name: 'published',
}
]
} as TableOptions;
} as CollectionOptions;
@@ -1,4 +1,4 @@
import { TableOptions } from '@nocobase/database';
import { CollectionOptions } from '@nocobase/database';

export default {
name: 'targets',
@@ -12,4 +12,4 @@ export default {
name: 'col2',
}
],
} as TableOptions;
} as CollectionOptions;
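Note: the change running through all the collection files in this commit is a pure type rename — definitions are typed as CollectionOptions instead of TableOptions while the shape of the export stays the same. A minimal sketch of a 0.6-style collection file (the single string field is a placeholder for illustration, not taken from this diff):

import { CollectionOptions } from '@nocobase/database';

export default {
  name: 'posts',
  fields: [
    // placeholder field; the real file defines more fields, including `published`
    { type: 'string', name: 'title' },
  ],
} as CollectionOptions;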
@@ -1,7 +1,7 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '.';
import { EXECUTION_STATUS, JOB_STATUS, LINK_TYPE } from '../constants';
import { BRANCH_INDEX, EXECUTION_STATUS, JOB_STATUS } from '../constants';

jest.setTimeout(300000);

@@ -10,22 +10,28 @@ describe('execution', () => {
let db: Database;
let PostModel;
let WorkflowModel;
let WorkflowRepository;
let workflow;

beforeEach(async () => {
app = await getApp();

db = app.db;
WorkflowModel = db.getModel('workflows');
PostModel = db.getModel('posts');
// Target = db.getModel('targets');
WorkflowRepository = db.getCollection('workflows').repository;
WorkflowModel = db.getCollection('workflows').model;
PostModel = db.getCollection('posts').model;

workflow = await WorkflowModel.create({
title: 'test workflow',
enabled: true,
type: 'afterCreate',
config: {
collection: 'posts'
// TODO(question): why the hooks of creating workflow won't run by using `WorkflowModel.create()`?
// maybe the model is not the original defined one which hooks have been added.
// @see database/../collections.ts@L99: `this.model = class extends M {};`
workflow = await WorkflowRepository.create({
values: {
title: 'condition workflow',
enabled: true,
type: 'afterCreate',
config: {
collection: 'posts'
}
}
});
});
@@ -86,7 +92,7 @@ describe('execution', () => {
const n2 = await workflow.createNode({
title: 'echo 2',
type: 'echo',
upstream_id: n1.id
upstreamId: n1.id
});

await n1.setDownstream(n2);
@@ -133,7 +139,7 @@ describe('execution', () => {
const n2 = await workflow.createNode({
title: 'echo',
type: 'echo',
upstream_id: n1.id
upstreamId: n1.id
});

await n1.setDownstream(n2);
@@ -166,7 +172,7 @@ describe('execution', () => {
const n2 = await workflow.createNode({
title: 'echo',
type: 'echo',
upstream_id: n1.id
upstreamId: n1.id
});
await n1.setDownstream(n2);

@@ -200,15 +206,15 @@ describe('execution', () => {
const n2 = await workflow.createNode({
title: 'true to echo',
type: 'echo',
linkType: LINK_TYPE.ON_TRUE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

await workflow.createNode({
title: 'false to echo',
type: 'echo',
linkType: LINK_TYPE.ON_FALSE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_FALSE,
upstreamId: n1.id
});

const post = await PostModel.create({ title: 't1' });
@@ -218,8 +224,8 @@ describe('execution', () => {

const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
expect(jobs.length).toEqual(2);
expect(jobs[0].node_id).toEqual(n1.id);
expect(jobs[1].node_id).toEqual(n2.id);
expect(jobs[0].nodeId).toEqual(n1.id);
expect(jobs[1].nodeId).toEqual(n2.id);
expect(jobs[1].result).toEqual(true);
});

@@ -233,14 +239,14 @@ describe('execution', () => {
const n2 = await workflow.createNode({
title: 'manual',
type: 'prompt',
linkType: LINK_TYPE.ON_TRUE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

const n3 = await workflow.createNode({
title: 'echo input value',
type: 'echo',
upstream_id: n1.id
upstreamId: n1.id
});

await n1.setDownstream(n3);
@@ -250,7 +256,7 @@ describe('execution', () => {
const [execution] = await workflow.getExecutions();
expect(execution.status).toEqual(EXECUTION_STATUS.STARTED);

const [pending] = await execution.getJobs({ node_id: n2.id });
const [pending] = await execution.getJobs({ nodeId: n2.id });
pending.set('result', 123);
await execution.resume(pending);

@@ -268,14 +274,14 @@ describe('execution', () => {
const n2 = await workflow.createNode({
title: 'manual',
type: 'prompt->error',
linkType: LINK_TYPE.ON_TRUE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

const n3 = await workflow.createNode({
title: 'echo input value',
type: 'echo',
upstream_id: n1.id
upstreamId: n1.id
});

await n1.setDownstream(n3);
@@ -285,7 +291,7 @@ describe('execution', () => {
const [execution] = await workflow.getExecutions();
expect(execution.status).toEqual(EXECUTION_STATUS.STARTED);

const [pending] = await execution.getJobs({ node_id: n2.id });
const [pending] = await execution.getJobs({ nodeId: n2.id });
pending.set('result', 123);
await execution.resume(pending);
expect(execution.status).toEqual(EXECUTION_STATUS.REJECTED);
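Note: the test setup above captures the two API moves this migration is about — models and repositories are now obtained per collection via db.getCollection(name).model / .repository rather than db.getModel(name), and records are created through the repository with the payload wrapped in `values` (which, per the TODO comment, is also what lets the workflow's creation hooks fire). A condensed sketch of the new pattern, using the same collections as the tests:

const workflows = db.getCollection('workflows');
const WorkflowRepository = workflows.repository;
const WorkflowModel = workflows.model;

// repository.create() takes a { values } wrapper in 0.6
const workflow = await WorkflowRepository.create({
  values: {
    title: 'test workflow',
    enabled: true,
    type: 'afterCreate',
    config: { collection: 'posts' },
  },
});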
@@ -36,10 +36,10 @@ export async function getApp(options = {}): Promise<MockServer> {
throw new Error('input failed');
}
});

await app.load();

app.db.import({
await app.db.import({
directory: path.resolve(__dirname, './collections')
});
@@ -1,7 +1,7 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '..';
import { EXECUTION_STATUS, JOB_STATUS, LINK_TYPE } from '../../constants';
import { EXECUTION_STATUS, BRANCH_INDEX } from '../../constants';

@@ -10,21 +10,25 @@ describe('workflow > instructions > condition', () => {
let db: Database;
let PostModel;
let WorkflowModel;
let WorkflowRepository;
let workflow;

beforeEach(async () => {
app = await getApp();

db = app.db;
WorkflowModel = db.getModel('workflows');
PostModel = db.getModel('posts');
WorkflowRepository = db.getCollection('workflows').repository;
WorkflowModel = db.getCollection('workflows').model;
PostModel = db.getCollection('posts').model;

workflow = await WorkflowModel.create({
title: 'condition workflow',
enabled: true,
type: 'afterCreate',
config: {
collection: 'posts'
workflow = await WorkflowRepository.create({
values: {
title: 'condition workflow',
enabled: true,
type: 'afterCreate',
config: {
collection: 'posts'
}
}
});
});
@@ -53,15 +57,15 @@ describe('workflow > instructions > condition', () => {
const n2 = await workflow.createNode({
title: 'true to echo',
type: 'echo',
linkType: LINK_TYPE.ON_TRUE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

const n3 = await workflow.createNode({
title: 'false to echo',
type: 'echo',
linkType: LINK_TYPE.ON_FALSE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_FALSE,
upstreamId: n1.id
});

const post = await PostModel.create({ title: 't1' });
@@ -99,15 +103,15 @@ describe('workflow > instructions > condition', () => {
await workflow.createNode({
title: 'true to echo',
type: 'echo',
linkType: LINK_TYPE.ON_TRUE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_TRUE,
upstreamId: n1.id
});

await workflow.createNode({
title: 'false to echo',
type: 'echo',
linkType: LINK_TYPE.ON_FALSE,
upstream_id: n1.id
branchIndex: BRANCH_INDEX.ON_FALSE,
upstreamId: n1.id
});

const post = await PostModel.create({ title: 't1' });
@@ -1,4 +1,4 @@
import { TableOptions } from '@nocobase/database';
import { CollectionOptions } from '@nocobase/database';

export default {
name: 'executions',
@@ -30,4 +30,4 @@ export default {
title: '状态'
}
]
} as TableOptions;
} as CollectionOptions;
@@ -1,5 +1,4 @@
import { TableOptions } from '@nocobase/database';
import { LINK_TYPE } from '../constants';
import { CollectionOptions } from '@nocobase/database';

export default {
name: 'flow_nodes',
@@ -40,18 +39,11 @@ export default {
// only works when upstream node is branching type, like condition and parallel.
// put here because the design of flow-links model is not really necessary for now.
// or it should be put into flow-links model.
// if keeps 1:n relationship, cannot support cycle flow.
{
interface: 'select',
name: 'linkType',
type: 'smallint',
title: 'Link Type',
dataSource: [
{ label: 'Default', value: LINK_TYPE.DEFAULT },
{ label: 'Branched, on true', value: LINK_TYPE.ON_TRUE },
{ label: 'Branched, on false', value: LINK_TYPE.ON_FALSE },
{ label: 'Branched, no limit', value: LINK_TYPE.NO_LIMIT }
]
name: 'branchIndex',
type: 'integer',
title: 'branch index'
},
// for reasons:
// 1. redirect type node to solve cycle flow.
@@ -83,4 +75,4 @@ export default {
defaultValue: {}
}
]
} as TableOptions;
} as CollectionOptions;
@@ -1,4 +1,4 @@
import { TableOptions } from '@nocobase/database';
import { CollectionOptions } from '@nocobase/database';

export default {
name: 'jobs',
@@ -45,4 +45,4 @@ export default {
// title: 'node snapshot'
// }
]
} as TableOptions;
} as CollectionOptions;
@@ -1,4 +1,4 @@
import { TableOptions } from '@nocobase/database';
import { CollectionOptions } from '@nocobase/database';

export default {
name: 'workflows',
@@ -53,4 +53,4 @@ export default {
title: '触发执行'
}
]
} as TableOptions;
} as CollectionOptions;
@@ -12,9 +12,8 @@ export const JOB_STATUS = {
CANCELLED: -2
};

export const LINK_TYPE = {
export const BRANCH_INDEX = {
DEFAULT: null,
ON_TRUE: 1,
ON_FALSE: 0,
NO_LIMIT: -1
ON_FALSE: 0
};
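Note: LINK_TYPE becomes BRANCH_INDEX — a branch is now addressed by a nullable integer index (null on the trunk) and the NO_LIMIT value is dropped. Node creation changes accordingly, as in the updated tests:

const n2 = await workflow.createNode({
  title: 'true to echo',
  type: 'echo',
  branchIndex: BRANCH_INDEX.ON_TRUE,
  upstreamId: n1.id,
});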
@@ -82,12 +82,12 @@ export default {
status: JOB_STATUS.RESOLVED,
result,
// TODO(optimize): try unify the building of job
node_id: this.id,
upstream_id: prevJob instanceof Sequelize.Model ? prevJob.get('id') : null
nodeId: this.id,
upstreamId: prevJob instanceof Sequelize.Model ? prevJob.get('id') : null
};

const branchNode = execution.nodes
.find(item => item.upstream === this && item.linkType === Number(result));
.find(item => item.upstream === this && Boolean(item.branchIndex) === result);

if (!branchNode) {
return job;
@@ -1,7 +1,5 @@
// something like template for type of nodes

import { ModelCtor, Model } from "@nocobase/database";
import { ExecutionModel } from "../models/Execution";
import ExecutionModel from "../models/Execution";
import FlowNodeModel from "../models/FlowNode";

import prompt from './prompt';
import condition from './condition';
@@ -19,13 +17,13 @@ export type InstructionResult = Job | Promise<Job>;
// - base on input and context, do any calculations or system call (io), and produce a result or pending.
export interface Instruction {
run(
this: ModelCtor<Model>,
this: FlowNodeModel,
// what should input to be?
// - just use previously output result for convenience?
input: any,
// what should context to be?
// - could be the workflow execution object (containing context data)
execution: ModelCtor<ExecutionModel>
execution: ExecutionModel
): InstructionResult;
// for start node in main flow (or branch) to resume when manual sub branch triggered
resume?(): InstructionResult
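Note: with run() now typed against FlowNodeModel (as `this`) and ExecutionModel, an instruction is a plain object implementing this interface. A minimal sketch of the 'echo' instruction the tests rely on — the body and import paths are assumptions inferred from how the tests use it, not code from this commit:

import { JOB_STATUS } from '../constants';
import { Instruction } from '.';

export default {
  run(input, execution) {
    // resolve immediately and pass the incoming value through as the job result
    return {
      status: JOB_STATUS.RESOLVED,
      result: input,
    };
  },
} as Instruction;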
@@ -1,13 +1,50 @@
import Sequelize from 'sequelize';
import { Model, ModelCtor } from '@nocobase/database';
import {
Model,
BelongsToGetAssociationMixin,
Optional,
HasManyGetAssociationsMixin
} from 'sequelize';

import Database from '@nocobase/database';

import { EXECUTION_STATUS, JOB_STATUS } from '../constants';
import { getInstruction } from '../instructions';
import WorkflowModel from './Workflow';
import FlowNodeModel from './FlowNode';
import JobModel from './Job';

export class ExecutionModel extends Model {
nodes: Array<any> = [];
nodesMap = new Map();
jobsMap = new Map();
interface ExecutionAttributes {
id: number;
title: string;
context: any;
status: number;
}

interface ExecutionCreationAttributes extends Optional<ExecutionAttributes, 'id'> {}

export default class ExecutionModel
extends Model<ExecutionAttributes, ExecutionCreationAttributes>
implements ExecutionAttributes {

declare readonly database: Database;

declare id: number;
declare title: string;
declare context: any;
declare status: number;

declare createdAt: Date;
declare updatedAt: Date;

declare workflow?: WorkflowModel;
declare getWorkflow: BelongsToGetAssociationMixin<WorkflowModel>;

declare jobs?: JobModel[];
declare getJobs: HasManyGetAssociationsMixin<JobModel>;

nodes: Array<FlowNodeModel> = [];
nodesMap = new Map<number, FlowNodeModel>();
jobsMap = new Map<number, JobModel>();

// make dual linked nodes list then cache
makeNodes(nodes = []) {
@@ -18,17 +55,17 @@ export class ExecutionModel extends Model {
});

nodes.forEach(node => {
if (node.upstream_id) {
node.upstream = this.nodesMap.get(node.upstream_id);
if (node.upstreamId) {
node.upstream = this.nodesMap.get(node.upstreamId);
}

if (node.downstream_id) {
node.downstream = this.nodesMap.get(node.downstream_id);
if (node.downstreamId) {
node.downstream = this.nodesMap.get(node.downstreamId);
}
});
}

makeJobs(jobs: Array<ModelCtor<Model>>) {
makeJobs(jobs: Array<JobModel>) {
jobs.forEach(job => {
this.jobsMap.set(job.id, job);
});
@@ -55,7 +92,7 @@ export class ExecutionModel extends Model {
async start(options) {
await this.prepare();
if (!this.nodes.length) {
return this.exit(null);
return this.exit();
}
const head = this.nodes.find(item => !item.upstream);
return this.exec(head, { result: this.context });
@@ -63,7 +100,7 @@ export class ExecutionModel extends Model {

async resume(job, options) {
await this.prepare();
const node = this.nodesMap.get(job.node_id);
const node = this.nodesMap.get(job.nodeId);
return this.recall(node, job);
}

@@ -79,7 +116,7 @@ export class ExecutionModel extends Model {
status: JOB_STATUS.REJECTED
};
// if previous job is from resuming
if (prevJob && prevJob.node_id === node.id) {
if (prevJob && prevJob.nodeId === node.id) {
prevJob.set(job);
job = prevJob;
}
@@ -88,13 +125,13 @@ export class ExecutionModel extends Model {
let savedJob;
// TODO(optimize): many checking of resuming or new could be improved
// could be implemented separately in exec() / resume()
if (job instanceof Sequelize.Model) {
if (job instanceof Model) {
savedJob = await job.save();
} else {
const upstream_id = prevJob instanceof Sequelize.Model ? prevJob.get('id') : null;
const upstreamId = prevJob instanceof Model ? prevJob.get('id') : null;
savedJob = await this.saveJob({
node_id: node.id,
upstream_id,
nodeId: node.id,
upstreamId,
...job
});
}
@@ -136,7 +173,7 @@ export class ExecutionModel extends Model {
return this.run(resume, node, job);
}

async exit(job) {
async exit(job?: JobModel) {
const executionStatusMap = {
[JOB_STATUS.PENDING]: EXECUTION_STATUS.STARTED,
[JOB_STATUS.RESOLVED]: EXECUTION_STATUS.RESOLVED,
@@ -150,29 +187,30 @@ export class ExecutionModel extends Model {

// TODO(optimize)
async saveJob(payload) {
const JobModel = this.database.getModel('jobs');
const [result] = await JobModel.upsert({
// @ts-ignore
const { database } = this.constructor;
const { model } = database.getCollection('jobs');
const [result] = await model.upsert({
...payload,
execution_id: this.id
});

executionId: this.id
}) as [JobModel, boolean | null];
this.jobsMap.set(result.id, result);

return result;
}

findBranchParentNode(node): any {
findBranchParentNode(node: FlowNodeModel): FlowNodeModel | null {
for (let n = node; n; n = n.upstream) {
if (n.linkType !== null) {
if (n.branchIndex !== null) {
return n.upstream;
}
}
return null;
}

findBranchParentJob(job, node) {
for (let j = job; j; j = this.jobsMap.get(j.upstream_id)) {
if (j.node_id === node.id) {
findBranchParentJob(job: JobModel, node: FlowNodeModel): JobModel | null {
for (let j = job; j; j = this.jobsMap.get(j.upstreamId)) {
if (j.nodeId === node.id) {
return j;
}
}
packages/plugin-workflow/src/models/FlowNode.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import { Model, BelongsToGetAssociationMixin } from 'sequelize';
import WorkflowModel from './Workflow';

export default class FlowNodeModel extends Model {
declare id: number;
declare title: string;
declare branchIndex: null | number;
declare type: string;
declare config: any;

declare createdAt: Date;
declare updatedAt: Date;

declare upstream: FlowNodeModel;
declare downstream: FlowNodeModel;

declare workflow?: WorkflowModel;
declare getWorkflow: BelongsToGetAssociationMixin<WorkflowModel>;
}
packages/plugin-workflow/src/models/Job.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import { Model, BelongsToGetAssociationMixin } from 'sequelize';
import FlowNodeModel from './FlowNode';

export default class JobModel extends Model {
declare id: number;
declare status: number;
declare result: any;

declare createdAt: Date;
declare updatedAt: Date;

declare upstreamId: number;
declare upstream: JobModel;

declare nodeId: number;
declare node?: FlowNodeModel;
declare getNode: BelongsToGetAssociationMixin<FlowNodeModel>;
}
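Note: the two new model files above give flow nodes and jobs concrete, typed Sequelize classes, so code and tests can use the camelCase attributes (nodeId, upstreamId) directly instead of the old snake_case column names. For example, the resume flow in the tests now reads:

const [pending] = await execution.getJobs({ nodeId: n2.id });
pending.set('result', 123);
await execution.resume(pending);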
@@ -1,15 +1,41 @@
import { Model } from '@nocobase/database';
import { Model, HasManyGetAssociationsMixin, HasManyCreateAssociationMixin } from 'sequelize';

import Database from '@nocobase/database';

import { get as getTrigger } from '../triggers';
import { EXECUTION_STATUS } from '../constants';
import ExecutionModel from './Execution';
import FlowNodeModel from './FlowNode';

export default class WorkflowModel extends Model {
declare static database: Database;

declare id: number;
declare title: string;
declare enabled: boolean;
declare description?: string;
declare type: string;
declare config: any;

declare createdAt: Date;
declare updatedAt: Date;

declare nodes: FlowNodeModel[];
declare getNodes: HasManyGetAssociationsMixin<FlowNodeModel>;
declare createNode: HasManyCreateAssociationMixin<FlowNodeModel>;

declare executions: ExecutionModel[];
declare getExecutions: HasManyGetAssociationsMixin<ExecutionModel>;
declare createExecution: HasManyCreateAssociationMixin<ExecutionModel>;

export class WorkflowModel extends Model {
static async mount() {
const workflows = await this.findAll({
where: { enabled: true }
const collection = this.database.getCollection('workflows');
const workflows = await collection.repository.find({
filter: { enabled: true }
});

workflows.forEach(workflow => {
// @ts-ignore
workflow.mount();
});

@@ -42,10 +68,10 @@ export class WorkflowModel extends Model {
context,
status: EXECUTION_STATUS.STARTED
});
execution.setDataValue('workflow', this);

execution.workflow = this;

await execution.start(null, null, options);
await execution.start(options);
return execution;
}
}
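Note: WorkflowModel.mount() switches from Model.findAll({ where }) to the collection repository's find({ filter }). A condensed sketch based on the hunk above (closing brace added for readability):

static async mount() {
  const collection = this.database.getCollection('workflows');
  const workflows = await collection.repository.find({
    filter: { enabled: true },
  });

  workflows.forEach(workflow => {
    // @ts-ignore
    workflow.mount();
  });
}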
@@ -1,21 +1,19 @@
import path from 'path';

import { registerModels } from '@nocobase/database';

import { WorkflowModel } from './models/Workflow';
import { ExecutionModel } from './models/Execution';
import WorkflowModel from './models/Workflow';
import ExecutionModel from './models/Execution';

export default {
name: 'workflow',
async load(options = {}) {
const { db } = this.app;

registerModels({
db.registerModels({
WorkflowModel,
ExecutionModel,
});

db.import({
await db.import({
directory: path.resolve(__dirname, 'collections'),
});

@@ -24,8 +22,8 @@ export default {
// * add all hooks for enabled workflows
// * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
this.app.on('beforeStart', async () => {
const Workflow = db.getModel('workflows');
await Workflow.mount();
const { model } = db.getCollection('workflows');
await model.mount();
})

// [Life Cycle]: initialize all necessary seed data
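Note: the plugin's load() now registers the model classes on the db instance (db.registerModels instead of the standalone registerModels import) and awaits db.import(), which is asynchronous in 0.6 — the same `await app.db.import(...)` change appears in the test helper earlier in this diff. A condensed sketch of the resulting load():

async load(options = {}) {
  const { db } = this.app;

  db.registerModels({
    WorkflowModel,
    ExecutionModel,
  });

  await db.import({
    directory: path.resolve(__dirname, 'collections'),
  });

  // mount hooks for all enabled workflows before the app starts
  this.app.on('beforeStart', async () => {
    const { model } = db.getCollection('workflows');
    await model.mount();
  });
}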
@@ -1,10 +1,14 @@
import WorkflowModel from "../models/Workflow";

export interface IDataChangeTriggerConfig {
collection: string;
// TODO: ICondition
filter: any;
}

export function afterCreate(config: IDataChangeTriggerConfig, callback: Function) {
const Model = this.database.getModel(config.collection);
Model.addHook('afterCreate', `workflow-${this.get('id')}`, (data: typeof Model, options) => callback({ data }, options));
export function afterCreate(this: WorkflowModel, config: IDataChangeTriggerConfig, callback: Function) {
// @ts-ignore
const { database } = this.constructor;
const { model } = database.getCollection(config.collection);
model.addHook('afterCreate', `workflow-${this.get('id')}`, (data: any, options) => callback({ data }, options));
}
@@ -1,9 +1,8 @@
import { ModelCtor } from '@nocobase/database';
import { WorkflowModel } from '../models/Workflow';
import WorkflowModel from '../models/Workflow';
import * as dataChangeTriggers from './data-change';

export interface ITrigger {
(this: ModelCtor<WorkflowModel>, config: any): void
(this: WorkflowModel, config: any): void
}

const triggers = new Map<string, ITrigger>();
@@ -1,7 +1,6 @@
import { get } from 'lodash';

import { ModelCtor } from '@nocobase/database';
import { ExecutionModel } from '../models/Execution';
import ExecutionModel from '../models/Execution';

export type OperandType = 'context' | 'input' | 'job';

@@ -36,7 +35,7 @@ export type JobOperand = {
export type Operand = ContextOperand | InputOperand | JobOperand | ConstantOperand;

// TODO: other instructions may also use this method, could be moved to utils.
export function getValue(operand: Operand, input: any, execution: ModelCtor<ExecutionModel>) {
export function getValue(operand: Operand, input: any, execution: ExecutionModel) {
switch (operand.type) {
// from execution context
case 'context':
@@ -47,7 +46,7 @@ export function getValue(operand: Operand, input: any, execution: ModelCtor<Exec
// from job in execution
case 'job':
// assume jobs have been fetched from execution before
const job = execution.jobs.find(item => item.id === operand.options.id);
const job = execution.jobsMap.get(operand.options.id);
return get(job, operand.options.path);
// constant
default: