feat(plugin-workflow): add more instructions (#201)
* test(plugin-workflow): add getter cases
* feat(plugin-workflow): add query type for node instruction
* feat(plugin-workflow): add crud instructions for node type
* fix(plugin-workflow): fix transaction
* fix(plugin-workflow): fix context data of model trigger
This commit is contained in:
parent a0ebd36e5a
commit 46afc05898
@@ -63,6 +63,7 @@
     "jest-localstorage-mock": "^2.3.0",
     "jest-styled-components": "6.3.3",
     "jest-watch-lerna-packages": "^1.1.0",
+    "json-templates": "^4.2.0",
     "koa": "^2.13.4",
     "koa-bodyparser": "^4.3.0",
     "lerna": "^4.0.0",
@@ -24,7 +24,7 @@ export function getConfigByEnv() {
     port: process.env.DB_PORT,
     dialect: process.env.DB_DIALECT,
     logging: process.env.DB_LOG_SQL === 'on' ? console.log : false,
-    storage: process.env.DB_STORAGE ? resolve(process.cwd(), process.env.DB_STORAGE) : ':memory:',
+    storage: process.env.DB_STORAGE && process.env.DB_STORAGE !== ':memory:' ? resolve(process.cwd(), process.env.DB_STORAGE) : ':memory:',
     define: {
       charset: 'utf8mb4',
       collate: 'utf8mb4_unicode_ci',
@@ -13,8 +13,7 @@
   "dependencies": {
     "@nocobase/acl": "0.6.0-alpha.0",
     "@nocobase/database": "0.6.0-alpha.0",
-    "@nocobase/server": "0.6.0-alpha.0",
-    "json-templates": "^4.2.0"
+    "@nocobase/server": "0.6.0-alpha.0"
   },
   "repository": {
     "type": "git",
@@ -1,15 +1,16 @@
 import { CollectionOptions } from '@nocobase/database';

 export default {
-  name: 'targets',
+  name: 'approvals',
   fields: [
     {
-      type: 'string',
-      name: 'col1',
+      type: 'belongsTo',
+      name: 'post',
     },
     {
-      type: 'string',
-      name: 'col2',
+      type: 'integer',
+      name: 'status',
+      defaultValue: 0
     }
   ],
 } as CollectionOptions;
@@ -10,6 +10,7 @@ export default {
     {
       type: 'boolean',
       name: 'published',
+      defaultValue: false
     }
   ]
 } as CollectionOptions;
@@ -0,0 +1,55 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '..';


describe('workflow > instructions > create', () => {
  let app: Application;
  let db: Database;
  let PostModel;
  let WorkflowModel;
  let workflow;

  beforeEach(async () => {
    app = await getApp();

    db = app.db;
    WorkflowModel = db.getCollection('workflows').model;
    PostModel = db.getCollection('posts').model;

    workflow = await WorkflowModel.create({
      title: 'test workflow',
      enabled: true,
      type: 'model',
      config: {
        mode: 1,
        collection: 'posts'
      }
    });
  });

  afterEach(() => db.close());

  describe('create one', () => {
    it('params: from context', async () => {
      const n1 = await workflow.createNode({
        type: 'create',
        config: {
          collection: 'approvals',
          params: {
            values: {
              postId: '{{$context.data.id}}'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.postId).toBe(post.id);
    });
  });
});
@@ -0,0 +1,58 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '..';


describe('workflow > instructions > create', () => {
  let app: Application;
  let db: Database;
  let PostModel;
  let WorkflowModel;
  let workflow;

  beforeEach(async () => {
    app = await getApp();

    db = app.db;
    WorkflowModel = db.getCollection('workflows').model;
    PostModel = db.getCollection('posts').model;

    workflow = await WorkflowModel.create({
      title: 'test workflow',
      enabled: true,
      type: 'model',
      config: {
        mode: 1,
        collection: 'posts'
      }
    });
  });

  afterEach(() => db.close());

  describe('destroy one', () => {
    it('params: from context', async () => {
      const n1 = await workflow.createNode({
        type: 'destroy',
        config: {
          collection: 'posts',
          params: {
            filter: {
              id: '{{$context.data.id}}'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result).toBe(1);

      const count = await PostModel.count();
      expect(count).toBe(0);
    });
  });
});
@@ -0,0 +1,218 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '..';


describe('workflow > instructions > query', () => {
  let app: Application;
  let db: Database;
  let PostModel;
  let WorkflowModel;
  let workflow;

  beforeEach(async () => {
    app = await getApp();

    db = app.db;
    WorkflowModel = db.getCollection('workflows').model;
    PostModel = db.getCollection('posts').model;

    workflow = await WorkflowModel.create({
      title: 'test workflow',
      enabled: true,
      type: 'model',
      config: {
        mode: 1,
        collection: 'posts'
      }
    });
  });

  afterEach(() => db.close());

  describe('query one', () => {
    it('params: empty', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts'
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.title).toBe(post.title);
    });

    it('params.filter: match', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          params: {
            filter: {
              title: 't1'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.title).toBe(post.title);
    });

    it('params.filter: unmatch', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          params: {
            filter: {
              title: 't2'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result).toBe(null);
    });

    it('params.filter: value from context', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          params: {
            filter: {
              title: '{{$context.data.title}}'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.title).toBe(post.title);
    });

    it('params.filter: value from job of node', async () => {
      const n1 = await workflow.createNode({
        type: 'echo'
      });
      const n2 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          params: {
            filter: {
              title: `{{$jobsMapByNodeId.${n1.id}.data.title}}`
            }
          }
        },
        upstreamId: n1.id
      });
      await n1.setDownstream(n2);

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const jobs = await execution.getJobs({ order: [['id', 'ASC']] });
      expect(jobs[1].result.title).toBe(post.title);
    });

    it('params.sort', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          params: {
            sort: 'id'
          }
        }
      });

      const p1 = await PostModel.create({ title: 't1' });
      const p2 = await PostModel.create({ title: 't2' });

      // get the 2nd execution
      const [execution] = await workflow.getExecutions({ order: [['id', 'DESC']] });
      expect(execution.context.data.title).toBe(p2.title);
      const [job] = await execution.getJobs();
      expect(job.result.title).toBe(p1.title);
    });
  });

  describe('query all', () => {
    it('params: empty', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          multiple: true
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.length).toBe(1);
      expect(job.result[0].title).toBe(post.title);
    });

    it('params.filter: match', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          multiple: true,
          params: {
            filter: {
              title: 't1'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.length).toBe(1);
      expect(job.result[0].title).toBe(post.title);
    });

    it('params.filter: unmatch', async () => {
      const n1 = await workflow.createNode({
        type: 'query',
        config: {
          collection: 'posts',
          multiple: true,
          params: {
            filter: {
              title: 't2'
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.length).toBe(0);
    });
  });
});
@@ -0,0 +1,62 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '..';


describe('workflow > instructions > update', () => {
  let app: Application;
  let db: Database;
  let PostModel;
  let WorkflowModel;
  let workflow;

  beforeEach(async () => {
    app = await getApp();

    db = app.db;
    WorkflowModel = db.getCollection('workflows').model;
    PostModel = db.getCollection('posts').model;

    workflow = await WorkflowModel.create({
      title: 'test workflow',
      enabled: true,
      type: 'model',
      config: {
        mode: 1,
        collection: 'posts'
      }
    });
  });

  afterEach(() => db.close());

  describe('update one', () => {
    it('params: from context', async () => {
      const n2 = await workflow.createNode({
        type: 'update',
        config: {
          collection: 'posts',
          params: {
            filter: {
              id: '{{$context.data.id}}'
            },
            values: {
              published: true
            }
          }
        }
      });

      const post = await PostModel.create({ title: 't1' });
      expect(post.published).toBe(false);

      const [execution] = await workflow.getExecutions();
      const [job] = await execution.getJobs();
      expect(job.result.published).toBe(true);

      const updatedPost = await PostModel.findByPk(post.id);
      expect(updatedPost.published).toBe(true);
    });
  });
});
packages/plugin-workflow/src/__tests__/utils/getter.test.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
import { Application } from '@nocobase/server';
import Database from '@nocobase/database';
import { getApp } from '..';

import { get } from 'lodash';

import { getValue } from '../../utils/getter';
import { BRANCH_INDEX } from '../../constants';

describe('value getter', () => {
  let app: Application;
  let db: Database;
  let JobModel;
  let WorkflowModel;
  let ExecutionModel;
  let PostModel;
  let workflow;

  beforeEach(async () => {
    app = await getApp();

    db = app.db;
    WorkflowModel = db.getCollection('workflows').model;
    JobModel = db.getCollection('jobs').model;
    ExecutionModel = db.getCollection('executions').model;
    PostModel = db.getCollection('posts').model;

    workflow = await WorkflowModel.create({
      title: 'test workflow',
      enabled: true,
      type: 'model',
      config: {
        mode: 1,
        collection: 'posts'
      }
    });
  });

  afterEach(() => db.close());

  describe('get from constants', () => {
    it('null', () => {
      const v1 = getValue({
        value: null
      }, null, null);
      expect(v1).toBe(null);
    });

    it('number', () => {
      const v1 = getValue({
        value: 1
      }, null, null);
      expect(v1).toBe(1);
    });
  });

  describe('get from context', () => {
    it('paths', async () => {
      const post = await PostModel.create({ title: 't1' });
      const [execution] = await workflow.getExecutions();

      const v1 = getValue({
        type: 'context',
        options: {}
      }, null, execution);
      expect(v1).toMatchObject({ data: { title: 't1' } });

      const v2 = getValue({
        type: 'context',
        options: { path: 'data' }
      }, null, execution);
      expect(v2).toMatchObject({ title: 't1' });

      const v3 = getValue({
        type: 'context',
        options: { path: 'data.title' }
      }, null, execution);
      expect(v3).toBe(post.title);
    });
  });

  describe('get from job by id', () => {
    it('base getting from executed job', async () => {
      const n1 = await workflow.createNode({
        type: 'echo'
      });
      const post = await PostModel.create({ title: 't1' });
      const [execution] = await workflow.getExecutions();
      await execution.prepare({}, true);

      const v1 = getValue({
        type: 'job',
        options: {
          nodeId: n1.id,
          path: 'data.title'
        }
      }, null, execution);

      expect(v1).toBe(post.title);
    });

    it('result of unexecuted job could not be got', async () => {
      const n1 = await workflow.createNode({
        type: 'condition'
      });

      const n2 = await workflow.createNode({
        type: 'echo',
        branchIndex: BRANCH_INDEX.ON_TRUE,
        upstreamId: n1.id
      });

      const n3 = await workflow.createNode({
        type: 'echo',
        branchIndex: BRANCH_INDEX.ON_FALSE,
        upstreamId: n1.id
      });

      const post = await PostModel.create({ title: 't1' });
      const [execution] = await workflow.getExecutions();
      await execution.prepare({}, true);

      const v1 = getValue({
        type: 'job',
        options: {
          nodeId: n3.id
        }
      }, null, execution);

      expect(v1).toBeUndefined();

      const v2 = getValue({
        type: 'job',
        options: {
          nodeId: n2.id
        }
      }, null, execution);

      expect(v2).toBe(true);
    });
  });
});
@@ -36,7 +36,7 @@ export default {
       sourceKey: 'id',
       foreignKey: 'upstream_id',
     },
-    // only works when upstream node is branching type, like condition and parallel.
+    // only works when upstream node is branching type, such as condition and parallel.
     // put here because the design of flow-links model is not really necessary for now.
     // or it should be put into flow-links model.
     {
@@ -47,7 +47,7 @@ export default {
     },
     // for reasons:
     // 1. redirect type node to solve cycle flow.
-    // 2. recognize as true next node after branches.
+    // 2. recognize as real next node after branches.
     {
       interface: 'linkTo',
       name: 'downstream',
packages/plugin-workflow/src/instructions/create.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import { JOB_STATUS } from "../constants";
import FlowNodeModel from "../models/FlowNode";

export default {
  async run(this: FlowNodeModel, input, execution) {
    const {
      collection,
      params = {}
    } = this.config;

    const repo = (<typeof FlowNodeModel>this.constructor).database.getRepository(collection);
    const result = await repo.create({
      ...execution.getParsedValue(params),
      transaction: execution.transaction
    });

    return {
      result,
      status: JOB_STATUS.RESOLVED
    };
  }
}
packages/plugin-workflow/src/instructions/destroy.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import { JOB_STATUS } from "../constants";
import FlowNodeModel from "../models/FlowNode";

export default {
  async run(this: FlowNodeModel, input, execution) {
    const {
      collection,
      params = {}
    } = this.config;

    const repo = (<typeof FlowNodeModel>this.constructor).database.getRepository(collection);
    const result = await repo.destroy({
      ...execution.getParsedValue(params),
      transaction: execution.transaction
    });

    return {
      result,
      status: JOB_STATUS.RESOLVED
    };
  }
}
@@ -4,6 +4,10 @@ import FlowNodeModel from "../models/FlowNode";
 import prompt from './prompt';
 import condition from './condition';
 import parallel from './parallel';
+import query from "./query";
+import create from "./create";
+import update from "./update";
+import destroy from "./destroy";

 export interface Job {
   status: number;
@@ -42,3 +46,7 @@ export function registerInstruction(key: string, instruction: any) {
 registerInstruction('prompt', prompt);
 registerInstruction('condition', condition);
 registerInstruction('parallel', parallel);
+registerInstruction('query', query);
+registerInstruction('create', create);
+registerInstruction('update', update);
+registerInstruction('destroy', destroy);
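For orientation, registerInstruction is the extension point the four new node types plug into. A minimal sketch of a custom instruction following the same shape (the 'log' key and its behavior are illustrative, not part of this commit; the import paths assume the file sits next to the built-in instructions):

import { JOB_STATUS } from '../constants';
import { registerInstruction } from '.';

// Hypothetical instruction: pass the upstream input through unchanged
// and mark the job as resolved, mirroring the run() contract of the
// built-in instructions in this commit.
registerInstruction('log', {
  async run(input, execution) {
    console.log('node input:', input);
    return {
      result: input,
      status: JOB_STATUS.RESOLVED
    };
  }
});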
packages/plugin-workflow/src/instructions/query.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { JOB_STATUS } from "../constants";
import FlowNodeModel from "../models/FlowNode";

export default {
  async run(this: FlowNodeModel, input, execution) {
    const {
      collection,
      multiple,
      params = {}
    } = this.config;

    const repo = (<typeof FlowNodeModel>this.constructor).database.getRepository(collection);
    const options = execution.getParsedValue(params);
    const result = await (multiple ? repo.find : repo.findOne).call(repo, {
      ...options,
      transaction: execution.transaction
    });

    return {
      result,
      status: JOB_STATUS.RESOLVED
    };
  }
}
packages/plugin-workflow/src/instructions/update.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { JOB_STATUS } from "../constants";
import FlowNodeModel from "../models/FlowNode";

export default {
  async run(this: FlowNodeModel, input, execution) {
    const {
      collection,
      multiple = false,
      params = {}
    } = this.config;

    const repo = (<typeof FlowNodeModel>this.constructor).database.getRepository(collection);
    const result = await repo.update({
      ...execution.getParsedValue(params),
      transaction: execution.transaction
    });

    return {
      result: multiple ? result : (result[0] || null),
      status: JOB_STATUS.RESOLVED
    };
  }
}
@@ -4,6 +4,7 @@ import {
   HasManyGetAssociationsMixin,
   Transaction
 } from 'sequelize';
+import parse from 'json-templates';

 import Database from '@nocobase/database';

@@ -40,6 +41,7 @@ export default class ExecutionModel extends Model {
   nodes: Array<FlowNodeModel> = [];
   nodesMap = new Map<number, FlowNodeModel>();
   jobsMap = new Map<number, JobModel>();
+  jobsMapByNodeId: { [key: number]: any } = {};

   static StatusMap = {
     [JOB_STATUS.PENDING]: EXECUTION_STATUS.STARTED,
@@ -70,14 +72,12 @@ export default class ExecutionModel extends Model {
   makeJobs(jobs: Array<JobModel>) {
     jobs.forEach(job => {
       this.jobsMap.set(job.id, job);
+      // TODO: should consider cycle, and from previous job
+      this.jobsMapByNodeId[job.nodeId] = job.result;
     });
   }

-  async prepare(options) {
-    if (this.status !== EXECUTION_STATUS.STARTED) {
-      throw new Error(`execution was ended with status ${this.status}`);
-    }
-
+  async prepare(options, commit = false) {
     this.options = options || {};
     const { transaction = await (<typeof ExecutionModel>this.constructor).database.sequelize.transaction() } = this.options;
     this.transaction = transaction;
@@ -90,12 +90,22 @@ export default class ExecutionModel extends Model {

     this.makeNodes(nodes);

-    const jobs = await this.getJobs({ transaction });
+    const jobs = await this.getJobs({
+      order: [['id', 'ASC']],
+      transaction
+    });

     this.makeJobs(jobs);
+
+    if (commit) {
+      await this.commit();
+    }
   }

   async start(options: ExecutionOptions) {
+    if (this.status !== EXECUTION_STATUS.STARTED) {
+      throw new Error(`execution was ended with status ${this.status}`);
+    }
     await this.prepare(options);
     if (this.nodes.length) {
       const head = this.nodes.find(item => !item.upstream);
@@ -107,6 +117,9 @@
   }

   async resume(job: JobModel, options: ExecutionOptions) {
+    if (this.status !== EXECUTION_STATUS.STARTED) {
+      throw new Error(`execution was ended with status ${this.status}`);
+    }
     await this.prepare(options);
     const node = this.nodesMap.get(job.nodeId);
     await this.recall(node, job);
@@ -201,13 +214,14 @@
   async saveJob(payload) {
     const { database } = <typeof WorkflowModel>this.constructor;
     const { model } = database.getCollection('jobs');
-    const [result] = await model.upsert({
+    const [job] = await model.upsert({
       ...payload,
       executionId: this.id
     }, { transaction: this.transaction }) as [JobModel, boolean | null];
-    this.jobsMap.set(result.id, result);
+    this.jobsMap.set(job.id, job);
+    this.jobsMapByNodeId[job.nodeId] = job.result;

-    return result;
+    return job;
   }

   // find the first node in current branch
@@ -238,4 +252,11 @@
     }
     return null;
   }
+
+  getParsedValue(value) {
+    return parse(value)({
+      $context: this.context,
+      $jobsMapByNodeId: this.jobsMapByNodeId
+    });
+  }
 }
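getParsedValue is where the {{...}} placeholders used throughout the new tests get resolved. A minimal sketch of the json-templates call it wraps, with hypothetical values standing in for a real execution:

import parse from 'json-templates';

// A node's stored params, as in the create-instruction test above.
const params = {
  values: {
    postId: '{{$context.data.id}}'
  }
};

// getParsedValue(params) effectively does this, feeding the execution's
// context and collected job results in as the template scope.
const resolved = parse(params)({
  $context: { data: { id: 1 } },
  $jobsMapByNodeId: {}
});
// resolved.values.postId now carries the id taken from the context.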
@@ -1,7 +1,11 @@
 import { Model, BelongsToGetAssociationMixin } from 'sequelize';
+import Database from '@nocobase/database';

 import WorkflowModel from './Workflow';

 export default class FlowNodeModel extends Model {
+  declare static readonly database: Database;
+
   declare id: number;
   declare title: string;
   declare branchIndex: null | number;
@@ -2,6 +2,7 @@ import WorkflowModel from "../models/Workflow";

 export interface ModelChangeTriggerConfig {
   collection: string;
   mode: number;
   // TODO: ICondition
   filter: any;
 }
@@ -23,7 +24,7 @@ export default {
     const { database } = <typeof WorkflowModel>this.constructor;
     const { collection, mode } = this.config;
     const { model } = database.getCollection(collection);
-    const handler = (data: any, options) => callback({ data }, options);
+    const handler = (data: any, options) => callback({ data: data.get() }, options);
     // TODO: duplication when mode change should be considered
     for (let [key, event] of MODE_BITMAP_EVENTS.entries()) {
       if (mode & key) {
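The switch to data.get() matters because the model hook hands the trigger a Sequelize instance, while the template placeholders and getter paths used elsewhere in this commit walk plain objects. A small illustration with hypothetical values:

// `post` stands for the instance Sequelize passes to the hook.
const plain = post.get();   // plain attributes, e.g. { id: 1, title: 't1', published: false }

// Stored as the execution context, so both '{{$context.data.title}}'
// and a context operand with path 'data.title' resolve to 't1'.
const context = { data: plain };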
@@ -1,6 +1,7 @@
-import { get } from 'lodash';
+import { get as getWithPath } from 'lodash';

 import ExecutionModel from '../models/Execution';
 import JobModel from '../models/Job';

 export type OperandType = 'context' | 'input' | 'job';

@@ -9,7 +10,7 @@ export type ObjectGetterOptions = {
 };

 export type JobGetterOptions = ObjectGetterOptions & {
-  id: number
+  nodeId: number
 };

 export type ConstantOperand = {
@@ -32,22 +33,34 @@ export type JobOperand = {
   options: JobGetterOptions;
 };

+// TODO(type): union type here is wrong
 export type Operand = ContextOperand | InputOperand | JobOperand | ConstantOperand;

-// TODO: other instructions may also use this method, could be moved to utils.
-export function getValue(operand: Operand, input: any, execution: ExecutionModel) {
+// HACK: if no path provided, return self
+// @see https://github.com/lodash/lodash/pull/1270
+// TODO(question): should add default value as lodash?
+function get(object, path?: string | Array<string>) {
+  return path == null || !path.length ? object : getWithPath(object, path);
+}
+
+
+// NOTE:
+// this method could only be used in executing nodes.
+// because type of 'job' need loaded jobs in runtime execution.
+// or the execution should be prepared first.
+export function getValue(operand: Operand, lastJob: JobModel, execution: ExecutionModel) {
   switch (operand.type) {
     // from execution context
     case 'context':
-      return get(execution, operand.options.path);
-    // from input from last job or manual
+      return get(execution.context, operand.options.path);
+    // from last job (or input job)
     case 'input':
-      return get(input, operand.options.path);
+      return lastJob ?? get(lastJob.result, operand.options.path);
     // from job in execution
     case 'job':
-      // assume jobs have been fetched from execution before
-      const job = execution.jobsMap.get(operand.options.id);
-      return get(job, operand.options.path);
+      const job = execution.jobsMapByNodeId[operand.options.nodeId];
+      return job && get(job, operand.options.path);
     // constant
     default:
       return operand.value;
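To round out the getter rework, a quick sketch of how the operand kinds resolve against a prepared execution (values are illustrative, taken from the getter tests above, not prescribed by the source):

// constant operand: the value is returned as-is
getValue({ value: 1 }, null, execution);                                           // 1

// context operand: read a path from execution.context
getValue({ type: 'context', options: { path: 'data.title' } }, null, execution);   // 't1'

// job operand: look up the result recorded for a node in jobsMapByNodeId;
// undefined when that node has not produced a job yet
getValue({ type: 'job', options: { nodeId: n1.id, path: 'data.title' } }, null, execution);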