feat(ci): run backend tests concurrently (#2815)
* chore: run backend tests concurrently
* fix: mock app server create database
* chore: using @swc/jest instead of ts-jest
* chore: create test database
* chore: database distributor
* fix: test collection manager
* fix: test
* chore: to use jest.mock, change the formatter to the commonjs module
* chore: db acquire
* chore: github action
* fix: pg test action
* chore: formatter
* chore: jest workaround
* fix: mysql test
* chore: mysql github action
* chore: clean storage
* chore: gitignore
* chore: max workers
Parent commit: df552619dd
This commit: 02559b61ba
.github/workflows/nocobase-test-backend.yml (19 lines changed)

@@ -38,11 +38,12 @@ jobs:
           cache: 'yarn'
       - run: yarn install
       - name: Test with Sqlite
-        run: yarn nocobase install -f && node --max_old_space_size=4096 ./node_modules/.bin/jest --maxWorkers=1 --workerIdleMemoryLimit=3000MB
+        run: yarn nocobase install -f && node --max_old_space_size=4096 ./node_modules/.bin/jest --maxWorkers=100% --workerIdleMemoryLimit=3000MB
         env:
           LOGGER_LEVEL: error
           DB_DIALECT: sqlite
           DB_STORAGE: /tmp/db.sqlite
+          DB_TEST_PREFIX: test_
           DB_UNDERSCORED: ${{ matrix.underscored }}
         timeout-minutes: 40

@@ -80,7 +81,11 @@ jobs:
       - run: yarn install
       # - run: yarn build
       - name: Test with postgres
-        run: yarn nocobase install -f && node --max_old_space_size=4096 ./node_modules/.bin/jest --maxWorkers=1 --workerIdleMemoryLimit=3000MB
+        run: |
+          yarn nocobase install -f
+          ./node_modules/.bin/tsx packages/core/test/src/scripts/test-db-creator.ts &
+          sleep 1
+          node --max_old_space_size=4096 ./node_modules/.bin/jest --maxWorkers=100% --workerIdleMemoryLimit=3000MB
         env:
           LOGGER_LEVEL: error
           DB_DIALECT: postgres
@@ -92,6 +97,8 @@ jobs:
           DB_UNDERSCORED: ${{ matrix.underscored }}
           DB_SCHEMA: ${{ matrix.schema }}
           COLLECTION_MANAGER_SCHEMA: ${{ matrix.collection_schema }}
+          DB_TEST_DISTRIBUTOR_PORT: 23450
+          DB_TEST_PREFIX: test_
         timeout-minutes: 40

   mysql-test:
@@ -118,7 +125,11 @@ jobs:
       - run: yarn install
       # - run: yarn build
       - name: Test with MySQL
-        run: yarn nocobase install -f && node --max_old_space_size=4096 ./node_modules/.bin/jest --maxWorkers=1 --workerIdleMemoryLimit=3000MB
+        run: |
+          yarn nocobase install -f
+          ./node_modules/.bin/tsx packages/core/test/src/scripts/test-db-creator.ts &
+          sleep 1
+          node --max_old_space_size=4096 ./node_modules/.bin/jest --maxWorkers=100% --workerIdleMemoryLimit=3000MB
         env:
           LOGGER_LEVEL: error
           DB_DIALECT: mysql
@@ -128,4 +139,6 @@ jobs:
           DB_PASSWORD: password
           DB_DATABASE: nocobase
           DB_UNDERSCORED: ${{ matrix.underscored }}
+          DB_TEST_DISTRIBUTOR_PORT: 23450
+          DB_TEST_PREFIX: test_
         timeout-minutes: 40
.gitignore (1 line changed)

@@ -35,3 +35,4 @@ storage/logs-e2e
 storage/uploads-e2e
 tsconfig.paths.json
 playwright
+.swc
@@ -1,8 +1,20 @@
 const { pathsToModuleNameMapper } = require('ts-jest');
 const { compilerOptions } = require('./tsconfig.paths.json');
-const { defaults } = require('jest-config');
 
-module.exports = {
+const swcrc = {
+  jsc: {
+    parser: {
+      syntax: 'typescript',
+      tsx: false,
+      decorators: true,
+      dynamicImport: false,
+    },
+  },
+};
+
+((swcrc.jsc ??= {}).experimental ??= {}).plugins = [['jest_workaround', {}]];
+
+const config = {
   rootDir: process.cwd(),
   collectCoverage: false,
   verbose: true,
@@ -16,14 +28,7 @@ module.exports = {
     }),
   },
   transform: {
-    '^.+\\.{ts|tsx}?$': [
-      'ts-jest',
-      {
-        babelConfig: false,
-        tsconfig: './tsconfig.jest.json',
-        diagnostics: false,
-      },
-    ],
+    '^.+\\.(t|j)sx?$': ['@swc/jest', swcrc],
   },
   modulePathIgnorePatterns: ['/esm/', '/es/', '/dist/', '/lib/', '/client/', '/sdk/', '\\.test\\.tsx$'],
   coveragePathIgnorePatterns: [
@@ -37,3 +42,5 @@ module.exports = {
     '/storage/',
   ],
 };
+
+module.exports = config;
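A note on the transform key above: in a JavaScript regular expression the old '^.+\\.{ts|tsx}?$' treats the braces literally, so the new '^.+\\.(t|j)sx?$' both repairs the pattern and extends it to .js/.jsx sources. A minimal, purely illustrative TypeScript check of what the new key matches (not part of the commit):

    // Illustrative only: which file names the new transform key routes through @swc/jest.
    const newTransformKey = /^.+\.(t|j)sx?$/;

    console.log(['a.ts', 'a.tsx', 'b.js', 'b.jsx'].every((f) => newTransformKey.test(f))); // true
    console.log(newTransformKey.test('styles.css'));                                       // false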
@@ -1,13 +1,3 @@
-import prettyFormat from 'pretty-format';
+import { jest } from '@jest/globals';
 
-global['prettyFormat'] = prettyFormat;
-
-jest.setTimeout(300000);
-
-// Turn console.error into an error, so it is easy to assert on
-(() => {
-  const spy = jest.spyOn(console, 'error');
-  afterAll(() => {
-    spy.mockRestore();
-  });
-})();
+jest.setTimeout(100000);
@@ -72,6 +72,8 @@
     "@commitlint/prompt-cli": "^16.1.0",
     "@faker-js/faker": "8.1.0",
     "@playwright/test": "^1.37.1",
+    "@swc/core": "^1.3.92",
+    "@swc/jest": "^0.2.29",
     "@testing-library/jest-dom": "^5.17.0",
     "@testing-library/react": "^14.0.0",
     "@testing-library/user-event": "^14.4.3",
@@ -79,6 +81,7 @@
     "@types/react-dom": "^17.0.0",
     "@vitejs/plugin-react": "^4.0.0",
     "auto-changelog": "^2.4.0",
+    "axios": "^0.26.1",
     "commander": "^9.2.0",
     "dumi": "^2.2.0",
     "dumi-theme-nocobase": "^0.2.14",
@@ -88,6 +91,7 @@
     "ghooks": "^2.0.4",
     "jest": "^29.6.2",
     "jest-cli": "^29.6.2",
+    "jest_workaround": "^0.79.19",
     "jsdom-worker": "^0.3.0",
     "lint-staged": "^13.2.3",
     "pretty-format": "^24.0.0",
@@ -97,8 +101,7 @@
     "ts-jest": "^29.1.1",
     "typescript": "5.1.3",
     "vite": "^4.4.9",
-    "vitest": "^0.34.3",
-    "axios": "^0.26.1"
+    "vitest": "^0.34.3"
   },
   "volta": {
     "node": "18.14.2",
@@ -1,4 +1,4 @@
-import Database, { CollectionOptions, DatabaseOptions } from '@nocobase/database';
+import Database, { CollectionOptions, DatabaseOptions, mockDatabase } from '@nocobase/database';
 import { Handlers, ResourceOptions, Resourcer } from '@nocobase/resourcer';
 import merge from 'deepmerge';
 import Koa from 'koa';
@@ -8,38 +8,6 @@ import supertest, { SuperAgentTest } from 'supertest';
 import db2resource from '../../../server/src/middlewares/db2resource';
 import { uid } from '@nocobase/utils';
 
-export function generatePrefixByPath() {
-  return `mock_${uid(6)}`;
-}
-
-export function getConfig(config = {}, options?: any): DatabaseOptions {
-  return merge(
-    {
-      username: process.env.DB_USER,
-      password: process.env.DB_PASSWORD,
-      database: process.env.DB_DATABASE,
-      host: process.env.DB_HOST,
-      port: process.env.DB_PORT,
-      dialect: process.env.DB_DIALECT,
-      storage: process.env.DB_STORAGE,
-      logging: process.env.DB_LOGGING === 'on',
-      sync: {
-        force: true,
-      },
-      hooks: {
-        beforeDefine(model, options) {
-          options.tableName = `${generatePrefixByPath()}_${options.tableName || options.name.plural}`;
-        },
-      },
-    },
-    config || {},
-    options,
-  ) as any;
-}
-
-export function mockDatabase(options?: DatabaseOptions): Database {
-  return new Database(getConfig(options));
-}
-
 interface ActionParams {
   fields?: string[];
@@ -21,10 +21,12 @@
     "qs": "^6.11.2",
     "semver": "^7.3.7",
     "sequelize": "^6.26.0",
-    "umzug": "^3.1.1"
+    "umzug": "^3.1.1",
+    "nanoid": "^3.3.6"
   },
   "devDependencies": {
-    "@types/glob": "^7.2.0"
+    "@types/glob": "^7.2.0",
+    "node-fetch": "^2.6.7"
   },
   "repository": {
     "type": "git",
@@ -336,7 +336,6 @@ describe('repository find', () => {
 
   beforeEach(async () => {
     db = mockDatabase();
-
     User = db.collection<{ id: number; name: string }, { name: string }>({
       name: 'users',
       fields: [
@@ -5,9 +5,7 @@ describe('view inference', function () {
   let db: Database;
 
   beforeEach(async () => {
-    db = mockDatabase({
-      tablePrefix: '',
-    });
+    db = mockDatabase();
     await db.clean({ drop: true });
   });
 
@@ -71,6 +71,7 @@ import { patchSequelizeQueryInterface, snakeCase } from './utils';
 import { BaseValueParser, registerFieldValueParsers } from './value-parsers';
 import { ViewCollection } from './view-collection';
 import { SqlCollection } from './sql-collection/sql-collection';
+import { nanoid } from 'nanoid';
 
 export type MergeOptions = merge.Options;
 
@@ -88,6 +89,8 @@ export interface IDatabaseOptions extends Options {
   migrator?: any;
   usingBigIntForId?: boolean;
   underscored?: boolean;
+  customHooks?: any;
+  instanceId?: string;
 }
 
 export type DatabaseOptions = IDatabaseOptions;
@@ -172,9 +175,11 @@ export class Database extends EventEmitter implements AsyncEmitter {
   pendingFields = new Map<string, RelationField[]>();
   modelCollection = new Map<ModelStatic<any>, Collection>();
   tableNameCollectionMap = new Map<string, Collection>();
+  context: any = {};
   queryInterface: QueryInterface;
 
+  _instanceId: string;
+
   utils = new DatabaseUtils(this);
   referenceMap = new ReferencesMap();
   inheritanceMap = new InheritanceMap();
@@ -206,6 +211,12 @@ export class Database extends EventEmitter implements AsyncEmitter {
       ...lodash.clone(options),
     };
 
+    if (!options.instanceId) {
+      this._instanceId = nanoid();
+    } else {
+      this._instanceId = options.instanceId;
+    }
+
     if (options.storage && options.storage !== ':memory:') {
       if (!isAbsolute(options.storage)) {
         opts.storage = resolve(process.cwd(), options.storage);
@@ -224,7 +235,8 @@ export class Database extends EventEmitter implements AsyncEmitter {
     }
     this.options = opts;
 
-    this.sequelize = new Sequelize(this.sequelizeOptions(this.options));
+    const sequelizeOptions = this.sequelizeOptions(this.options);
+    this.sequelize = new Sequelize(sequelizeOptions);
 
     this.queryInterface = buildQueryInterface(this);
 
@@ -297,17 +309,31 @@ export class Database extends EventEmitter implements AsyncEmitter {
     patchSequelizeQueryInterface(this);
   }
 
+  get instanceId() {
+    return this._instanceId;
+  }
+
+  setContext(context: any) {
+    this.context = context;
+  }
+
   setLogger(logger: Logger) {
     this.logger = logger;
   }
 
   sequelizeOptions(options) {
     if (options.dialect === 'postgres') {
-      options.hooks = {
-        afterConnect: async (connection) => {
+      if (!options.hooks) {
+        options.hooks = {};
+      }
+
+      if (!options.hooks['afterConnect']) {
+        options.hooks['afterConnect'] = [];
+      }
+
+      options.hooks['afterConnect'].push(async (connection) => {
         await connection.query('SET search_path TO public;');
-        },
-      };
+      });
     }
     return options;
   }
@@ -769,7 +795,15 @@ export class Database extends EventEmitter implements AsyncEmitter {
       return;
     }
 
-    return this.sequelize.close();
+    await this.emitAsync('beforeClose', this);
+
+    const closeResult = this.sequelize.close();
+
+    if (this.options?.customHooks?.['afterClose']) {
+      await this.options.customHooks['afterClose'](this);
+    }
+
+    return closeResult;
  }
 
   on(event: EventType, listener: any): this;
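The two new IDatabaseOptions fields are what the test tooling hooks into: instanceId identifies a Database instance (a nanoid() is generated when omitted), and customHooks['afterClose'] runs after sequelize.close(). A hedged sketch of how a caller might use them; the database name, instanceId and log line are placeholders, not part of the commit:

    import Database from '@nocobase/database';

    const db = new Database({
      dialect: 'postgres',
      database: 'test_d_0123456789', // hypothetical per-worker database name
      instanceId: 'worker-1',        // optional; defaults to a generated nanoid()
      customHooks: {
        async afterClose(database) {
          // e.g. tell an external service this database is free again (illustrative)
          console.log(`closed ${database.instanceId}`);
        },
      },
    });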
@@ -1,12 +1,13 @@
 import { merge, uid } from '@nocobase/utils';
 import { resolve } from 'path';
 import { Database, IDatabaseOptions } from './database';
+import fetch from 'node-fetch';
+import path from 'path';
+import { customAlphabet } from 'nanoid';
 
 export class MockDatabase extends Database {
   constructor(options: IDatabaseOptions) {
     super({
       storage: ':memory:',
-      tablePrefix: `mock_${uid(6)}_`,
       dialect: 'sqlite',
       ...options,
     });
@@ -52,5 +53,47 @@ function customLogger(queryString, queryObject) {
 
 export function mockDatabase(options: IDatabaseOptions = {}): MockDatabase {
   const dbOptions = merge(getConfigByEnv(), options) as any;
-  return new MockDatabase(dbOptions);
+
+  if (process.env['DB_TEST_PREFIX']) {
+    let configKey = 'database';
+    if (dbOptions.dialect === 'sqlite') {
+      configKey = 'storage';
+    } else {
+      configKey = 'database';
+    }
+
+    const shouldChange = () => {
+      if (dbOptions.dialect === 'sqlite') {
+        return !dbOptions[configKey].includes(process.env['DB_TEST_PREFIX']);
+      }
+
+      return !dbOptions[configKey].startsWith(process.env['DB_TEST_PREFIX']);
+    };
+
+    if (dbOptions[configKey] && shouldChange()) {
+      const nanoid = customAlphabet('1234567890abcdefghijklmnopqrstuvwxyz', 10);
+
+      const instanceId = `d_${nanoid()}`;
+      const databaseName = `${process.env['DB_TEST_PREFIX']}_${instanceId}`;
+
+      if (dbOptions.dialect === 'sqlite') {
+        dbOptions.storage = path.resolve(path.dirname(dbOptions.storage), databaseName);
+      } else {
+        dbOptions.database = databaseName;
+      }
+    }
+
+    if (process.env['DB_TEST_DISTRIBUTOR_PORT']) {
+      dbOptions.hooks = dbOptions.hooks || {};
+
+      dbOptions.hooks.beforeConnect = async (config) => {
+        const url = `http://127.0.0.1:${process.env['DB_TEST_DISTRIBUTOR_PORT']}/acquire?via=${db.instanceId}&name=${config.database}`;
+        await fetch(url);
+      };
+    }
+  }
+
+  const db = new MockDatabase(dbOptions);
+
+  return db;
 }
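With the change above, parallel isolation is driven by environment variables instead of a tablePrefix: each mockDatabase() call points at its own test-prefixed database (or sqlite file), and the beforeConnect hook asks the service on DB_TEST_DISTRIBUTOR_PORT to make sure it exists. A minimal sketch of what a test does under this scheme, assuming DB_TEST_PREFIX=test_ as in the workflow above:

    import { mockDatabase } from '@nocobase/database';

    async function example() {
      // Connects to its own generated test_* database / sqlite file for this worker.
      const db = mockDatabase();

      await db.clean({ drop: true });
      // ... run assertions against db ...
      await db.close();
    }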
@@ -178,8 +178,6 @@ export class AppSupervisor extends EventEmitter implements AsyncEmitter {
       throw new Error(`app ${app.name} already exists`);
     }
 
-    console.log(`add app ${app.name} into supervisor`);
-
     this.bindAppEvents(app);
 
     this.apps[app.name] = app;
@@ -749,7 +749,7 @@ export class Application<StateT = DefaultState, ContextT = DefaultContext> exten
     this._version = new ApplicationVersion(this);
   }
 
-  private createDatabase(options: ApplicationOptions) {
+  protected createDatabase(options: ApplicationOptions) {
     const db = new Database({
       ...(options.database instanceof Database ? options.database.options : options.database),
       migrator: {
@@ -81,6 +81,17 @@ export class MockServer extends Application {
     await this.db.clean({ drop: true });
   }
 
+  protected createDatabase(options: ApplicationOptions) {
+    const oldDatabase = this._db;
+
+    const databaseOptions = oldDatabase ? oldDatabase.options : <any>options?.database || {};
+    const database = mockDatabase(databaseOptions);
+    database.setLogger(this._logger);
+    database.setContext({ app: this });
+
+    return database;
+  }
+
   async destroy(options: any = {}): Promise<void> {
     await super.destroy(options);
 
@@ -195,17 +206,9 @@ export function mockServer(options: ApplicationOptions = {}) {
     PluginManager.findPackagePatched = true;
   }
 
-  let database;
-  if (options?.database instanceof Database) {
-    database = options.database;
-  } else {
-    database = mockDatabase(<any>options?.database || {});
-  }
-
   const app = new MockServer({
     acl: false,
     ...options,
-    database,
   });
 
   return app;
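Because MockServer now overrides createDatabase(), mockServer() no longer threads a pre-built Database through its options; every server builds its own MockDatabase. A hedged usage sketch (names are illustrative, not from the commit):

    import { mockServer } from '@nocobase/test';

    async function bootTestApp() {
      const app = mockServer({ acl: false });

      await app.db.clean({ drop: true });
      // ... register plugins, app.load(), issue requests ...

      await app.destroy();
    }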
packages/core/test/src/scripts/test-db-creator.ts (new file, 150 lines)
@@ -0,0 +1,150 @@
+import http from 'http';
+import url from 'url';
+import pg from 'pg';
+import dotenv from 'dotenv';
+import path from 'path';
+import mysql from 'mysql2/promise';
+
+dotenv.config({ path: path.resolve(process.cwd(), '.env.test') });
+
+abstract class BaseClient<Client> {
+  private createdDBs: Set<string> = new Set();
+  protected _client: Client | null = null;
+
+  abstract _createDB(name: string): Promise<void>;
+  abstract _createConnection(): Promise<Client>;
+  abstract _removeDB(name: string): Promise<void>;
+
+  async createDB(name: string) {
+    if (this.createdDBs.has(name)) {
+      return;
+    }
+
+    if (!this._client) {
+      this._client = await this._createConnection();
+    }
+
+    console.log(`Creating database: ${name}`);
+    await this._createDB(name);
+    this.createdDBs.add(name);
+  }
+
+  async releaseAll() {
+    if (!this._client) {
+      return;
+    }
+
+    const dbNames = Array.from(this.createdDBs);
+
+    for (const name of dbNames) {
+      console.log(`Removing database: ${name}`);
+      await this._removeDB(name);
+      this.createdDBs.delete(name);
+    }
+  }
+}
+
+class PostgresClient extends BaseClient<typeof pg.Client> {
+  async _removeDB(name: string): Promise<void> {
+    await this._client.query(`DROP DATABASE IF EXISTS ${name}`);
+  }
+
+  async _createDB(name: string): Promise<void> {
+    await this._client.query(`DROP DATABASE IF EXISTS ${name}`);
+    await this._client.query(`CREATE DATABASE ${name};`);
+  }
+
+  async _createConnection(): Promise<typeof pg.Client> {
+    const client = new pg.Client({
+      host: process.env['DB_HOST'],
+      port: Number(process.env['DB_PORT']),
+      user: process.env['DB_USER'],
+      password: process.env['DB_PASSWORD'],
+      database: process.env['DB_DATABASE'],
+    });
+
+    await client.connect();
+    return client;
+  }
+}
+
+class MySQLClient extends BaseClient<any> {
+  async _removeDB(name: string): Promise<void> {
+    await this._client.query(`DROP DATABASE IF EXISTS ${name}`);
+  }
+
+  async _createDB(name: string): Promise<void> {
+    await this._client.query(`CREATE DATABASE IF NOT EXISTS ${name}`);
+  }
+
+  async _createConnection(): Promise<mysql.Connection> {
+    const connection = await mysql.createConnection({
+      host: process.env['DB_HOST'],
+      port: Number(process.env['DB_PORT']),
+      user: process.env['DB_USER'],
+      password: process.env['DB_PASSWORD'],
+      database: process.env['DB_DATABASE'],
+    });
+
+    return connection;
+  }
+}
+
+const client = {
+  postgres: () => {
+    return new PostgresClient();
+  },
+  mysql: () => {
+    return new MySQLClient();
+  },
+};
+
+const dialect = process.env['DB_DIALECT'];
+
+if (!client[dialect]) {
+  throw new Error(`Unknown dialect: ${dialect}`);
+}
+
+const dbClient = client[dialect]();
+
+const server = http.createServer((req, res) => {
+  const parsedUrl = url.parse(req.url, true);
+  const path = parsedUrl.pathname;
+  const trimmedPath = path.replace(/^\/+|\/+$/g, '');
+
+  if (trimmedPath === 'acquire') {
+    const via = parsedUrl.query.via as string;
+    const name = parsedUrl.query.name as string | undefined;
+
+    dbClient
+      .createDB(name)
+      .then(() => {
+        res.writeHead(200, { 'Content-Type': 'application/json' });
+        res.end();
+      })
+      .catch((error) => {
+        console.error(error);
+        res.writeHead(500, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ error }));
+      });
+  } else if (trimmedPath === 'release') {
+    dbClient
+      .releaseAll()
+      .then(() => {
+        res.writeHead(200, { 'Content-Type': 'application/json' });
+        res.end();
+      })
+      .catch((error) => {
+        console.error(error);
+        res.writeHead(500, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ error }));
+      });
+  } else {
+    res.writeHead(404, { 'Content-Type': 'text/plain' });
+    res.end('Not Found\n');
+  }
+});
+
+server.listen(23450, '127.0.0.1', () => {
+  console.log('Server is running at http://127.0.0.1:23450/');
+});
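This creator service is what the CI workflow starts in the background before Jest: it listens on 127.0.0.1:23450, creates a database on demand for every /acquire request, and drops everything it created on /release. A small sketch of a client call (the database name and via value are placeholders):

    import fetch from 'node-fetch';

    async function acquireTestDatabase() {
      // Ask the service to (re)create the database this worker wants to use.
      await fetch('http://127.0.0.1:23450/acquire?via=worker-1&name=test_d_0123456789');

      // ... run tests against test_d_0123456789 ...

      // Drop every database the service created during this run.
      await fetch('http://127.0.0.1:23450/release');
    }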
packages/core/test/src/scripts/test-db-distributor.ts (new file, 259 lines)
@@ -0,0 +1,259 @@
+import http from 'http';
+import url from 'url';
+import pg from 'pg';
+import dotenv from 'dotenv';
+import path from 'path';
+import fs from 'fs';
+
+dotenv.config({ path: path.resolve(process.cwd(), '.env.test') });
+const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+class DBManager {
+  private acquiredDBs: Map<string, Set<string>> = new Map();
+
+  acquire(name: string, via: string) {
+    console.log('acquire', name, 'via', via);
+    if (this.acquiredDBs.has(name)) {
+      // If DB is already acquired, add the via to the set
+      this.acquiredDBs.get(name)!.add(via);
+    } else {
+      // If DB is not acquired yet, set the set with the via
+      this.acquiredDBs.set(name, new Set([via]));
+    }
+  }
+
+  async release(name: string, via: string, relaseDb?: () => Promise<void>) {
+    console.log('release', name, 'via', via);
+    const vias = this.acquiredDBs.get(name);
+    if (!vias || !vias.has(via)) {
+      console.log(`Cannot release ${name}, it is not acquired via ${via}`);
+      return;
+    }
+
+    // Remove the via from the set
+    vias.delete(via);
+
+    // If no more vias, remove the DB from the map
+    if (vias.size === 0) {
+      console.log('DB', name, 'is not used anymore, release it');
+      // delay 1000ms to make sure the DB is not used anymore
+      await delay(1000);
+
+      if (this.acquiredDBs.get(name)?.size === 0) {
+        console.log('start to release DB', name);
+        await relaseDb?.();
+        this.acquiredDBs.delete(name);
+        console.log('DB', name, 'is released, current usesd db count:', this.acquiredDBs.size);
+      }
+    }
+
+    return null;
+  }
+
+  isAcquired(name: string): boolean {
+    return this.acquiredDBs.has(name);
+  }
+}
+
+const getDBNames = (size: number, name: string) => {
+  const names = [];
+  for (let i = 0; i < size; i++) {
+    names.push(`auto_named_${name}_${i}`);
+  }
+  return names;
+};
+
+abstract class BasePool {
+  dbManager: DBManager = new DBManager();
+  constructor(protected size: number) {}
+
+  abstract createDatabase(name: string, options?: any): Promise<void>;
+  abstract cleanDatabase(name: string): Promise<void>;
+
+  abstract getConfiguredDatabaseName(): string;
+  abstract getDatabaseConfiguration(): any;
+
+  async init() {
+    const promises = [];
+    for (const name of getDBNames(this.size, this.getConfiguredDatabaseName())) {
+      promises.push(
+        (async () => {
+          console.log('create database', name);
+          await this.createDatabase(name);
+        })(),
+      );
+    }
+
+    await Promise.all(promises);
+  }
+
+  async acquire(name: string | undefined, via: string) {
+    if (!name) {
+      name = getDBNames(this.size, this.getConfiguredDatabaseName()).find((name) => !this.dbManager.isAcquired(name));
+    }
+
+    if (!name) {
+      throw new Error('No available database');
+    }
+
+    this.dbManager.acquire(name, via);
+    return name;
+  }
+
+  async release(name: string, via: string) {
+    await this.dbManager.release(name, via, async () => {
+      await this.cleanDatabase(name);
+    });
+  }
+}
+
+class PostgresPool extends BasePool {
+  private async _createConnection(options, callback) {
+    const config = this.getDatabaseConfiguration();
+    const databaseName = this.getConfiguredDatabaseName();
+
+    const client = new pg.Client({
+      host: config['host'],
+      port: config['port'],
+      user: config['username'],
+      password: config['password'],
+      database: databaseName,
+      ...options,
+    });
+
+    await client.connect();
+
+    await callback(client);
+
+    await client.end();
+  }
+
+  async cleanDatabase(name: string): Promise<void> {
+    await this._createConnection({ database: name }, async (client) => {
+      await client.query(`DROP SCHEMA public CASCADE;CREATE SCHEMA public;`);
+    });
+  }
+
+  async createDatabase(name: string, options?: any): Promise<void> {
+    const { log } = options || {};
+
+    await this._createConnection({}, async (client) => {
+      if (log) {
+        console.log(`DROP DATABASE IF EXISTS ${name}`);
+      }
+      await client.query(`DROP DATABASE IF EXISTS ${name}`);
+      if (log) {
+        console.log(`CREATE DATABASE ${name}`);
+      }
+      await client.query(`CREATE DATABASE ${name}`);
+
+      if (log) {
+        console.log(`end`);
+      }
+    });
+  }
+
+  getDatabaseConfiguration() {
+    return {
+      host: process.env.DB_HOST,
+      port: process.env.DB_PORT,
+      username: process.env.DB_USERNAME,
+      password: process.env.DB_PASSWORD,
+    };
+  }
+
+  getConfiguredDatabaseName() {
+    return process.env.DB_DATABASE;
+  }
+}
+
+class SqlitePool extends BasePool {
+  async createDatabase(name: string, options?: any): Promise<void> {
+    return fs.promises.writeFile(path.resolve(this.getStoragePath(), name), '');
+  }
+
+  async cleanDatabase(name: string): Promise<void> {
+    return fs.promises.unlink(path.resolve(this.getStoragePath(), name));
+  }
+
+  getDatabaseConfiguration(): any {
+    return {
+      storage: process.env.DB_STORAGE,
+    };
+  }
+
+  getConfiguredDatabaseName() {
+    const storagePath = process.env.DB_STORAGE;
+    if (storagePath && storagePath !== ':memory:') {
+      return path.basename(storagePath);
+    }
+  }
+
+  getStoragePath() {
+    const storagePath = process.env.DB_STORAGE;
+    if (storagePath && storagePath !== ':memory:') {
+      // return path without file name
+      return path.dirname(storagePath);
+    }
+  }
+}
+
+const pools = {
+  postgres: PostgresPool,
+  sqlite: SqlitePool,
+};
+
+(async () => {
+  const poolSize = process.env.TEST_DB_POOL_SIZE || 100;
+  const poolClass = pools[process.env.DB_DIALECT];
+
+  if (!poolClass) {
+    throw new Error(`Unknown pool class ${process.env.DB_DIALECT}`);
+  }
+
+  const pool = new poolClass(poolSize);
+  await pool.init();
+
+  return pool;
+})()
+  .then((pool: BasePool) => {
+    const server = http.createServer((req, res) => {
+      const parsedUrl = url.parse(req.url, true);
+      const path = parsedUrl.pathname;
+      const trimmedPath = path.replace(/^\/+|\/+$/g, '');
+
+      if (trimmedPath === 'acquire') {
+        const via = parsedUrl.query.via as string;
+        const name = parsedUrl.query.name as string | undefined;
+        pool
+          .acquire(name, via)
+          .then((name) => {
+            res.writeHead(200, { 'Content-Type': 'application/json' });
+            res.end(JSON.stringify({ name }));
+          })
+          .catch((err) => {
+            res.writeHead(500, { 'Content-Type': 'application/json' });
+            res.end(JSON.stringify({ error: err.message }));
+          });
+      } else if (trimmedPath === 'release') {
+        const via = parsedUrl.query.via as string;
+        const name = parsedUrl.query.name as string;
+
+        pool.release(name, via);
+
+        res.writeHead(200, { 'Content-Type': 'application/json' });
+        res.end();
+      } else {
+        res.writeHead(404, { 'Content-Type': 'text/plain' });
+        res.end('Not Found\n');
+      }
+    });
+
+    server.listen(23450, '127.0.0.1', () => {
+      console.log('Server is running at http://127.0.0.1:23450/');
+    });
+  })
+  .catch((err) => {
+    console.error(err);
+    process.exit(1);
+  });
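The distributor is an alternative service on the same port with a different contract: it pre-creates a fixed pool of auto_named_* databases, /acquire hands one back as JSON, and /release?name=...&via=... returns it to the pool, cleaning it once no caller still holds it. A hedged sketch of the round-trip (identifiers are placeholders):

    import fetch from 'node-fetch';

    async function withPooledDatabase() {
      // Borrow a database; the response body looks like { name: 'auto_named_..._0' }.
      const res = await fetch('http://127.0.0.1:23450/acquire?via=worker-1');
      const { name } = (await res.json()) as { name: string };

      // ... connect to `name` and run tests ...

      // Hand it back so the pool can clean and reuse it.
      await fetch(`http://127.0.0.1:23450/release?name=${name}&via=worker-1`);
    }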
@@ -119,8 +119,11 @@ describe('collections repository', () => {
     const app2 = mockServer({
       database: {
         tablePrefix: 'through_',
+        database: app1.db.options.database,
+        storage: app1.db.options.storage,
       },
     });
 
     app2.plugin(PluginErrorHandler, { name: 'error-handler' });
     app2.plugin(Plugin, { name: 'collection-manager' });
     await app2.load();
@@ -18,9 +18,7 @@ export class CollectionRepository extends Repository {
 
   async load(options: LoadOptions = {}) {
     const { filter, skipExist } = options;
-    console.log('start load collections');
     const instances = (await this.find({ filter, appends: ['fields'] })) as CollectionModel[];
-    console.log('end load collections');
 
     const graphlib = CollectionsGraph.graphlib();
 
@@ -111,8 +109,6 @@ export class CollectionRepository extends Repository {
       this.app.setMaintainingMessage(`load ${collectionName} collection fields`);
       await nameMap[collectionName].loadFields({ includeFields: skipField });
     }
-
-    console.log('finished load collection');
   }
 
   async db2cm(collectionName: string) {
@@ -1,5 +1,5 @@
 import { MockServer, mockServer } from '@nocobase/test';
-import * as formatter from '../actions/formatter';
+const formatter = require('../actions/formatter');
 import { cacheMiddleware, parseBuilder, parseFieldAndAssociations } from '../actions/query';
 import compose from 'koa-compose';
 
@@ -128,6 +128,7 @@ describe('query', () => {
 
   it('should parse dimensions', async () => {
     jest.spyOn(formatter, 'formatter').mockReturnValue('formatted-field');
+
     const dimensions = [
       {
         field: ['createdAt'],
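The switch from `import * as formatter` to `require(...)` goes with the move to @swc/jest: once the suite is compiled to CommonJS (with the jest_workaround plugin added above, presumably to keep the exports patchable), the test grabs the module object and spies on it directly. A minimal sketch of the resulting pattern, reusing the module path from the test above:

    // Sketch: spy on a named export through the CommonJS module object.
    const formatter = require('../actions/formatter');

    it('uses the mocked formatter', () => {
      const spy = jest.spyOn(formatter, 'formatter').mockReturnValue('formatted-field');

      // ... code under test calls formatter.formatter(...) and receives 'formatted-field' ...

      spy.mockRestore();
    });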
@@ -123,11 +123,14 @@ export class PluginMultiAppManager extends Plugin {
   private beforeGetApplicationMutex = new Mutex();
 
   static getDatabaseConfig(app: Application): IDatabaseOptions {
-    const oldConfig =
+    let oldConfig =
       app.options.database instanceof Database
         ? (app.options.database as Database).options
         : (app.options.database as IDatabaseOptions);
 
+    if (!oldConfig && app.db) {
+      oldConfig = app.db.options;
+    }
     return lodash.cloneDeep(lodash.omit(oldConfig, ['migrator']));
   }
 
@@ -21,9 +21,6 @@ describe('workflow > actions > workflows', () => {
 
   afterEach(() => app.destroy());
 
-  // describe('create', () => {
-  // });
-
   describe('update', () => {
     it('update unexecuted workflow should be ok', async () => {
       const workflow = await WorkflowModel.create({
@@ -85,6 +85,8 @@ describe('workflow > triggers > collection', () => {
       plugins: ['error-handler', 'collection-manager'],
       database: {
         tablePrefix: db.options.tablePrefix,
+        storage: db.options.storage,
+        database: db.options.database,
       },
     });
 
yarn.lock (101 lines changed)
@@ -3268,6 +3268,13 @@
     slash "^3.0.0"
     strip-ansi "^6.0.0"
 
+"@jest/create-cache-key-function@^27.4.2":
+  version "27.5.1"
+  resolved "https://registry.yarnpkg.com/@jest/create-cache-key-function/-/create-cache-key-function-27.5.1.tgz#7448fae15602ea95c828f5eceed35c202a820b31"
+  integrity sha512-dmH1yW+makpTSURTy8VzdUwFnfQh1G8R+DxO2Ho2FFmBbKFEVm+3jWdvFhE2VqB/LATCTokkP0dotjyQyw5/AQ==
+  dependencies:
+    "@jest/types" "^27.5.1"
+
 "@jest/environment@^29.6.1":
   version "29.6.1"
   resolved "https://registry.npmmirror.com/@jest/environment/-/environment-29.6.1.tgz#ee358fff2f68168394b4a50f18c68278a21fe82f"
@@ -3532,7 +3539,7 @@
     slash "^3.0.0"
     write-file-atomic "^4.0.2"
 
-"@jest/types@27.5.1":
+"@jest/types@27.5.1", "@jest/types@^27.5.1":
   version "27.5.1"
   resolved "https://registry.npmmirror.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80"
   dependencies:
@@ -5255,42 +5262,92 @@
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.57.tgz#6af39458bf4ae58c8e4d12be2f2d69563b5a932a"
 
+"@swc/core-darwin-arm64@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.92.tgz#0498d3584cf877e39107c94705c38fa4a8c04789"
+  integrity sha512-v7PqZUBtIF6Q5Cp48gqUiG8zQQnEICpnfNdoiY3xjQAglCGIQCjJIDjreZBoeZQZspB27lQN4eZ43CX18+2SnA==
+
 "@swc/core-darwin-x64@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.57.tgz#e7133f88e1aa166ad7e5966302df495551c3505c"
 
+"@swc/core-darwin-x64@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.92.tgz#1728e7ebbfe37b56c07d99e29dde78bfa90cf8d1"
+  integrity sha512-Q3XIgQfXyxxxms3bPN+xGgvwk0TtG9l89IomApu+yTKzaIIlf051mS+lGngjnh9L0aUiCp6ICyjDLtutWP54fw==
+
 "@swc/core-linux-arm-gnueabihf@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.57.tgz#f8d95c8a9e03d1cccbb85d214fe24ca95940852a"
 
+"@swc/core-linux-arm-gnueabihf@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.92.tgz#6f7c20833b739f8911c936c9783976ded2c449dc"
+  integrity sha512-tnOCoCpNVXC+0FCfG84PBZJyLlz0Vfj9MQhyhCvlJz9hQmvpf8nTdKH7RHrOn8VfxtUBLdVi80dXgIFgbvl7qA==
+
 "@swc/core-linux-arm64-gnu@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.57.tgz#66fb53f081f43a652de79374d8b7c78b10469ef8"
 
+"@swc/core-linux-arm64-gnu@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.92.tgz#bb01dd9b922b0c076c38924013bd10036ce39c7c"
+  integrity sha512-lFfGhX32w8h1j74Iyz0Wv7JByXIwX11OE9UxG+oT7lG0RyXkF4zKyxP8EoxfLrDXse4Oop434p95e3UNC3IfCw==
+
 "@swc/core-linux-arm64-musl@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.57.tgz#28709d982d2eeb55b173c79a7d5feed1a2cc065d"
 
+"@swc/core-linux-arm64-musl@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.92.tgz#0070165eed2805475c98eb732bab8bdca955932e"
+  integrity sha512-rOZtRcLj57MSAbiecMsqjzBcZDuaCZ8F6l6JDwGkQ7u1NYR57cqF0QDyU7RKS1Jq27Z/Vg21z5cwqoH5fLN+Sg==
+
 "@swc/core-linux-x64-gnu@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.57.tgz#3bea8b7d115fc2bf65a7b4b6930dd878f434034f"
 
+"@swc/core-linux-x64-gnu@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.92.tgz#d9785f93b9121eeef0f54e8d845dd216698e0115"
+  integrity sha512-qptoMGnBL6v89x/Qpn+l1TH1Y0ed+v0qhNfAEVzZvCvzEMTFXphhlhYbDdpxbzRmCjH6GOGq7Y+xrWt9T1/ARg==
+
 "@swc/core-linux-x64-musl@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.57.tgz#40bf395ac8971d561e38ef2af54b72d3705c199e"
 
+"@swc/core-linux-x64-musl@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.92.tgz#8fe5cf244695bf4f0bc7dc7df450a9bd1bfccc2b"
+  integrity sha512-g2KrJ43bZkCZHH4zsIV5ErojuV1OIpUHaEyW1gf7JWKaFBpWYVyubzFPvPkjcxHGLbMsEzO7w/NVfxtGMlFH/Q==
+
 "@swc/core-win32-arm64-msvc@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.57.tgz#5edf457fb86c455b6cecb89e4fa29a8dfa47910b"
 
+"@swc/core-win32-arm64-msvc@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.92.tgz#d6150785455c813a8e62f4e4b0a22773baf398eb"
+  integrity sha512-3MCRGPAYDoQ8Yyd3WsCMc8eFSyKXY5kQLyg/R5zEqA0uthomo0m0F5/fxAJMZGaSdYkU1DgF73ctOWOf+Z/EzQ==
+
 "@swc/core-win32-ia32-msvc@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.57.tgz#0d9d4b851f5380dce04094412ecae8ac538f5dc8"
 
+"@swc/core-win32-ia32-msvc@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.92.tgz#8142166bceafbaa209d440b36fdc8cd4b4f82768"
+  integrity sha512-zqTBKQhgfWm73SVGS8FKhFYDovyRl1f5dTX1IwSKynO0qHkRCqJwauFJv/yevkpJWsI2pFh03xsRs9HncTQKSA==
+
 "@swc/core-win32-x64-msvc@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.57.tgz#4923a71e9bd3a1bae3ab3a1c6e07a31973d45f61"
 
+"@swc/core-win32-x64-msvc@1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.92.tgz#4ba542875fc690b579232721ccec7873e139646a"
+  integrity sha512-41bE66ddr9o/Fi1FBh0sHdaKdENPTuDpv1IFHxSg0dJyM/jX8LbkjnpdInYXHBxhcLVAPraVRrNsC4SaoPw2Pg==
+
 "@swc/core@1.3.57":
   version "1.3.57"
   resolved "https://registry.npmmirror.com/@swc/core/-/core-1.3.57.tgz#a6c2c04ad72668a3144661fee27508556fff3fed"
@@ -5306,6 +5363,43 @@
     "@swc/core-win32-ia32-msvc" "1.3.57"
     "@swc/core-win32-x64-msvc" "1.3.57"
 
+"@swc/core@^1.3.92":
+  version "1.3.92"
+  resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.3.92.tgz#f51808cdb6cbb90b0877b9a51806eea9a70eafca"
+  integrity sha512-vx0vUrf4YTEw59njOJ46Ha5i0cZTMYdRHQ7KXU29efN1MxcmJH2RajWLPlvQarOP1ab9iv9cApD7SMchDyx2vA==
+  dependencies:
+    "@swc/counter" "^0.1.1"
+    "@swc/types" "^0.1.5"
+  optionalDependencies:
+    "@swc/core-darwin-arm64" "1.3.92"
+    "@swc/core-darwin-x64" "1.3.92"
+    "@swc/core-linux-arm-gnueabihf" "1.3.92"
+    "@swc/core-linux-arm64-gnu" "1.3.92"
+    "@swc/core-linux-arm64-musl" "1.3.92"
+    "@swc/core-linux-x64-gnu" "1.3.92"
+    "@swc/core-linux-x64-musl" "1.3.92"
+    "@swc/core-win32-arm64-msvc" "1.3.92"
+    "@swc/core-win32-ia32-msvc" "1.3.92"
+    "@swc/core-win32-x64-msvc" "1.3.92"
+
+"@swc/counter@^0.1.1":
+  version "0.1.2"
+  resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.2.tgz#bf06d0770e47c6f1102270b744e17b934586985e"
+  integrity sha512-9F4ys4C74eSTEUNndnER3VJ15oru2NumfQxS8geE+f3eB5xvfxpWyqE5XlVnxb/R14uoXi6SLbBwwiDSkv+XEw==
+
+"@swc/jest@^0.2.29":
+  version "0.2.29"
+  resolved "https://registry.yarnpkg.com/@swc/jest/-/jest-0.2.29.tgz#b27d647ec430c909f9bb567d1df2a47eaa3841f4"
+  integrity sha512-8reh5RvHBsSikDC3WGCd5ZTd2BXKkyOdK7QwynrCH58jk2cQFhhHhFBg/jvnWZehUQe/EoOImLENc9/DwbBFow==
+  dependencies:
+    "@jest/create-cache-key-function" "^27.4.2"
+    jsonc-parser "^3.2.0"
+
+"@swc/types@^0.1.5":
+  version "0.1.5"
+  resolved "https://registry.yarnpkg.com/@swc/types/-/types-0.1.5.tgz#043b731d4f56a79b4897a3de1af35e75d56bc63a"
+  integrity sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==
+
 "@szmarczak/http-timer@^1.1.2":
   version "1.1.2"
   resolved "https://registry.npmmirror.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421"
@@ -15015,6 +15109,11 @@ jest@^29.6.2:
     import-local "^3.0.2"
     jest-cli "^29.6.2"
 
+jest_workaround@^0.79.19:
+  version "0.79.19"
+  resolved "https://registry.yarnpkg.com/jest_workaround/-/jest_workaround-0.79.19.tgz#f5bc569163c33c4533e1c4053c533a8811152a91"
+  integrity sha512-g/MtKSwyb4Ohnd5GHeJaduTgznkyst81x+eUBGOSGK7f8doWuRMPpt6XM/13sM2jLB2QNzT/7Djj7o2PhsozIA==
+
 jose@^4.14.1:
   version "4.14.4"
   resolved "https://registry.npmmirror.com/jose/-/jose-4.14.4.tgz#59e09204e2670c3164ee24cbfe7115c6f8bff9ca"