feat: application backup and restore (#3268)

* fix: perform load action on boot main app

* feat: add dataType option in collection duplicator

* chore: reset optional dumpable config

* chore: dump command

* chore: dump & restore command

* chore: delay restore

* fix: dump test

* chore: restore command

* chore: dump command action

* chore: dumpable collection api

* chore: client collection option

* feat: backup & restore client

* chore: content disposition header in dump response

* chore: download backup field

* feat: collection origin option

* fix: test

* chore: collection manager collection origin

* chore: upload backup field

* chore: upload restore file

* chore: upload restore file

* fix: test

* chore: backup and restore support learn more

* refactor: upload restore file

* refactor: upload restore file

* fix: test

* fix: test

* chore: dumpable collection with title

* chore: pg only test

* chore: test

* fix: test

* chore: test sleep

* style: locale improve

* refactor: download backup file

* refactor: start restore

* fix: restore key name

* refactor: start restore

* refactor: start restore

* refactor: start restore

* refactor: start restore

* refactor: start restore

* refactor: start restore

* chore: unify duplicator option

* fix: dump empty collection

* chore: test

* chore: test

* style: style improve

* refactor: locale improve

* chore: dumpable collection orders

* style: style improve

* style: style improve

* style: icon adjust

* chore: nginx body size

* chore: get file status

* feat: run dump task

* feat: download api

* chore: backup files resourcer

* feat: restore destroy api

* chore: backup files resourcer

* feat: list backup files action

* chore: get collection meta from dumped file

* fix: dump file name

* fix: test

* chore: backup and restore ui

* chore: swagger api for backup & restore

* chore: api doc

* chore: api doc

* chore: api doc

* chore: backup and restore ui

* chore: backup and restore ui

* chore: backup and restore ui

* chore: backup and restore ui

* chore: backup and restore ui

* fix: restore values

* style: style improve

* fix: download field responseType

* fix: restore form local file

* refactor: locale improve

* refactor: delete backup file

* fix: in progress status

* refactor: locale improve

* refactor: locale improve

* refactor: style improve

* refactor: style improve

* refactor: style improve

* test: dump collection table attribute

* chore: dump collection with table attributes

* chore: test

* chore: create new table in restore

* fix: import error

* chore: restore table from backup file

* chore: sync collection after restore collections

* fix: restore json data

* style: style improve

* chore: restore with fields

* chore: test

* fix: test

* fix: test with underscored

* style: style improve

* fix: lock file state

* chore: add test file

* refactor: backup & restore plugin

* fix: mysql test

* chore: skip import view collection

* chore: restore collection with inherits topo order

* fix: import

* style: style improve

* fix: restore sequence fields

* fix: themeConfig collection duplicator option

* fix: restore with dialectOnly meta

* fix: throw error

* fix: restore

* fix: import backup file created in postgres into mysql

* fix: repeated items in inherits

* chore: upgrade after restore

* feat: check database env before restore

* feat: handle autoincr val in postgres

* chore: sqlite & mysql queryInterface

* chore: test

* fix: test

* chore: test

* fix: build

* fix: pg test

* fix: restore with date field

* chore: theme-config collection

* chore: change import collections method to support collection origin

* chore: fallback get autoincr value in mysql

* fix: dataType normalize

* chore: delay restore

* chore: test

* fix: build

* feat: collection onDump

* feat: collection onDump interface

* chore: dump with view collection

* chore: sync in restore

* refactor: locale improve

* refactor: code improve

* fix: test

* fix: data sync

* chore: rename backup & restore plugin

* chore: skip test

* style: style improve

* style: style improve

* style: style improve

* style: style improve

* chore: import version check

* chore: backup file dir

* chore: build

* fix: bugs

* fix: error

* fix: pageSize

* fix: import origin

* fix: improve code

* fix: remove namespace

* chore: dump rules config

* fix: dump custom collection

* chore: version

* fix: test

* fix: test

* fix: test

* fix: test

* chore: test

* fix: load custom collection

* fix: client

* fix: translation

* chore: code

* fix: bug

* fix: support shared option

* fix: roles collection dumpRules

* chore: test

* fix: define collections

* chore: collection group

* fix: translation

* fix: translation

* fix: restore options

* chore: restore command

* chore: dump error

* fix: too many open files

---------

Co-authored-by: katherinehhh <katherine_15995@163.com>
Co-authored-by: chenos <chenlinxh@gmail.com>
ChengLei Shao 2024-01-08 18:59:56 +08:00 committed by GitHub
parent 1a2e19b5eb
commit fa97d0a642
161 changed files with 4151 additions and 1899 deletions

.gitignore vendored
View File

@ -22,6 +22,7 @@ docker/**/storage
cache/diskstore-*
*.nbdump
storage/duplicator/*
storage/backups/*
**/.dumi/tmp
**/.dumi/tmp-test
**/.dumi/tmp-production

View File

@ -4,6 +4,27 @@ import { ReactNode } from 'react';
type dumpable = 'required' | 'optional' | 'skip';
type CollectionSortable = string | boolean | { name?: string; scopeKey?: string };
type MetaDataType = 'meta';
type ConfigDataType = 'config';
type BusinessDataType = 'business';
type DumpDataType = MetaDataType | ConfigDataType | BusinessDataType;
type Dumpable = 'required' | 'optional' | 'skip';
type BaseDuplicatorObject = {
with?: string[] | string;
delayRestore?: any;
};
type Duplicator =
| Dumpable
| ({
dumpable?: Dumpable;
} & BaseDuplicatorObject)
| ({
dataType?: DumpDataType;
} & BaseDuplicatorObject);
export interface CollectionOptions {
name: string;
title?: string;
@ -16,14 +37,7 @@ export interface CollectionOptions {
* @prop {string[] | string} [with] - Collections dumped with this collection
* @prop {any} [delayRestore] - A function to execute after all collections are restored
*/
duplicator?:
| dumpable
| {
dumpable: dumpable;
with?: string[] | string;
delayRestore?: any;
};
duplicator?: Duplicator;
tableName?: string;
inherits?: string[] | string;
viewName?: string;
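For reference, a minimal sketch of the shapes this widened Duplicator union accepts (the collection names below are illustrative, not part of this commit):

import { CollectionOptions } from '@nocobase/client';

// Shorthand form: a dumpable level only.
const posts: CollectionOptions = {
  name: 'posts',
  duplicator: 'optional',
};

// Object form using the new dataType grouping, with companion collections.
const appSettings: CollectionOptions = {
  name: 'appSettings',
  duplicator: { dataType: 'config', with: 'appSettingsItems' },
};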

View File

@ -0,0 +1,49 @@
import { CollectionGroupManager } from '../collection-group-manager';
describe('collection group manager', () => {
it('should unify duplicator option', async () => {
expect(CollectionGroupManager.unifyDumpRules('skipped')).toMatchObject({
group: 'skipped',
});
expect(CollectionGroupManager.unifyDumpRules('required')).toMatchObject({
group: 'required',
});
expect(
CollectionGroupManager.unifyDumpRules({
required: true,
}),
).toMatchObject({
group: 'required',
});
expect(
CollectionGroupManager.unifyDumpRules({
skipped: true,
}),
).toMatchObject({
group: 'skipped',
});
expect(
CollectionGroupManager.unifyDumpRules({
group: 'required',
delayRestore: {},
}),
).toMatchObject({
group: 'required',
delayRestore: {},
});
expect(
CollectionGroupManager.unifyDumpRules({
group: 'logs',
delayRestore: {},
}),
).toMatchObject({
group: 'logs',
delayRestore: {},
});
});
});

View File

@ -5,6 +5,7 @@ import { Model } from './model';
type CollectionTypeOptions = {
condition: (options: CollectionOptions) => boolean;
onSync?: (model: typeof Model, options: any) => Promise<void>;
onDump?: (dumper: any, collection: Collection) => Promise<void>;
};
export class CollectionFactory {

View File

@ -1,94 +1,62 @@
import { BaseDumpRules, DumpRules } from './collection';
import Database from './database';
import { isString, castArray } from 'lodash';
type RequiredGroup = 'required';
type SkippedGroup = 'skipped';
export type BuiltInGroup = RequiredGroup | SkippedGroup;
export type DumpRulesGroupType = BuiltInGroup | string;
// A collection group is a set of collections that are dumped and restored together.
export interface CollectionGroup {
namespace: string;
collections: string[];
function: string;
dumpable: 'required' | 'optional' | 'skip';
dataType: DumpRulesGroupType;
delayRestore?: any;
}
export interface CollectionGroupWithCollectionTitle extends Omit<CollectionGroup, 'collections'> {
collections: Array<{
name: string;
title: string;
}>;
}
export class CollectionGroupManager {
constructor(public db: Database) {}
getGroups() {
const collections = [...this.db.collections.values()];
const groups = new Map<string, CollectionGroup>();
const skipped = [];
for (const collection of collections) {
const groupKey = collection.options.namespace;
if (!groupKey) {
continue;
}
const [namespace, groupFunc] = groupKey.split('.');
if (!groupFunc) {
skipped.push({
name: collection.name,
reason: 'no-group-function',
});
continue;
}
if (!groups.has(groupKey)) {
const dumpable = (() => {
if (!collection.options.duplicator) {
return undefined;
}
if (isString(collection.options.duplicator)) {
return {
dumpable: collection.options.duplicator,
};
}
return collection.options.duplicator;
})();
if (!dumpable) {
skipped.push({
name: collection.name,
reason: 'no-dumpable',
});
continue;
}
const group: CollectionGroup = {
namespace,
function: groupFunc,
collections: dumpable.with ? castArray(dumpable.with) : [],
dumpable: dumpable.dumpable,
};
if (dumpable.delayRestore) {
group.delayRestore = dumpable.delayRestore;
}
groups.set(groupKey, group);
}
const group = groups.get(groupKey);
group.collections.push(collection.name);
static unifyDumpRules(dumpRules: DumpRules):
| (BaseDumpRules & {
group: DumpRulesGroupType;
})
| undefined {
if (!dumpRules) {
return undefined;
}
const results = [...groups.values()];
const groupCollections = results.map((i) => i.collections).flat();
for (const skipItem of skipped) {
if (groupCollections.includes(skipItem.name)) {
continue;
}
this.db.logger.warn(`collection ${skipItem.name} is not in any collection group, reason: ${skipItem.reason}.`);
if (typeof dumpRules === 'string') {
return {
group: dumpRules,
};
}
return results;
if ('required' in dumpRules && (dumpRules as { required: true }).required) {
return {
...dumpRules,
group: 'required',
};
}
if ('skipped' in dumpRules && (dumpRules as { skipped: true }).skipped) {
return {
...dumpRules,
group: 'skipped',
};
}
return dumpRules as BaseDumpRules & {
group: DumpRulesGroupType;
};
}
}

View File

@ -27,11 +27,13 @@ export class ImporterReader {
const files = await readdir(this.directory, {
encoding: 'utf-8',
});
const modules = files
.filter((fileName) => {
if (fileName.endsWith('.d.ts')) {
return false;
}
const ext = path.parse(fileName).ext.replace('.', '');
return this.extensions.has(ext);
})

View File

@ -16,12 +16,20 @@ import { Model } from './model';
import { AdjacencyListRepository } from './repositories/tree-repository/adjacency-list-repository';
import { Repository } from './repository';
import { checkIdentifier, md5, snakeCase } from './utils';
import { BuiltInGroup } from './collection-group-manager';
export type RepositoryType = typeof Repository;
export type CollectionSortable = string | boolean | { name?: string; scopeKey?: string };
export type CollectionSortable =
| string
| boolean
| {
name?: string;
scopeKey?: string;
};
type dumpable = 'required' | 'optional' | 'skip';
type dumpableType = 'meta' | 'business' | 'config';
function EnsureAtomicity(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
@ -53,6 +61,16 @@ function EnsureAtomicity(target: any, propertyKey: string, descriptor: PropertyD
return descriptor;
}
export type BaseDumpRules = {
delayRestore?: any;
};
export type DumpRules =
| BuiltInGroup
| ({ required: true } & BaseDumpRules)
| ({ skipped: true } & BaseDumpRules)
| ({ group: BuiltInGroup | string } & BaseDumpRules);
export interface CollectionOptions extends Omit<ModelOptions, 'name' | 'hooks'> {
name: string;
title?: string;
@ -65,13 +83,7 @@ export interface CollectionOptions extends Omit<ModelOptions, 'name' | 'hooks'>
* @prop {string[] | string} [with] - Collections dumped with this collection
* @prop {any} [delayRestore] - A function to execute after all collections are restored
*/
duplicator?:
| dumpable
| {
dumpable: dumpable;
with?: string[] | string;
delayRestore?: any;
};
dumpRules?: DumpRules;
tableName?: string;
inherits?: string[] | string;
@ -91,11 +103,19 @@ export interface CollectionOptions extends Omit<ModelOptions, 'name' | 'hooks'>
* @default 'options'
*/
magicAttribute?: string;
tree?: string;
template?: string;
/**
* where is the collection from
*
* values
* - 'plugin' - collection is from plugin
* - 'core' - collection is from core
* - 'user' - collection is from user
*/
origin?: string;
[key: string]: any;
}
@ -152,6 +172,10 @@ export class Collection<
return this.options.name;
}
get origin() {
return this.options.origin || 'core';
}
get titleField() {
return (this.options.titleField as string) || this.model.primaryKeyAttribute;
}
@ -564,7 +588,16 @@ export class Collection<
this.setField(options.name || name, options);
}
addIndex(index: string | string[] | { fields: string[]; unique?: boolean; [key: string]: any }) {
addIndex(
index:
| string
| string[]
| {
fields: string[];
unique?: boolean;
[key: string]: any;
},
) {
if (!index) {
return;
}
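To make the new dumpRules shapes concrete, a hedged sketch using the defineCollection helper this repository already uses in collection files (the collection names are illustrative):

import { defineCollection } from '@nocobase/database';

// Built-in group as a string shorthand.
export const metaExample = defineCollection({
  name: 'metaExample',
  dumpRules: 'required',
});

// Custom group name plus a delayed-restore hook that runs after all
// collections have been restored.
export const logExample = defineCollection({
  name: 'logExample',
  dumpRules: {
    group: 'log',
    delayRestore: async () => {
      // e.g. re-link rows that reference other restored collections
    },
  },
});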

View File

@ -1,5 +1,6 @@
import { Logger, LoggerOptions, createConsoleLogger, createLogger } from '@nocobase/logger';
import { createConsoleLogger, createLogger, Logger, LoggerOptions } from '@nocobase/logger';
import { applyMixins, AsyncEmitter } from '@nocobase/utils';
import chalk from 'chalk';
import merge from 'deepmerge';
import { EventEmitter } from 'events';
import { backOff } from 'exponential-backoff';
@ -22,6 +23,7 @@ import {
} from 'sequelize';
import { SequelizeStorage, Umzug } from 'umzug';
import { Collection, CollectionOptions, RepositoryType } from './collection';
import { CollectionFactory } from './collection-factory';
import { CollectionGroupManager } from './collection-group-manager';
import { ImporterReader, ImportFileExtension } from './collection-importer';
import DatabaseUtils from './database-utils';
@ -30,6 +32,7 @@ import { referentialIntegrityCheck } from './features/referential-integrity-chec
import { ArrayFieldRepository } from './field-repository/array-field-repository';
import * as FieldTypes from './fields';
import { Field, FieldContext, RelationField } from './fields';
import { checkDatabaseVersion } from './helpers';
import { InheritedCollection } from './inherited-collection';
import InheritanceMap from './inherited-map';
import { registerBuiltInListeners } from './listeners';
@ -72,9 +75,6 @@ import {
import { patchSequelizeQueryInterface, snakeCase } from './utils';
import { BaseValueParser, registerFieldValueParsers } from './value-parsers';
import { ViewCollection } from './view-collection';
import { CollectionFactory } from './collection-factory';
import chalk from 'chalk';
import { checkDatabaseVersion } from './helpers';
export type MergeOptions = merge.Options;
@ -194,7 +194,7 @@ export class Database extends EventEmitter implements AsyncEmitter {
utils = new DatabaseUtils(this);
referenceMap = new ReferencesMap();
inheritanceMap = new InheritanceMap();
importedFrom = new Map<string, Array<string>>();
importedFrom = new Map<string, Set<string>>();
modelHook: ModelHook;
version: DatabaseVersion;
delayCollectionExtend = new Map<string, { collectionOptions: CollectionOptions; mergeOptions?: any }[]>();
@ -307,8 +307,8 @@ export class Database extends EventEmitter implements AsyncEmitter {
name: 'migrations',
autoGenId: false,
timestamps: false,
namespace: 'core.migration',
duplicator: 'required',
dumpRules: 'required',
origin: 'core',
fields: [{ type: 'string', name: 'name', primaryKey: true }],
});
@ -336,26 +336,6 @@ export class Database extends EventEmitter implements AsyncEmitter {
return this._instanceId;
}
registerCollectionType() {
this.collectionFactory.registerCollectionType(InheritedCollection, {
condition: (options) => {
return options.inherits && lodash.castArray(options.inherits).length > 0;
},
});
this.collectionFactory.registerCollectionType(ViewCollection, {
condition: (options) => {
return options.viewName || options.view;
},
});
this.collectionFactory.registerCollectionType(SqlCollection, {
condition: (options) => {
return options.sql;
},
});
}
setContext(context: any) {
this.context = context;
}
@ -459,6 +439,15 @@ export class Database extends EventEmitter implements AsyncEmitter {
}
});
this.on('afterDefineCollection', async (collection: Collection) => {
const options = collection.options;
if (options.origin) {
const existsSet = this.importedFrom.get(options.origin) || new Set();
existsSet.add(collection.name);
this.importedFrom.set(options.origin, existsSet);
}
});
registerBuiltInListeners(this);
}
@ -912,11 +901,10 @@ export class Database extends EventEmitter implements AsyncEmitter {
if (module.extend) {
this.extendCollection(module.collectionOptions, module.mergeOptions);
} else {
const collection = this.collection(module);
if (options.from) {
this.importedFrom.set(options.from, [...(this.importedFrom.get(options.from) || []), collection.name]);
}
const collection = this.collection({
...module,
origin: options.from,
});
result.set(collection.name, collection);
}
@ -924,6 +912,44 @@ export class Database extends EventEmitter implements AsyncEmitter {
return result;
}
private registerCollectionType() {
this.collectionFactory.registerCollectionType(InheritedCollection, {
condition: (options) => {
return options.inherits && lodash.castArray(options.inherits).length > 0;
},
});
this.collectionFactory.registerCollectionType(ViewCollection, {
condition: (options) => {
return options.viewName || options.view;
},
async onSync() {
return;
},
async onDump(dumper, collection: Collection) {
const viewDef = await collection.db.queryInterface.viewDef(collection.getTableNameWithSchemaAsString());
dumper.writeSQLContent(`view-${collection.name}`, {
sql: [
`DROP VIEW IF EXISTS ${collection.getTableNameWithSchemaAsString()}`,
`CREATE VIEW ${collection.getTableNameWithSchemaAsString()} AS ${viewDef}`,
],
group: 'required',
});
return;
},
});
this.collectionFactory.registerCollectionType(SqlCollection, {
condition: (options) => {
return options.sql;
},
});
}
}
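A short sketch of how the reworked origin tracking behaves at runtime (db stands for a configured Database instance; the directory path and plugin name are hypothetical):

// Collections imported on behalf of a plugin are tagged with its origin:
await db.import({
  directory: '/path/to/acl/collections',
  from: 'plugin:acl', // stored on each collection as options.origin
});

// The afterDefineCollection listener then groups collection names by origin:
db.importedFrom.get('plugin:acl'); // Set { 'roles', 'rolesUsers', ... }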
export function extendCollection(collectionOptions: CollectionOptions, mergeOptions?: MergeOptions) {

View File

@ -71,6 +71,10 @@ export class BelongsToManyField extends RelationField {
name: through,
};
if (this.collection.options.dumpRules) {
throughCollectionOptions['dumpRules'] = this.collection.options.dumpRules;
}
// set through collection schema
if (this.collection.collectionSchema()) {
throughCollectionOptions['schema'] = this.collection.collectionSchema();

View File

@ -152,8 +152,10 @@ export abstract class Field {
toSequelize(): any {
const opts = _.omit(this.options, ['name']);
if (this.dataType) {
Object.assign(opts, { type: this.dataType });
// @ts-ignore
Object.assign(opts, { type: this.database.sequelize.normalizeDataType(this.dataType) });
}
return opts;

View File

@ -1,4 +1,4 @@
import { Transactionable } from 'sequelize';
import { Transaction, Transactionable } from 'sequelize';
import { Collection } from '../collection';
import sqlParser from '../sql-parser';
import QueryInterface, { TableInfo } from './query-interface';
@ -86,4 +86,45 @@ export default class MysqlQueryInterface extends QueryInterface {
return results[0]['Create Table'];
}
async getAutoIncrementInfo(options: { tableInfo: TableInfo; fieldName: string }): Promise<{
seqName?: string;
currentVal: number;
}> {
const { tableInfo, fieldName } = options;
const sql = `SELECT AUTO_INCREMENT as currentVal
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = '${tableInfo.tableName}';`;
const results = await this.db.sequelize.query(sql, { type: 'SELECT' });
let currentVal = results[0]['currentVal'] as number;
if (currentVal === null) {
// use max value of field instead
const maxSql = `SELECT MAX(${fieldName}) as currentVal
FROM ${tableInfo.tableName};`;
const maxResults = await this.db.sequelize.query(maxSql, { type: 'SELECT' });
currentVal = maxResults[0]['currentVal'] as number;
}
return {
currentVal,
};
}
async setAutoIncrementVal(options: {
tableInfo: TableInfo;
columnName: string;
seqName?: string;
currentVal: number;
transaction?: Transaction;
}): Promise<void> {
const { tableInfo, columnName, seqName, currentVal, transaction } = options;
const sql = `ALTER TABLE ${tableInfo.tableName} AUTO_INCREMENT = ${currentVal};`;
await this.db.sequelize.query(sql, { transaction });
}
}

View File

@ -2,12 +2,78 @@ import lodash from 'lodash';
import { Collection } from '../collection';
import sqlParser from '../sql-parser/postgres';
import QueryInterface, { TableInfo } from './query-interface';
import { Transaction } from 'sequelize';
export default class PostgresQueryInterface extends QueryInterface {
constructor(db) {
super(db);
}
async setAutoIncrementVal(options: {
tableInfo: TableInfo;
columnName: string;
seqName?: string;
currentVal?: number;
transaction?: Transaction;
}): Promise<void> {
const { tableInfo, columnName, seqName, currentVal, transaction } = options;
if (!seqName) {
throw new Error('seqName is required to set auto increment val in postgres');
}
await this.db.sequelize.query(
`alter table ${this.db.utils.quoteTable({
tableName: tableInfo.tableName,
schema: tableInfo.schema,
})}
alter column "${columnName}" set default nextval('${seqName}')`,
{
transaction,
},
);
if (currentVal) {
await this.db.sequelize.query(`select setval('${seqName}', ${currentVal})`, {
transaction,
});
}
}
async getAutoIncrementInfo(options: {
tableInfo: TableInfo;
fieldName: string;
}): Promise<{ seqName?: string; currentVal: number }> {
const fieldName = options.fieldName || 'id';
const tableInfo = options.tableInfo;
const sequenceNameResult = await this.db.sequelize.query(
`SELECT column_default
FROM information_schema.columns
WHERE table_name = '${tableInfo.tableName}'
and table_schema = '${tableInfo.schema || 'public'}'
and "column_name" = '${fieldName}';`,
);
const columnDefault = sequenceNameResult[0][0]['column_default'];
const regex = new RegExp(/nextval\('(.*)'::regclass\)/);
const match = regex.exec(columnDefault);
const sequenceName = match[1];
const sequenceCurrentValResult = await this.db.sequelize.query(
`select last_value
from ${sequenceName}`,
);
const sequenceCurrentVal = parseInt(sequenceCurrentValResult[0][0]['last_value']);
return {
seqName: sequenceName,
currentVal: sequenceCurrentVal,
};
}
async collectionTableExists(collection: Collection, options?) {
const transaction = options?.transaction;

View File

@ -1,4 +1,4 @@
import { QueryInterface as SequelizeQueryInterface, Transactionable } from 'sequelize';
import { QueryInterface as SequelizeQueryInterface, Transaction, Transactionable } from 'sequelize';
import { Collection } from '../collection';
import Database from '../database';
@ -51,4 +51,17 @@ export default abstract class QueryInterface {
await this.db.sequelize.getQueryInterface().dropAllTables(options);
}
abstract getAutoIncrementInfo(options: {
tableInfo: TableInfo;
fieldName: string;
}): Promise<{ seqName?: string; currentVal: number }>;
abstract setAutoIncrementVal(options: {
tableInfo: TableInfo;
columnName: string;
seqName?: string;
currentVal: number;
transaction?: Transaction;
}): Promise<void>;
}
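A hedged sketch of how a restorer might drive these two abstract methods to realign auto-increment counters after inserting dumped rows (tableInfo, the column name, and maxRestoredId are illustrative):

const queryInterface = db.queryInterface; // dialect-specific subclass
const tableInfo = { tableName: 'posts', schema: 'public' };

// Read the current counter (and, on postgres, the backing sequence name).
const { seqName, currentVal } = await queryInterface.getAutoIncrementInfo({
  tableInfo,
  fieldName: 'id',
});

// After the raw inserts, push the counter past the largest restored id.
const maxRestoredId = 42; // in practice computed from the restored data
if (maxRestoredId > currentVal) {
  await queryInterface.setAutoIncrementVal({
    tableInfo,
    columnName: 'id',
    seqName,
    currentVal: maxRestoredId,
  });
}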

View File

@ -1,6 +1,7 @@
import { Collection } from '../collection';
import sqlParser from '../sql-parser';
import QueryInterface, { TableInfo } from './query-interface';
import { Transaction } from 'sequelize';
export default class SqliteQueryInterface extends QueryInterface {
constructor(db) {
@ -90,4 +91,48 @@ export default class SqliteQueryInterface extends QueryInterface {
showTableDefinition(tableInfo: TableInfo): Promise<any> {
return Promise.resolve(undefined);
}
async getAutoIncrementInfo(options: { tableInfo: TableInfo; fieldName: string }): Promise<{
seqName?: string;
currentVal: number;
}> {
const { tableInfo } = options;
const tableName = tableInfo.tableName;
const sql = `SELECT seq
FROM sqlite_sequence
WHERE name = '${tableName}';`;
const results = await this.db.sequelize.query(sql, { type: 'SELECT' });
const row = results[0];
if (!row) {
return {
currentVal: 0,
};
}
return {
currentVal: row['seq'],
};
}
async setAutoIncrementVal(options: {
tableInfo: TableInfo;
columnName: string;
seqName?: string;
currentVal: number;
transaction?: Transaction;
}): Promise<void> {
const { tableInfo, columnName, seqName, currentVal, transaction } = options;
const tableName = tableInfo.tableName;
const sql = `UPDATE sqlite_sequence
SET seq = ${currentVal}
WHERE name = '${tableName}';`;
await this.db.sequelize.query(sql, { transaction });
}
}

View File

@ -28,6 +28,7 @@ export class SyncRunner {
const tableName = inheritedCollection.getTableNameWithSchema();
const attributes = model.tableAttributes;
const childAttributes = lodash.pickBy(attributes, (value) => {
return !value.inherit;
});

View File

@ -264,7 +264,7 @@ export class AppSupervisor extends EventEmitter implements AsyncEmitter {
if (
maintainingStatus &&
['install', 'upgrade', 'pm.add', 'pm.update', 'pm.enable', 'pm.disable', 'pm.remove'].includes(
['install', 'upgrade', 'pm.add', 'pm.update', 'pm.enable', 'pm.disable', 'pm.remove', 'restore'].includes(
maintainingStatus.command.name,
) &&
!startOptions.recover

View File

@ -500,7 +500,6 @@ export class Application<StateT = DefaultState, ContextT = DefaultContext> exten
return command;
} catch (error) {
console.log({ error });
if (!this.activatedCommand) {
this.activatedCommand = {
name: 'unknown',

View File

@ -12,5 +12,9 @@ export function getErrorLevel(e: Error): ErrorLevel {
return 'warn';
}
if (e.name === 'RestoreCheckError') {
return 'warn';
}
return 'fatal';
}

View File

@ -14,7 +14,7 @@ import { parse } from 'url';
import xpipe from 'xpipe';
import { AppSupervisor } from '../app-supervisor';
import { ApplicationOptions } from '../application';
import { PLUGIN_STATICS_PATH, getPackageDirByExposeUrl, getPackageNameByExposeUrl } from '../plugin-manager';
import { getPackageDirByExposeUrl, getPackageNameByExposeUrl, PLUGIN_STATICS_PATH } from '../plugin-manager';
import { applyErrorWithArgs, getErrorWithCode } from './errors';
import { IPCSocketClient } from './ipc-socket-client';
import { IPCSocketServer } from './ipc-socket-server';
@ -326,6 +326,8 @@ export class Gateway extends EventEmitter {
const mainApp = AppSupervisor.getInstance().bootMainApp(options.mainAppOptions);
await mainApp.load();
mainApp
.runAsCLI(process.argv, {
throwError: true,

View File

@ -11,9 +11,9 @@ export class ApplicationVersion {
if (!app.db.hasCollection('applicationVersion')) {
app.db.collection({
name: 'applicationVersion',
namespace: 'core.applicationVersion',
duplicator: 'required',
dataType: 'meta',
timestamps: false,
dumpRules: 'required',
fields: [{ name: 'value', type: 'string' }],
});
}

View File

@ -2,8 +2,7 @@ import { defineCollection } from '@nocobase/database';
export default defineCollection({
name: 'applicationPlugins',
namespace: 'core.applicationPlugins',
duplicator: 'required',
dumpRules: 'required',
repository: 'PluginManagerRepository',
fields: [
{ type: 'string', name: 'name', unique: true },

View File

@ -4,7 +4,7 @@ import fs from 'fs';
import type { TFuncKey, TOptions } from 'i18next';
import { resolve } from 'path';
import { Application } from './application';
import { InstallOptions, getExposeChangelogUrl, getExposeReadmeUrl } from './plugin-manager';
import { getExposeChangelogUrl, getExposeReadmeUrl, InstallOptions } from './plugin-manager';
import { checkAndGetCompatible } from './plugin-manager/utils';
export interface PluginInterface {
@ -109,7 +109,7 @@ export abstract class Plugin<O = any> implements PluginInterface {
async importCollections(collectionsPath: string) {
await this.db.import({
directory: collectionsPath,
from: this.getName(),
from: `plugin:${this.getName()}`,
});
}
@ -129,20 +129,30 @@ export abstract class Plugin<O = any> implements PluginInterface {
...this.options,
};
}
const file = await fs.promises.realpath(resolve(process.env.NODE_MODULES_PATH, packageName));
const lastUpdated = (await fs.promises.stat(file)).ctime;
const others = await checkAndGetCompatible(packageName);
return {
const results = {
...this.options,
...others,
readmeUrl: getExposeReadmeUrl(packageName, locale),
changelogUrl: getExposeChangelogUrl(packageName),
lastUpdated,
file,
updatable: file.startsWith(process.env.PLUGIN_STORAGE_PATH),
displayName: packageJson[`displayName.${locale}`] || packageJson.displayName || name,
description: packageJson[`description.${locale}`] || packageJson.description,
};
if (!options.withOutOpenFile) {
const file = await fs.promises.realpath(
resolve(process.env.NODE_MODULES_PATH || resolve(process.cwd(), 'node_modules'), packageName),
);
return {
...results,
...(await checkAndGetCompatible(packageName)),
lastUpdated: (await fs.promises.stat(file)).ctime,
file,
updatable: file.startsWith(process.env.PLUGIN_STORAGE_PATH),
};
}
return results;
}
}
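A brief illustration of the new withOutOpenFile flag handled above (plugin stands for any loaded plugin instance; the call sites are hypothetical):

// Skips realpath/stat on the package dir and omits the file-derived fields:
const summary = await plugin.toJSON({ locale: 'en-US', withOutOpenFile: true });

// Default behaviour still resolves the package and reports file metadata:
const detailed = await plugin.toJSON({ locale: 'en-US' });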

View File

@ -6,6 +6,7 @@ export { default as supertest } from 'supertest';
export * from './mockServer';
export const pgOnly: () => any = () => (process.env.DB_DIALECT == 'postgres' ? describe : describe.skip);
export const isPg = () => process.env.DB_DIALECT == 'postgres';
export function randomStr() {
// create random string

View File

@ -1,8 +1,9 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
export default {
export default defineCollection({
name: 'rolesUsers',
duplicator: 'optional',
namespace: 'acl.acl',
dumpRules: {
group: 'user',
},
fields: [{ type: 'boolean', name: 'default' }],
} as CollectionOptions;
});

View File

@ -1,11 +1,7 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
export default {
namespace: 'acl.acl',
duplicator: {
dumpable: 'required',
with: 'uiSchemas',
},
export default defineCollection({
dumpRules: 'required',
name: 'roles',
title: '{{t("Roles")}}',
autoGenId: false,
@ -98,4 +94,4 @@ export default {
through: 'rolesUsers',
},
],
} as CollectionOptions;
});

View File

@ -1,8 +1,7 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
export default {
namespace: 'acl.acl',
duplicator: 'required',
export default defineCollection({
dumpRules: 'required',
name: 'rolesResources',
model: 'RoleResourceModel',
indexes: [
@ -30,4 +29,4 @@ export default {
target: 'rolesResourcesActions',
},
],
} as CollectionOptions;
});

View File

@ -1,8 +1,7 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
export default {
namespace: 'acl.acl',
duplicator: 'required',
export default defineCollection({
dumpRules: 'required',
name: 'rolesResourcesActions',
model: 'RoleResourceActionModel',
fields: [
@ -28,4 +27,4 @@ export default {
onDelete: 'RESTRICT',
},
],
} as CollectionOptions;
});

View File

@ -1,8 +1,7 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
export default {
namespace: 'acl.acl',
duplicator: 'required',
export default defineCollection({
dumpRules: 'required',
name: 'rolesResourcesScopes',
fields: [
{
@ -22,4 +21,4 @@ export default {
name: 'scope',
},
],
} as CollectionOptions;
});

View File

@ -1,6 +1,6 @@
import { extend } from '@nocobase/database';
import { extendCollection } from '@nocobase/database';
export default extend({
export default extendCollection({
name: 'users',
fields: [
{

View File

@ -1,5 +1,6 @@
import { NoPermissionError } from '@nocobase/acl';
import { Context, utils as actionUtils } from '@nocobase/actions';
import { Cache } from '@nocobase/cache';
import { Collection, RelationField, snakeCase } from '@nocobase/database';
import { Plugin } from '@nocobase/server';
import { Mutex } from 'async-mutex';
@ -13,7 +14,6 @@ import { setCurrentRole } from './middlewares/setCurrentRole';
import { RoleModel } from './model/RoleModel';
import { RoleResourceActionModel } from './model/RoleResourceActionModel';
import { RoleResourceModel } from './model/RoleResourceModel';
import { Cache } from '@nocobase/cache';
export interface AssociationFieldAction {
associationActions: string[];
@ -891,10 +891,11 @@ export class PluginACL extends Plugin {
async load() {
await this.importCollections(resolve(__dirname, 'collections'));
this.db.extendCollection({
name: 'rolesUischemas',
namespace: 'acl.acl',
duplicator: 'required',
dumpRules: 'required',
origin: `plugin:${this.name}`,
});
}
}

View File

@ -1,7 +1,7 @@
import { RecursionField } from '@formily/react';
import { CollectionManagerProvider, SchemaComponentOptions, useCurrentRoles } from '@nocobase/client';
import React from 'react';
import apiKeysCollection from '../../collections';
import apiKeysCollection from '../../collections/apiKeys';
import { ExpiresSelect } from './ExpiresSelect';
import { configurationSchema } from './schema';

View File

@ -2,12 +2,12 @@ import type { CollectionOptions } from '@nocobase/database';
import { generateNTemplate } from '../locale';
export default {
namespace: 'api-keys',
duplicator: 'optional',
dumpRules: {
group: 'user',
},
shared: true,
name: 'apiKeys',
title: '{{t("API keys", {"ns": "api-keys"})}}',
sortable: 'sort',
model: 'ApiKeyModel',
createdBy: true,
updatedAt: false,
updatedBy: false,

View File

@ -1 +0,0 @@
export { default } from './api-keys';

View File

@ -34,9 +34,7 @@ export default class ApiKeysPlugin extends Plugin<ApiKeysPluginConfig> {
}
async load() {
await this.db.import({
directory: resolve(__dirname, '../collections'),
});
await this.importCollections(resolve(__dirname, '../collections'));
this.app.resourcer.use(async (ctx, next) => {
const { resourceName, actionName } = ctx.action.params;

View File

@ -1,14 +1,16 @@
import { defineCollection } from '@nocobase/database';
export default defineCollection({
namespace: 'audit-logs.auditLogs',
duplicator: 'optional',
dumpRules: {
group: 'log',
},
name: 'auditChanges',
title: '变动值',
createdBy: false,
updatedBy: false,
createdAt: false,
updatedAt: false,
shared: true,
fields: [
{
type: 'json',

View File

@ -1,12 +1,14 @@
import { defineCollection } from '@nocobase/database';
export default defineCollection({
namespace: 'audit-logs.auditLogs',
duplicator: 'optional',
dumpRules: {
group: 'log',
},
name: 'auditLogs',
createdBy: false,
updatedBy: false,
updatedAt: false,
shared: true,
fields: [
{
type: 'date',

View File

@ -10,9 +10,7 @@ export default class PluginActionLogs extends Plugin {
}
async load() {
await this.db.import({
directory: path.resolve(__dirname, 'collections'),
});
await this.importCollections(path.resolve(__dirname, 'collections'));
this.db.addMigrations({
namespace: 'audit-logs',

View File

@ -1,14 +1,15 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
/**
* Collection for extended authentication methods.
*/
export default {
namespace: 'auth.auth',
duplicator: 'optional',
export default defineCollection({
dumpRules: {
group: 'third-party',
},
shared: true,
name: 'authenticators',
sortable: true,
title: '{{t("Authenticators")}}',
model: 'AuthModel',
createdBy: true,
updatedBy: true,
@ -95,4 +96,4 @@ export default {
through: 'usersAuthenticators',
},
],
} as CollectionOptions;
});

View File

@ -1,8 +1,10 @@
import { CollectionOptions } from '@nocobase/client';
import { defineCollection } from '@nocobase/database';
export default {
namespace: 'auth.token-black',
duplicator: 'optional',
export default defineCollection({
dumpRules: {
group: 'log',
},
shared: true,
name: 'tokenBlacklist',
model: 'TokenBlacklistModel',
fields: [
@ -16,4 +18,4 @@ export default {
name: 'expiration',
},
],
} as CollectionOptions;
});

View File

@ -1,20 +1,15 @@
import { CollectionOptions } from '@nocobase/database';
import { defineCollection } from '@nocobase/database';
/**
* Collection for user information of extended authentication methods,
* such as saml, oidc, oauth, sms, etc.
*/
export default {
namespace: 'auth.auth',
duplicator: {
dumpable: 'optional',
/**
* When dump this collection, the users collection is required to be dumped.
*/
with: 'users',
export default defineCollection({
dumpRules: {
group: 'user',
},
shared: true,
name: 'usersAuthenticators',
title: '{{t("Users Authenticators")}}',
model: 'UserAuthModel',
createdBy: true,
updatedBy: true,
@ -70,4 +65,4 @@ export default {
defaultValue: {},
},
],
} as CollectionOptions;
});

View File

@ -24,9 +24,7 @@ export class AuthPlugin extends Plugin {
async load() {
// Set up database
await this.db.import({
directory: resolve(__dirname, 'collections'),
});
await this.importCollections(resolve(__dirname, 'collections'));
this.db.addMigrations({
namespace: 'auth',
directory: resolve(__dirname, 'migrations'),

View File

@ -1,5 +1,5 @@
{
"name": "@nocobase/plugin-duplicator",
"name": "@nocobase/plugin-backup-restore",
"displayName": "App backup & restore",
"displayName.zh-CN": "应用的备份与还原",
"description": "Can be used for application replication, migration, upgrade, etc",
@ -10,7 +10,7 @@
"repository": {
"type": "git",
"url": "git+https://github.com/nocobase/nocobase.git",
"directory": "packages/plugins/duplicator"
"directory": "packages/plugins/plugin-backup-restore"
},
"devDependencies": {
"@koa/multer": "^3.0.2",
@ -23,14 +23,17 @@
"koa-send": "^5.0.1",
"mkdirp": "^1.0.4",
"react": "^18.2.0",
"tar": "^6.1.13"
"tar": "^6.1.13",
"object-path": "^0.11.8",
"content-disposition": "^0.5.4",
"semver": "^7.5.4"
},
"peerDependencies": {
"@nocobase/client": "0.x",
"@nocobase/database": "0.x",
"@nocobase/server": "0.x",
"@nocobase/test": "0.x",
"@nocobase/utils": "0.x"
},
"gitHead": "979a9c59a98c61a2287dd847580746a9b597cbde"
"@nocobase/utils": "0.x",
"@nocobase/actions": "0.x"
}
}

View File

@ -0,0 +1,476 @@
import { InboxOutlined, PlusOutlined, ReloadOutlined, UploadOutlined } from '@ant-design/icons';
import { FormItem } from '@formily/antd-v5';
import { Checkbox, DatePicker, useAPIClient, useCompile } from '@nocobase/client';
import { Alert, App, Button, Card, Divider, Modal, Space, Spin, Table, Tabs, Upload, UploadProps, message } from 'antd';
import { saveAs } from 'file-saver';
import React, { useEffect, useMemo, useState } from 'react';
import { useDuplicatorTranslation } from './locale';
const { Dragger } = Upload;
function useUploadProps(props: UploadProps): any {
const onChange = (param) => {
props.onChange?.(param);
};
const api = useAPIClient();
return {
...props,
customRequest({ action, data, file, filename, headers, onError, onProgress, onSuccess, withCredentials }) {
const formData = new FormData();
if (data) {
Object.keys(data).forEach((key) => {
formData.append(key, data[key]);
});
}
formData.append(filename, file);
// eslint-disable-next-line promise/catch-or-return
api.axios
.post(action, formData, {
withCredentials,
headers,
onUploadProgress: ({ total, loaded }) => {
onProgress({ percent: Math.round((loaded / total) * 100).toFixed(2) }, file);
},
})
.then(({ data }) => {
onSuccess(data, file);
})
.catch(onError)
.finally(() => {});
return {
abort() {
console.log('upload progress is aborted.');
},
};
},
onChange,
};
}
const LearnMore: any = (props: { collectionsData?: any; isBackup?: boolean }) => {
const { collectionsData } = props;
const { t } = useDuplicatorTranslation();
const [isModalOpen, setIsModalOpen] = useState(false);
const [dataSource, setDataSource] = useState<any>(collectionsData);
useEffect(() => {
setDataSource(collectionsData);
}, [collectionsData]);
const apiClient = useAPIClient();
const compile = useCompile();
const resource = useMemo(() => {
return apiClient.resource('backupFiles');
}, [apiClient]);
const showModal = async () => {
if (props.isBackup) {
const data = await resource.dumpableCollections();
setDataSource(data?.data);
setIsModalOpen(true);
}
setIsModalOpen(true);
};
const handleOk = () => {
setIsModalOpen(false);
};
const handleCancel = () => {
setIsModalOpen(false);
};
const columns = [
{
title: t('Collection'),
dataIndex: 'collection',
key: 'collection',
render: (_, data) => {
return (
<div>
{compile(data.title)}
<br />
<div style={{ color: 'rgba(0, 0, 0, 0.3)', fontSize: '0.9em' }}>{data.name}</div>
</div>
);
},
},
{
title: t('Origin'),
dataIndex: 'plugin',
key: 'origin',
width: '50%',
render: (_, data) => {
const { origin } = data;
return (
<div>
{origin.title}
<br />
<div style={{ color: 'rgba(0, 0, 0, 0.3)', fontSize: '0.9em' }}>{origin.name}</div>
</div>
);
},
},
];
const items = Object.keys(dataSource || {}).map((item) => {
return {
key: item,
label: t(`${item}.title`),
children: (
<>
<Alert style={{ marginBottom: 16 }} message={t(`${item}.description`)} />
<Table
pagination={{ pageSize: 100 }}
bordered
size={'small'}
dataSource={dataSource[item]}
columns={columns}
scroll={{ y: 400 }}
/>
</>
),
};
});
return (
<>
<a onClick={showModal}>{t('Learn more')}</a>
<Modal
title={t('Backup instructions')}
width={800}
open={isModalOpen}
footer={null}
onOk={handleOk}
onCancel={handleCancel}
>
<Tabs defaultActiveKey="required" items={items} />
</Modal>
</>
);
};
const Restore: React.FC<any> = ({ ButtonComponent = Button, title, upload = false, fileData }) => {
const { t } = useDuplicatorTranslation();
const [dataTypes, setDataTypes] = useState<any[]>(['required']);
const [isModalOpen, setIsModalOpen] = useState(false);
const [restoreData, setRestoreData] = useState<any>(null);
const [loading, setLoading] = useState(false);
const apiClient = useAPIClient();
const resource = useMemo(() => {
return apiClient.resource('backupFiles');
}, [apiClient]);
const [dataSource, setDataSource] = useState([]);
useEffect(() => {
setDataSource(
Object.keys(restoreData?.dumpableCollectionsGroupByGroup || []).map((key) => ({
value: key,
label: t(`${key}.title`),
disabled: ['required', 'skipped'].includes(key),
})),
);
}, [restoreData]);
const showModal = async () => {
setIsModalOpen(true);
if (!upload) {
setLoading(true);
const { data } = await resource.get({ filterByTk: fileData.name });
setDataSource(
Object.keys(data?.data?.meta?.dumpableCollectionsGroupByGroup || []).map((key) => ({
value: key,
label: t(`${key}.title`),
disabled: ['required', 'skipped'].includes(key),
})),
);
setRestoreData(data?.data?.meta);
setLoading(false);
}
};
const handleOk = () => {
resource.restore({
values: {
dataTypes,
filterByTk: fileData?.name,
key: restoreData?.key,
},
});
setIsModalOpen(false);
};
const handleCancel = () => {
setIsModalOpen(false);
setRestoreData(null);
setDataTypes(['required']);
};
return (
<>
<ButtonComponent onClick={showModal}>{title}</ButtonComponent>
<Modal
title={t('Restore')}
width={800}
footer={upload && !restoreData ? null : undefined}
open={isModalOpen}
onOk={handleOk}
onCancel={handleCancel}
>
<Spin spinning={loading}>
{upload && !restoreData && <RestoreUpload setRestoreData={setRestoreData} />}
{(!upload || restoreData) && [
<strong style={{ fontWeight: 600, display: 'block', margin: '16px 0 8px' }} key="info">
{t('Select the data to be restored')} (
<LearnMore collectionsData={restoreData?.dumpableCollectionsGroupByGroup} />
):
</strong>,
<div style={{ lineHeight: 2, marginBottom: 8 }} key="dataType">
<FormItem>
<Checkbox.Group
options={dataSource}
style={{ flexDirection: 'column' }}
value={dataTypes}
onChange={(checkValue) => setDataTypes(checkValue)}
/>
</FormItem>
</div>,
]}
</Spin>
</Modal>
</>
);
};
const NewBackup: React.FC<any> = ({ ButtonComponent = Button, refresh }) => {
const { t } = useDuplicatorTranslation();
const [isModalOpen, setIsModalOpen] = useState(false);
const [dataTypes, setBackupData] = useState<any[]>(['required']);
const apiClient = useAPIClient();
const [dataSource, setDataSource] = useState([]);
const showModal = async () => {
const { data } = await apiClient.resource('backupFiles').dumpableCollections();
setDataSource(
Object.keys(data || []).map((key) => ({
value: key,
label: t(`${key}.title`),
disabled: ['required', 'skipped'].includes(key),
})),
);
setIsModalOpen(true);
};
const handleOk = () => {
apiClient.request({
url: 'backupFiles:create',
method: 'post',
data: {
dataTypes,
},
});
setIsModalOpen(false);
setBackupData(['required']);
setTimeout(() => {
refresh();
}, 500);
};
const handleCancel = () => {
setIsModalOpen(false);
setBackupData(['required']);
};
return (
<>
<ButtonComponent icon={<PlusOutlined />} type="primary" onClick={showModal}>
{t('New backup')}
</ButtonComponent>
<Modal title={t('New backup')} width={800} open={isModalOpen} onOk={handleOk} onCancel={handleCancel}>
<strong style={{ fontWeight: 600, display: 'block', margin: '16px 0 8px' }}>
{t('Select the data to be backed up')} (
<LearnMore isBackup={true} />
):
</strong>
<div style={{ lineHeight: 2, marginBottom: 8 }}>
<Checkbox.Group
options={dataSource}
style={{ flexDirection: 'column' }}
onChange={(checkValue) => setBackupData(checkValue)}
value={dataTypes}
/>
</div>
</Modal>
</>
);
};
const RestoreUpload: React.FC<any> = (props: any) => {
const { t } = useDuplicatorTranslation();
const uploadProps: UploadProps = {
multiple: false,
action: '/backupFiles:upload',
onChange(info) {
if (info.fileList.length > 1) {
info.fileList.splice(0, info.fileList.length - 1); // keep only one file
}
const { status } = info.file;
if (status === 'done') {
message.success(`${info.file.name} ` + t('file uploaded successfully'));
props.setRestoreData({ ...info.file.response?.data?.meta, key: info.file.response?.data.key });
} else if (status === 'error') {
message.error(`${info.file.name} ` + t('file upload failed'));
}
},
onDrop(e) {
console.log('Dropped files', e.dataTransfer.files);
},
};
return (
<Dragger {...useUploadProps(uploadProps)}>
<p className="ant-upload-drag-icon">
<InboxOutlined />
</p>
<p className="ant-upload-text"> {t('Click or drag file to this area to upload')}</p>
</Dragger>
);
};
export const BackupAndRestoreList = () => {
const { t } = useDuplicatorTranslation();
const apiClient = useAPIClient();
const [dataSource, setDataSource] = useState([]);
const [loading, setLoading] = useState(false);
const [downloadTarget, setDownloadTarget] = useState(false);
const { modal } = App.useApp();
const resource = useMemo(() => {
return apiClient.resource('backupFiles');
}, [apiClient]);
useEffect(() => {
queryFieldList();
}, []);
const queryFieldList = async () => {
setLoading(true);
const { data } = await resource.list();
setDataSource(data.data);
setLoading(false);
};
const handleDownload = async (fileData) => {
setDownloadTarget(fileData.name);
const data = await apiClient.request({
url: 'backupFiles:download',
method: 'get',
params: {
filterByTk: fileData.name,
},
responseType: 'blob',
});
setDownloadTarget(false);
const blob = new Blob([data.data]);
saveAs(blob, fileData.name);
};
const handleRefresh = async () => {
await queryFieldList();
};
const handleDestroy = (fileData) => {
modal.confirm({
title: t('Delete record', { ns: 'client' }),
content: t('Are you sure you want to delete it?', { ns: 'client' }),
onOk: async () => {
await resource.destroy({ filterByTk: fileData.name });
await queryFieldList();
message.success(t('Deleted successfully'));
},
});
};
return (
<div>
<Card bordered={false}>
<Space style={{ float: 'right', marginBottom: 16 }}>
<Button onClick={handleRefresh} icon={<ReloadOutlined />}>
{t('Refresh')}
</Button>
<Restore
upload
title={
<>
<UploadOutlined /> {t('Restore backup from local')}
</>
}
/>
<NewBackup refresh={handleRefresh} />
</Space>
<Table
dataSource={dataSource}
loading={loading}
columns={[
{
title: t('Backup file'),
dataIndex: 'name',
width: 400,
onCell: (data) => {
return data.inProgress
? {
colSpan: 4,
}
: {};
},
render: (name, data) =>
data.inProgress ? (
<div style={{ color: 'rgba(0, 0, 0, 0.88)' }}>
{name}({t('Backing up')}...)
</div>
) : (
<div>{name}</div>
),
},
{
title: t('File size'),
dataIndex: 'fileSize',
onCell: (data) => {
return data.inProgress
? {
colSpan: 0,
}
: {};
},
},
{
title: t('Created at', { ns: 'client' }),
dataIndex: 'createdAt',
onCell: (data) => {
return data.inProgress
? {
colSpan: 0,
}
: {};
},
render: (value) => {
return <DatePicker.ReadPretty value={value} showTime />;
},
},
{
title: t('Actions', { ns: 'client' }),
dataIndex: 'actions',
onCell: (data) => {
return data.inProgress
? {
colSpan: 0,
}
: {};
},
render: (_, record) => (
<Space split={<Divider type="vertical" />}>
<Restore ButtonComponent={'a'} title={t('Restore')} fileData={record} />
<a type="link" onClick={() => handleDownload(record)}>
{t('Download')}
</a>
<a onClick={() => handleDestroy(record)}>{t('Delete')}</a>
</Space>
),
},
]}
/>
</Card>
</div>
);
};

View File

@ -0,0 +1,12 @@
import { SchemaComponentOptions, CurrentAppInfoProvider } from '@nocobase/client';
import React, { FC } from 'react';
export const DuplicatorProvider: FC = function (props) {
return (
<CurrentAppInfoProvider>
<SchemaComponentOptions>{props.children}</SchemaComponentOptions>
</CurrentAppInfoProvider>
);
};
DuplicatorProvider.displayName = 'DuplicatorProvider';

View File

@ -0,0 +1,17 @@
import { Plugin } from '@nocobase/client';
import { BackupAndRestoreList } from './Configuration';
import { DuplicatorProvider } from './DuplicatorProvider';
import { NAMESPACE } from './locale';
export class DuplicatorPlugin extends Plugin {
async load() {
this.app.use(DuplicatorProvider);
this.app.pluginSettingsManager.add(NAMESPACE, {
title: `{{t("Backup & Restore", { ns: "${NAMESPACE}" })}}`,
icon: 'CloudServerOutlined',
Component: BackupAndRestoreList,
aclSnippet: 'pm.backup.restore',
});
}
}
export default DuplicatorPlugin;

View File

@ -0,0 +1,18 @@
import { i18n } from '@nocobase/client';
import { useTranslation } from 'react-i18next';
export const NAMESPACE = 'backup-restore';
export function lang(key: string) {
return i18n.t(key, { ns: NAMESPACE });
}
export function generateNTemplate(key: string) {
return `{{t('${key}', { ns: '${NAMESPACE}', nsMode: 'fallback' })}}`;
}
export function useDuplicatorTranslation() {
return useTranslation(NAMESPACE, {
nsMode: 'fallback',
});
}

View File

@ -0,0 +1,44 @@
{
"Backup file": "Backup file",
"System metadata": "System metadata",
"System config": "System config",
"Business data": "Business data",
"Backup & Restore": "Backup & Restore",
"Backup": "Backup",
"Restore": "Restore",
"Configuration": "Configuration",
"Select the data to be backed up": "Select the data to be backed up",
"Select the data to be restored": "Select the data to be restored",
"Click or drag file to this area to upload": "Click or drag file to this area to upload",
"Learn more": "Learn more",
"Start backup": "Start backup",
"Start restore": "Start restore",
"Backed up successfully": "Backed up successfully",
"Plugin": "Plugin",
"file uploaded successfully": "file uploaded successfully",
"file upload failed": "file upload failed",
"Download": "Download",
"Restore backup from local": "Restore backup from local",
"Backup instructions": "Backup instructions",
"File size": "File size",
"New backup": "New backup",
"Origin": "Origin",
"Backing up": "Backing up",
"Refresh": "Refresh",
"Delete": "Delete",
"Deleted successfully": "Deleted successfully",
"required.title": "Required data",
"user.title": "User data",
"log.title": "Log data",
"custom.title": "Custom collection data",
"skipped.title": "Skipped data",
"unknown.title": "Unknown",
"third-party.title": "Third-party service information",
"required.description": "Required data",
"user.description": "User data",
"log.description": "Log data",
"custom.description": "Custom collection data",
"skipped.description": "Skipped data",
"unknown.description": "Data without configured dump rules",
"third-party.description": "Third-party service information"
}

View File

@ -0,0 +1,24 @@
export default {
'System metadata': 'システムメタデータ',
'System config': 'システム構成',
'Business data': 'ビジネスデータ',
'Backup & Restore': 'バックアップとリストア',
Backup: 'バックアップ',
Restore: 'リストア',
Configuration: '構成',
'Select the data to be backed up': 'バックアップされたデータの選択',
'Select the data to be restored': 'リストアされたデータの選択',
'Learn more': '詳細',
'Start backup': 'バックアップの開始',
'Start restore': 'リストアを開始',
'Backed up successfully': 'バックアップ成功',
Plugin: 'プラグイン',
'file uploaded successfully': 'ファイルのアップロードに成功しました',
Download: 'ダウンロード',
'Restore backup from local':'ローカルからバックアップに返信',
'Backup instructions':'バックアップの説明',
'File size':'ファイルサイズ',
'New backup':'新規バックアップ',
'Origin':'ソース',
'Backing up':'バックアップ中'
};

View File

@ -0,0 +1,27 @@
const locale = {
'System metadata': 'Metadados do Sistema',
'System config': 'configuração do sistema',
'Business data': 'Dados comerciais',
'Backup & Restore': 'Cópia de Segurança e Restauração',
Backup: 'cópias de segurança',
Restore: 'redução',
Configuration: 'atribuição',
'Select the data to be backed up': 'Seleccionar os dados de cópia de segurança',
'Select the data to be restored': 'Seleccionar os dados restaurados',
'Learn more': 'Saiba mais',
'Start restore': 'Iniciar a Restauração',
'Start backup': 'Iniciar a cópia de segurança',
'Backed up successfully': 'Cópia de segurança bem sucedida',
'Plugin':'Plugins',
'file uploaded successfully': 'O ficheiro foi enviado com sucesso',
'file upload failed': 'O envio do ficheiro falhou',
'Download':'download',
'Restore backup from local':'Restaurar a cópia de segurança localmente',
'Backup instructions':'Instruções de cópia de segurança',
'File size':'tamanho do ficheiro',
'New backup':'Nova Cópia de Segurança',
'Origin':'fonte',
'Backing up':'Cópia de segurança em curso'
};
export default locale;

View File

@ -0,0 +1,50 @@
{
"Backup file": "备份文件",
"Select Import data": "请选择导入数据",
"Select Import Plugins": "请选择导入插件",
"Select User Collections": "请选择用户数据",
"Basic Data": "基础数据",
"Optional Data": "可选数据",
"User Data": "用户数据",
"System metadata": "系统元数据",
"System config": "系统配置",
"Business data": "业务数据",
"Backup & Restore": "备份和还原",
"Backup": "备份",
"Restore": "还原",
"Configuration": "配置",
"Select the data to be backed up": "选择备份的数据",
"Select the data to be restored": "选择还原的数据",
"Click or drag file to this area to upload": "点击或拖拽文件至此区域即可上传",
"Learn more": "了解更多",
"Start backup": "开始备份",
"Start restore": "开始还原",
"Backed up successfully": "备份成功",
"Deleted successfully": "删除成功",
"Plugin": "插件",
"file uploaded successfully": "文件上传成功",
"file upload failed": "文件上传失败",
"Download": "下载",
"Restore backup from local": "从本地恢复备份",
"Backup instructions": "备份说明",
"File size":"文件大小",
"New backup":"新建备份",
"Origin":"来源",
"Backing up":"备份中",
"Refresh": "刷新",
"Delete": "删除",
"required.title": "必备数据",
"user.title": "用户数据",
"log.title": "日志数据",
"custom.title": "自建表数据",
"skipped.title": "跳过的数据",
"unknown.title": "未知数据",
"third-party.title": "第三方服务信息",
"required.description": "必备数据",
"user.description": "用户数据",
"log.description": "日志数据",
"custom.description": "自建表数据",
"skipped.description": "跳过的数据",
"unknown.description": "没有配置备份规则的数据",
"third-party.description": "第三方服务信息"
}

View File

@ -0,0 +1,177 @@
import { MockServer, waitSecond } from '@nocobase/test';
import createApp from './index';
import { Dumper } from '../dumper';
describe('backup files', () => {
let app: MockServer;
beforeEach(async () => {
app = await createApp();
});
afterEach(async () => {
await app.destroy();
});
it('should create dump file', async () => {
const createResponse = await app
.agent()
.resource('backupFiles')
.create({
dataTypes: ['meta', 'config', 'business'],
});
expect(createResponse.status).toBe(200);
const dumpKey = createResponse.body.data.key;
expect(dumpKey).toBeDefined();
const promise = Dumper.getTaskPromise(dumpKey);
await promise;
});
describe('resource action', () => {
let dumpKey: string;
let dumper: Dumper;
beforeEach(async () => {
dumper = new Dumper(app);
dumpKey = await dumper.runDumpTask({
groups: new Set(['meta', 'config', 'business']),
});
const promise = Dumper.getTaskPromise(dumpKey);
await promise;
});
it('should list backup file with in progress status', async () => {
await waitSecond(1000);
const fileName = Dumper.generateFileName();
await dumper.writeLockFile(fileName);
const listResponse = await app.agent().resource('backupFiles').list();
expect(listResponse.status).toBe(200);
const body = listResponse.body;
const firstItem = body.data[0];
expect(firstItem.status).toEqual('in_progress');
});
it('should list backup file', async () => {
const listResponse = await app.agent().resource('backupFiles').list();
expect(listResponse.status).toBe(200);
const body = listResponse.body;
expect(body.meta.count).toBeDefined();
expect(body.meta.totalPage).toBeDefined();
});
it('should get backup file', async () => {
const getResponse = await app.agent().resource('backupFiles').get({
filterByTk: dumpKey,
});
expect(getResponse.status).toBe(200);
expect(getResponse.body.data.name).toEqual(dumpKey);
console.log({ getResponse: getResponse.body.data });
});
it('should restore from file name', async () => {
const restoreResponse = await app
.agent()
.resource('backupFiles')
.restore({
values: {
filterByTk: dumpKey,
dataTypes: ['meta', 'config', 'business'],
},
});
expect(restoreResponse.status).toBe(200);
});
it('should destroy dump file', async () => {
const destroyResponse = await app.agent().resource('backupFiles').destroy({
filterByTk: dumpKey,
});
expect(destroyResponse.status).toBe(200);
const getResponse = await app.agent().resource('backupFiles').get({
filterByTk: dumpKey,
});
expect(getResponse.status).toBe(404);
});
it('should restore from upload file', async () => {
const filePath = dumper.backUpFilePath(dumpKey);
const packageInfoResponse = await app.agent().post('/backupFiles:upload').attach('file', filePath);
expect(packageInfoResponse.status).toBe(200);
const data = packageInfoResponse.body.data;
expect(data['key']).toBeTruthy();
expect(data['meta']).toBeTruthy();
const restoreResponse = await app
.agent()
.resource('backupFiles')
.restore({
values: {
key: data['key'],
dataTypes: ['meta', 'config', 'business'],
},
});
expect(restoreResponse.status).toBe(200);
});
});
it('should get dumpable collections', async () => {
await app.db.getCollection('collections').repository.create({
values: {
name: 'test',
title: '测试',
fields: [
{
name: 'title',
type: 'string',
title: '标题',
},
],
},
context: {},
});
const response = await app.agent().get('/backupFiles:dumpableCollections');
expect(response.status).toBe(200);
const body = response.body;
expect(body['required']).toBeTruthy();
expect(body['third-party']).toBeTruthy();
expect(body['custom']).toBeTruthy();
const testCollectionInfo = body['custom'].find((item: any) => item.name === 'test');
expect(testCollectionInfo).toMatchObject({
name: 'test',
title: '测试',
group: 'custom',
origin: {
name: '@nocobase/plugin-collection-manager',
},
});
});
});

View File

@@ -120,52 +120,4 @@ describe('dump', () => {
await db.sequelize.query(sql, { type: 'INSERT' });
});
it('should dump user defined functions', async () => {
if (db.sequelize.getDialect() !== 'postgres') {
return;
}
await db.sequelize.query(`
CREATE OR REPLACE FUNCTION add(integer, integer) RETURNS integer
AS 'select $1 + $2;'
LANGUAGE SQL
IMMUTABLE
RETURNS NULL ON NULL INPUT;
`);
await db.sequelize.query(`
CREATE OR REPLACE FUNCTION trigger_function()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
-- trigger logic
END;
$$`);
await db.sequelize.query(`
CREATE TRIGGER last_name_changes
BEFORE UPDATE
ON ${app.db.getCollection('users').quotedTableName()}
FOR EACH ROW
EXECUTE PROCEDURE trigger_function();
`);
await db.sequelize.query(`
CREATE OR REPLACE VIEW vistaView AS SELECT 'Hello World' as hello;
`);
const dumper = new Dumper(app, {
workDir: testDir,
});
await dumper.dumpDb();
const restorer = new Restorer(app, {
workDir: testDir,
});
await restorer.importDb();
});
});

View File

@@ -0,0 +1,729 @@
import { MockServer } from '@nocobase/test';
import createApp from './index';
import { Dumper } from '../dumper';
import { Restorer } from '../restorer';
import path from 'path';
import fs from 'fs';
import { Database } from '@nocobase/database';
describe('dumper', () => {
let app: MockServer;
let db: Database;
beforeEach(async () => {
app = await createApp();
db = app.db;
});
afterEach(async () => {
await app.destroy();
});
it.skip('should restore from file', async () => {
const file = '/home/chareice/Downloads/backup_20231121_100606_4495.nbdump';
const restorer = new Restorer(app, {
backUpFilePath: file,
});
await restorer.restore({
groups: new Set(['meta', 'business']),
});
});
it('should write sql content', async () => {
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required']),
});
});
it('should dump and restore date field', async () => {
await db.getRepository('collections').create({
values: {
name: 'tests',
fields: [
{
type: 'date',
name: 'test_data',
},
],
},
context: {},
});
await db.getRepository('tests').create({
values: {
test_data: new Date(),
},
});
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'custom']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required', 'custom']),
});
const testCollection = app.db.getCollection('tests');
const items = await testCollection.repository.find();
expect(items.length).toBe(1);
});
describe('id seq', () => {
let allGroups;
beforeEach(async () => {
await db.getRepository('collections').create({
values: {
name: 'tests',
fields: [
{
type: 'string',
name: 'name',
},
],
},
context: {},
});
const Test = db.getCollection('tests');
for (let i = 0; i < 10; i++) {
await Test.repository.create({
values: {
name: `test${i}`,
},
});
}
const dumper = new Dumper(app);
const collections = await dumper.collectionsGroupByDataTypes();
allGroups = Object.keys(collections);
const result = await dumper.dump({
groups: new Set(allGroups),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(allGroups),
});
});
it('should reset id seq after restore collection', async () => {
const testCollection = app.db.getCollection('tests');
await testCollection.repository.create({
values: {
name: 'test',
},
});
});
});
it('should restore parent collection', async () => {
if (!db.inDialect('postgres')) {
return;
}
await db.getRepository('collections').create({
values: {
name: 'parent',
fields: [
{
type: 'string',
name: 'parentName',
},
],
},
context: {},
});
await db.getRepository('collections').create({
values: {
name: 'child',
inherits: ['parent'],
fields: [
{
type: 'string',
name: 'childName',
},
],
},
context: {},
});
await db.getRepository('parent').create({
values: {
parentName: 'parentName',
},
});
await db.getRepository('child').create({
values: {
childName: 'childName',
},
});
expect(await app.db.getRepository('parent').count()).toEqual(2);
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'custom']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required', 'custom']),
});
expect(await app.db.getRepository('parent').count()).toEqual(2);
});
it('should restore with audit logs', async () => {
await app.runCommand('pm', 'enable', 'audit-logs');
await app.db.getRepository('collections').create({
values: {
name: 'tests',
logging: true,
fields: [
{
type: 'string',
name: 'name',
},
],
},
context: {},
});
const Post = app.db.getCollection('tests').model;
const post = await Post.create({ name: '123456' });
await post.update({ name: '223456' });
await post.destroy();
const auditLogs = await app.db.getCollection('auditLogs').repository.find({
appends: ['changes'],
});
expect(auditLogs.length).toBe(3);
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'log']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required', 'log']),
});
const log = await app.db.getCollection('auditLogs').repository.findOne({
filter: { type: 'update' },
appends: ['changes'],
});
const changes = log.get('changes');
expect(typeof changes[0].before).toBe('string');
});
it('should sort collections by inherits', async () => {
const collections = [
{
name: 'parent1',
inherits: [],
},
{
name: 'parent2',
inherits: [],
},
{
name: 'child3',
inherits: ['child1', 'child2'],
},
{
name: 'child1',
inherits: ['parent1', 'parent2'],
},
{
name: 'child2',
inherits: ['parent1'],
},
];
const sorted = Restorer.sortCollectionsByInherits(collections);
expect(sorted[0].name).toBe('parent1');
expect(sorted[1].name).toBe('parent2');
expect(sorted[2].name).toBe('child1');
expect(sorted[3].name).toBe('child2');
expect(sorted[4].name).toBe('child3');
});
it('should handle inherited collection order', async () => {
if (!db.inDialect('postgres')) {
return;
}
await db.getRepository('collections').create({
values: {
name: 'parent1',
fields: [
{
type: 'string',
name: 'parent1Name',
},
],
},
context: {},
});
await db.getRepository('collections').create({
values: {
name: 'parent2',
fields: [
{
type: 'string',
name: 'parent2Name',
},
],
},
context: {},
});
await db.getRepository('collections').create({
values: {
name: 'child1',
inherits: ['parent1', 'parent2'],
fields: [
{
type: 'string',
name: 'child1Name',
},
],
},
context: {},
});
await db.getRepository('parent1').create({
values: {
parent1Name: 'parent1Name',
},
});
await db.getRepository('parent2').create({
values: {
parent2Name: 'parent2Name',
},
});
await db.getRepository('child1').create({
values: {
child1Name: 'child1Name',
},
});
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'custom']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
const meta = await restorer.parseBackupFile();
const businessCollections = meta.dumpableCollectionsGroupByGroup.custom;
const child1 = businessCollections.find(({ name }) => name === 'child1');
expect(child1.inherits).toEqual(['parent1', 'parent2']);
await restorer.restore({
groups: new Set(['required', 'custom']),
});
});
it.skip('should list dumped files', async () => {
const dumper = new Dumper(app);
const list = await dumper.allBackUpFilePaths({
includeInProgress: true,
dir: path.join(__dirname, './fixtures/files'),
});
console.log({ list });
expect(list.length).toBe(2);
});
it('should dump and restore with view collection', async () => {
await db.getRepository('collections').create({
values: {
name: 'tests',
fields: [
{
type: 'string',
name: 'name',
},
],
},
context: {},
});
const testCollection = db.getCollection('tests');
const viewName = 'test_view';
const dropViewSQL = `DROP VIEW IF EXISTS ${viewName}`;
await db.sequelize.query(dropViewSQL);
const viewSQL = `CREATE VIEW ${viewName} as SELECT * FROM ${testCollection.quotedTableName()}`;
await db.sequelize.query(viewSQL);
await db.getRepository('collections').create({
values: {
name: viewName,
view: true,
schema: db.inDialect('postgres') ? 'public' : undefined,
fields: [
{
type: 'string',
name: 'name',
},
],
},
context: {},
});
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'custom']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required', 'custom']),
});
});
it('should dump & restore sequence data', async () => {
await db.getRepository('collections').create({
values: {
name: 'tests',
fields: [
{
type: 'sequence',
name: 'name',
patterns: [
{
type: 'integer',
options: { key: 1 },
},
],
},
],
},
context: {},
});
const Test = db.getCollection('tests');
const sequenceCollection = db.getCollection('sequences');
expect(await sequenceCollection.repository.count()).toBe(1);
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'custom']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required', 'custom']),
});
expect(await app.db.getCollection('sequences').repository.count()).toBe(1);
});
it('should dump and restore map file', async () => {
const data = {
polygon: [
[114.081074, 22.563646],
[114.147335, 22.559207],
[114.134975, 22.531621],
[114.09103, 22.520045],
[114.033695, 22.575376],
[114.025284, 22.55461],
[114.033523, 22.533048],
],
point: [114.048868, 22.554927],
circle: [114.058996, 22.549695, 4171],
lineString: [
[114.047323, 22.534158],
[114.120966, 22.544146],
],
};
await app.runAsCLI(['pm', 'enable', 'map'], { from: 'user' });
const fields = [
{
type: 'point',
name: 'point',
},
{
type: 'polygon',
name: 'polygon',
},
{
type: 'circle',
name: 'circle',
},
{
type: 'lineString',
name: 'lineString',
},
];
await app.db.getRepository('collections').create({
values: {
name: 'tests',
fields,
},
context: {},
});
await app.db.getRepository('tests').create({
values: {
...data,
},
});
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required', 'custom']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.restore({
groups: new Set(['required', 'custom']),
});
const testCollection = app.db.getCollection('tests');
const tableInfo = await app.db.sequelize.getQueryInterface().describeTable(testCollection.getTableNameWithSchema());
expect(tableInfo.point).toBeDefined();
});
it('should dump collection meta', async () => {
await db.getRepository('collections').create({
values: {
name: 'tests',
fields: [
{
type: 'string',
name: 'name',
},
],
},
context: {},
});
await db.getRepository('tests').create({
values: [
{
name: 'test1',
},
{
name: 'test2',
},
],
});
const dumper = new Dumper(app);
await dumper.dumpCollection({
name: 'tests',
});
const collectionDir = path.resolve(dumper.workDir, 'collections', 'tests');
const metaFile = path.resolve(collectionDir, 'meta');
const meta = JSON.parse(fs.readFileSync(metaFile, 'utf8'));
expect(meta.name).toBe('tests');
const autoIncrement = meta.autoIncrement;
expect(autoIncrement).toBeDefined();
});
it('should save dump meta to dump file', async () => {
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
const meta = await restorer.parseBackupFile();
expect(meta.dumpableCollectionsGroupByGroup.required).toBeTruthy();
expect(meta.DB_UNDERSCORED).toBeDefined();
});
describe('get file status', function () {
it('should get in progress status', async () => {
const fileName = 'backup_20231111_112233.nbdump';
const fullPath = path.resolve(__dirname, './fixtures', fileName);
const status = await Dumper.getFileStatus(fullPath);
expect(status['inProgress']).toBeTruthy();
});
it('should get ok status', async () => {
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required']),
});
const status = await Dumper.getFileStatus(result.filePath);
expect(status['inProgress']).toBeFalsy();
});
it('should throw error when file not exists', async () => {
await expect(Dumper.getFileStatus('not_exists_file')).rejects.toThrowError();
});
});
it('should run dump task', async () => {
const dumper = new Dumper(app);
const taskId = await dumper.runDumpTask({
groups: new Set(['meta']),
});
expect(taskId).toBeDefined();
const promise = Dumper.getTaskPromise(taskId);
expect(promise).toBeDefined();
await promise;
});
it('should create dump file name', async () => {
expect(Dumper.generateFileName()).toMatch(/^backup_\d{8}_\d{6}_\d{4}\.nbdump$/);
});
it('should get dumped collections by data types', async () => {
await app.db.getRepository('collections').create({
values: {
name: 'test_collection',
fields: [
{
name: 'test_field1',
type: 'string',
},
],
},
context: {},
});
const dumper = new Dumper(app);
const collections = await dumper.getCollectionsByDataTypes(new Set(['custom']));
expect(collections.includes('test_collection')).toBeTruthy();
});
it('should dump collection table structure', async () => {
await app.db.getRepository('collections').create({
values: {
name: 'test_collection',
fields: [
{
name: 'test_field1',
type: 'string',
},
],
},
context: {},
});
const dumper = new Dumper(app);
await dumper.dumpCollection({
name: 'test_collection',
});
const collectionDir = path.resolve(dumper.workDir, 'collections', 'test_collection');
const metaFile = path.resolve(collectionDir, 'meta');
const meta = JSON.parse(fs.readFileSync(metaFile, 'utf8'));
const attributes = meta.attributes;
expect(attributes).toBeDefined();
expect(attributes.id.isCollectionField).toBeFalsy();
expect(attributes.id.type).toBe('BIGINT');
expect(attributes['test_field1'].isCollectionField).toBeTruthy();
expect(attributes['test_field1'].type).toBe('string');
});
it('should get dumped collections with origin option', async () => {
const dumper = new Dumper(app);
const dumpableCollections = await dumper.dumpableCollections();
const applicationPlugins = dumpableCollections.find(({ name }) => name === 'applicationPlugins');
expect(applicationPlugins.origin).toMatchObject({
title: 'core',
name: 'core',
});
});
it('should get custom collections group', async () => {
await app.db.getRepository('collections').create({
values: {
name: 'test_collection',
fields: [
{
name: 'test_field1',
type: 'string',
},
],
},
context: {},
});
const dumper = new Dumper(app);
const dumpableCollections = await dumper.collectionsGroupByDataTypes();
expect(dumpableCollections.custom).toBeDefined();
});
});

View File

@@ -1,10 +1,13 @@
import { mockServer } from '@nocobase/test';
export default async function createApp() {
const app = mockServer();
const app = mockServer({
plugins: ['nocobase'],
});
await app.cleanDb();
app.plugin((await import('../server')).default, { name: 'duplicator' });
app.plugin('error-handler');
app.plugin('collection-manager');
await app.loadAndInstall({ clean: true });
return app;

View File

@@ -0,0 +1,71 @@
import { MockServer } from '@nocobase/test';
import { Collection, Database } from '@nocobase/database';
import createApp from './index';
import { Dumper } from '../dumper';
import { Restorer } from '../restorer';
import path from 'path';
import fs from 'fs';
describe('on dump', () => {
let app: MockServer;
let db: Database;
beforeEach(async () => {
app = await createApp();
db = app.db;
});
afterEach(async () => {
await app.destroy();
});
it('should handle collection onDump api', async () => {
class OnDumpCollection extends Collection {}
app.db.collectionFactory.registerCollectionType(OnDumpCollection, {
condition(options) {
return options.onDump;
},
async onSync(model, options) {
return;
},
async onDump(dumper: Dumper, collection) {
dumper.writeSQLContent('onDumpCollection', {
sql: `CREATE TABLE ${collection.getTableNameWithSchemaAsString()} (id int);`,
group: 'required',
});
},
});
await app.db.getCollection('collections').repository.create({
values: {
name: 'onDumpCollection',
title: 'onDumpCollection',
onDump: true,
},
context: {},
});
const dumper = new Dumper(app);
const result = await dumper.dump({
groups: new Set(['required']),
});
const restorer = new Restorer(app, {
backUpFilePath: result.filePath,
});
await restorer.parseBackupFile();
const sqlContentPath = path.resolve(restorer.workDir, 'sql-content.json');
const sqlContent = JSON.parse(await fs.promises.readFile(sqlContentPath, 'utf8'));
expect(sqlContent).toBeDefined();
await restorer.restore({
groups: new Set(['required', 'custom']),
});
});
});

View File

@@ -0,0 +1,41 @@
import { Application } from '@nocobase/server';
import { applyMixins, AsyncEmitter } from '@nocobase/utils';
import crypto from 'crypto';
import EventEmitter from 'events';
import fsPromises from 'fs/promises';
import * as os from 'os';
import path from 'path';
export type AppMigratorOptions = {
workDir?: string;
};
abstract class AppMigrator extends EventEmitter {
public readonly workDir: string;
public app: Application;
abstract direction: 'restore' | 'dump';
declare emitAsync: (event: string | symbol, ...args: any[]) => Promise<boolean>;
constructor(app: Application, options?: AppMigratorOptions) {
super();
this.app = app;
this.workDir = options?.workDir || this.tmpDir();
}
tmpDir() {
return path.resolve(os.tmpdir(), `nocobase-${crypto.randomUUID()}`);
}
async rmDir(dir: string) {
await fsPromises.rm(dir, { recursive: true, force: true });
}
async clearWorkDir() {
await this.rmDir(this.workDir);
}
}
applyMixins(AppMigrator, [AsyncEmitter]);
export { AppMigrator };

View File

@@ -0,0 +1,5 @@
import { CollectionGroup } from '@nocobase/database';
export class CollectionGroupManager {
static collectionGroups: CollectionGroup[] = [];
}

View File

@@ -0,0 +1,67 @@
import { Application, AppSupervisor } from '@nocobase/server';
import { Restorer } from '../restorer';
import { DumpRulesGroupType } from '@nocobase/database';
export default function addRestoreCommand(app: Application) {
app
.command('restore')
.ipc()
.argument('<string>', 'restore file path')
.option('-a, --app <appName>', 'sub app name if you want to restore into a sub app')
.option('-f, --force', 'force restore')
.option(
'-g, --groups <groups>',
'groups to restore',
(value, previous) => {
return previous.concat([value]);
},
[],
)
.action(async (restoreFilePath, options) => {
// should confirm data will be overwritten
if (!options.force) {
app.log.warn('This action will overwrite your current data, please make sure you have a backup. Re-run with -f/--force to proceed.');
return;
}
let importApp = app;
if (options.app) {
if (
!(await app.db.getCollection('applications').repository.findOne({
filter: { name: options.app },
}))
) {
// create sub app if not exists
await app.db.getCollection('applications').repository.create({
values: {
name: options.app,
},
});
}
const subApp = await AppSupervisor.getInstance().getApp(options.app);
if (!subApp) {
app.log.error(`app ${options.app} not found`);
await app.stop();
return;
}
importApp = subApp;
}
const groups: Set<string> = new Set<DumpRulesGroupType>(options.groups);
groups.add('required');
const restorer = new Restorer(importApp, {
backUpFilePath: restoreFilePath,
});
await restorer.restore({
groups,
});
await app.restart();
});
}
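
Note (not part of the diff): a minimal sketch of invoking this command programmatically, assuming an initialized Application instance named `app`. The flags mirror the option definitions above, and `required` is always added to the restore groups:

// overwrite current data (-f) and restore the business group on top of required
await app.runCommand('restore', '-f', '/path/to/backup_20231121_100606_4495.nbdump', '-g', 'business');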

View File

@@ -0,0 +1,543 @@
import { Collection, CollectionGroupManager as DBCollectionGroupManager, DumpRulesGroupType } from '@nocobase/database';
import archiver from 'archiver';
import dayjs from 'dayjs';
import fs from 'fs';
import fsPromises from 'fs/promises';
import { default as _, default as lodash } from 'lodash';
import mkdirp from 'mkdirp';
import path from 'path';
import * as process from 'process';
import stream from 'stream';
import util from 'util';
import { AppMigrator } from './app-migrator';
import { FieldValueWriter } from './field-value-writer';
import { DUMPED_EXTENSION, humanFileSize, sqlAdapter } from './utils';
const finished = util.promisify(stream.finished);
type DumpOptions = {
groups: Set<DumpRulesGroupType>;
fileName?: string;
};
type BackUpStatusOk = {
name: string;
createdAt: Date;
fileSize: string;
status: 'ok';
};
type BackUpStatusDoing = {
name: string;
inProgress: true;
status: 'in_progress';
};
export class Dumper extends AppMigrator {
static dumpTasks: Map<string, Promise<any>> = new Map();
direction = 'dump' as const;
sqlContent: {
[key: string]: {
sql: string | string[];
group: DumpRulesGroupType;
};
} = {};
static getTaskPromise(taskId: string): Promise<any> | undefined {
return this.dumpTasks.get(taskId);
}
static async getFileStatus(filePath: string): Promise<BackUpStatusOk | BackUpStatusDoing> {
const lockFile = filePath + '.lock';
const fileName = path.basename(filePath);
return fs.promises
.stat(lockFile)
.then((lockFileStat) => {
if (lockFileStat.isFile()) {
return {
name: fileName,
inProgress: true,
status: 'in_progress',
} as BackUpStatusDoing;
} else {
throw new Error('Lock file is not a file');
}
})
.catch((error) => {
// if the lock file does not exist, check the backup file itself
if (error.code === 'ENOENT') {
return fs.promises.stat(filePath).then((backupFileStat) => {
if (backupFileStat.isFile()) {
return {
name: fileName,
createdAt: backupFileStat.birthtime,
fileSize: humanFileSize(backupFileStat.size),
status: 'ok',
} as BackUpStatusOk;
} else {
throw new Error('Path is not a file');
}
});
}
// rethrow any other error
throw error;
});
}
static generateFileName() {
return `backup_${dayjs().format(`YYYYMMDD_HHmmss_${Math.floor(1000 + Math.random() * 9000)}`)}.${DUMPED_EXTENSION}`;
}
writeSQLContent(
key: string,
data: {
sql: string | string[];
group: DumpRulesGroupType;
},
) {
this.sqlContent[key] = data;
}
getSQLContent(key: string) {
return this.sqlContent[key];
}
async getCollectionsByDataTypes(groups: Set<DumpRulesGroupType>): Promise<string[]> {
const dumpableCollectionsGroupByDataTypes = await this.collectionsGroupByDataTypes();
return [...groups].reduce((acc, key) => {
return acc.concat(dumpableCollectionsGroupByDataTypes[key] || []);
}, []);
}
async dumpableCollections() {
return (
await Promise.all(
[...this.app.db.collections.values()].map(async (c) => {
try {
const dumpRules = DBCollectionGroupManager.unifyDumpRules(c.options.dumpRules);
let origin = c.origin;
let originTitle = origin;
// plugin collections
if (origin.startsWith('plugin:')) {
const plugin = this.app.pm.get(origin.replace(/^plugin:/, ''));
const pluginInfo = await plugin.toJSON({
withOutOpenFile: true,
});
originTitle = pluginInfo.displayName;
origin = pluginInfo.packageName;
}
// user collections
if (origin === 'collection-manager') {
originTitle = 'user';
origin = 'user';
}
const options: any = {
name: c.name,
title: c.options.title || c.name,
options: c.options,
group: dumpRules?.group,
isView: c.isView(),
origin: {
name: origin,
title: originTitle,
},
};
if (c.options.inherits && c.options.inherits.length > 0) {
options.inherits = c.options.inherits;
}
return options;
} catch (e) {
console.error(e);
throw new Error(`collection ${c.name} has invalid dumpRules option`, { cause: e });
}
}),
)
).map((item) => {
if (!item.group) {
item.group = 'unknown';
}
return item;
});
}
async collectionsGroupByDataTypes() {
const grouped = lodash.groupBy(await this.dumpableCollections(), 'group');
return Object.fromEntries(Object.entries(grouped).map(([key, value]) => [key, value.map((item) => item.name)]));
}
backUpStorageDir() {
return path.resolve(process.cwd(), 'storage', 'backups');
}
async allBackUpFilePaths(options?: { includeInProgress?: boolean; dir?: string }) {
const dirname = options?.dir || this.backUpStorageDir();
const includeInProgress = options?.includeInProgress;
try {
const files = await fsPromises.readdir(dirname);
const lockFilesSet = new Set(
files.filter((file) => path.extname(file) === '.lock').map((file) => path.basename(file, '.lock')),
);
const filteredFiles = files
.filter((file) => {
const baseName = path.basename(file);
const isLockFile = path.extname(file) === '.lock';
const isDumpFile = path.extname(file) === `.${DUMPED_EXTENSION}`;
return (includeInProgress && isLockFile) || (isDumpFile && !lockFilesSet.has(baseName));
})
.map(async (file) => {
const filePath = path.resolve(dirname, file);
const stats = await fsPromises.stat(filePath);
return { filePath, birthtime: stats.birthtime.getTime() };
});
const filesData = await Promise.all(filteredFiles);
filesData.sort((a, b) => b.birthtime - a.birthtime);
return filesData.map((fileData) => fileData.filePath);
} catch (error) {
if (!error.message.includes('no such file or directory')) {
console.error('Error reading directory:', error);
}
return [];
}
}
backUpFilePath(fileName: string) {
const dirname = this.backUpStorageDir();
return path.resolve(dirname, fileName);
}
lockFilePath(fileName: string) {
const lockFile = fileName + '.lock';
const dirname = this.backUpStorageDir();
return path.resolve(dirname, lockFile);
}
async writeLockFile(fileName: string) {
const dirname = this.backUpStorageDir();
await mkdirp(dirname);
const filePath = this.lockFilePath(fileName);
await fsPromises.writeFile(filePath, 'lock', 'utf8');
}
async cleanLockFile(fileName: string) {
const filePath = this.lockFilePath(fileName);
await fsPromises.unlink(filePath);
}
async runDumpTask(options: Omit<DumpOptions, 'fileName'>) {
const backupFileName = Dumper.generateFileName();
await this.writeLockFile(backupFileName);
const promise = this.dump({
groups: options.groups,
fileName: backupFileName,
}).finally(() => {
this.cleanLockFile(backupFileName);
Dumper.dumpTasks.delete(backupFileName);
});
Dumper.dumpTasks.set(backupFileName, promise);
return backupFileName;
}
async dumpableCollectionsGroupByGroup() {
return _(await this.dumpableCollections())
.map((c) => _.pick(c, ['name', 'group', 'origin', 'title', 'isView', 'inherits']))
.groupBy('group')
.mapValues((items) => _.sortBy(items, (item) => item.name))
.value();
}
async dump(options: DumpOptions) {
const dumpingGroups = options.groups;
dumpingGroups.add('required');
const delayCollections = new Set();
const dumpedCollections = await this.getCollectionsByDataTypes(dumpingGroups);
for (const collectionName of dumpedCollections) {
const collection = this.app.db.getCollection(collectionName);
if (lodash.get(collection.options, 'dumpRules.delayRestore')) {
delayCollections.add(collectionName);
}
await this.dumpCollection({
name: collectionName,
});
}
await this.dumpMeta({
dumpableCollectionsGroupByGroup: lodash.pick(await this.dumpableCollectionsGroupByGroup(), [...dumpingGroups]),
dumpedGroups: [...dumpingGroups],
delayCollections: [...delayCollections],
});
await this.dumpDb(options);
const backupFileName = options.fileName || Dumper.generateFileName();
const result = await this.packDumpedDir(backupFileName);
await this.clearWorkDir();
return result;
}
async dumpDb(options: DumpOptions) {
for (const collection of this.app.db.collections.values()) {
const collectionOnDumpOption = this.app.db.collectionFactory.collectionTypes.get(
collection.constructor as typeof Collection,
)?.onDump;
if (collectionOnDumpOption) {
await collectionOnDumpOption(this, collection);
}
}
if (this.hasSqlContent()) {
const dbDumpPath = path.resolve(this.workDir, 'sql-content.json');
await fsPromises.writeFile(
dbDumpPath,
JSON.stringify(
Object.keys(this.sqlContent)
.filter((key) => options.groups.has(this.sqlContent[key].group))
.reduce((acc, key) => {
acc[key] = this.sqlContent[key];
return acc;
}, {}),
),
'utf8',
);
}
}
hasSqlContent() {
return Object.keys(this.sqlContent).length > 0;
}
async dumpMeta(additionalMeta: object = {}) {
const metaPath = path.resolve(this.workDir, 'meta');
const metaObj = {
version: await this.app.version.get(),
dialect: this.app.db.sequelize.getDialect(),
DB_UNDERSCORED: process.env.DB_UNDERSCORED,
DB_TABLE_PREFIX: process.env.DB_TABLE_PREFIX,
DB_SCHEMA: process.env.DB_SCHEMA,
COLLECTION_MANAGER_SCHEMA: process.env.COLLECTION_MANAGER_SCHEMA,
...additionalMeta,
};
if (this.app.db.inDialect('postgres')) {
if (this.app.db.inheritanceMap.nodes.size > 0) {
metaObj['dialectOnly'] = true;
}
}
if (this.hasSqlContent()) {
metaObj['dialectOnly'] = true;
}
await fsPromises.writeFile(metaPath, JSON.stringify(metaObj), 'utf8');
}
async dumpCollection(options: { name: string }) {
const app = this.app;
const dir = this.workDir;
const collectionName = options.name;
app.log.info(`dumping collection ${collectionName}`);
const collection = app.db.getCollection(collectionName);
if (!collection) {
this.app.log.warn(`collection ${collectionName} not found`);
return;
}
const collectionOnDumpOption = this.app.db.collectionFactory.collectionTypes.get(
collection.constructor as typeof Collection,
)?.onDump;
if (collectionOnDumpOption) {
return;
}
// @ts-ignore
const attributes = collection.model.tableAttributes;
// @ts-ignore
const columns: string[] = [...new Set(lodash.map(attributes, 'field'))];
const collectionDataDir = path.resolve(dir, 'collections', collectionName);
await fsPromises.mkdir(collectionDataDir, { recursive: true });
let count = 0;
if (columns.length !== 0) {
// write collection data
const dataFilePath = path.resolve(collectionDataDir, 'data');
const dataStream = fs.createWriteStream(dataFilePath);
const rows = await app.db.sequelize.query(
sqlAdapter(
app.db,
`SELECT *
FROM ${collection.isParent() ? 'ONLY' : ''} ${collection.quotedTableName()}`,
),
{
type: 'SELECT',
},
);
for (const row of rows) {
const rowData = JSON.stringify(
columns.map((col) => {
const val = row[col];
const field = collection.getField(col);
return field ? FieldValueWriter.toDumpedValue(field, val) : val;
}),
);
dataStream.write(rowData + '\r\n', 'utf8');
}
dataStream.end();
await finished(dataStream);
count = rows.length;
}
const metaAttributes = lodash.mapValues(attributes, (attr, key) => {
const collectionField = collection.getField(key);
const fieldOptionKeys = ['field', 'primaryKey', 'autoIncrement', 'allowNull', 'defaultValue', 'unique'];
if (collectionField) {
// is a field
return {
field: attr.field,
isCollectionField: true,
type: collectionField.type,
typeOptions: collectionField.options,
};
}
return {
...lodash.pick(attr, fieldOptionKeys),
type: attr.type.constructor.toString(),
isCollectionField: false,
typeOptions: attr.type.options,
};
});
const meta = {
name: collectionName,
tableName: collection.getTableNameWithSchema(),
count,
columns,
attributes: metaAttributes,
};
if (collection.options.inherits) {
meta['inherits'] = lodash.uniq(collection.options.inherits);
}
// @ts-ignore
const autoIncrAttr = collection.model.autoIncrementAttribute;
if (
autoIncrAttr &&
collection.model.rawAttributes[autoIncrAttr] &&
collection.model.rawAttributes[autoIncrAttr].autoIncrement
) {
const queryInterface = app.db.queryInterface;
const autoIncrInfo = await queryInterface.getAutoIncrementInfo({
tableInfo: {
tableName: collection.model.tableName,
schema: collection.collectionSchema(),
},
fieldName: autoIncrAttr,
});
meta['autoIncrement'] = {
...autoIncrInfo,
fieldName: autoIncrAttr,
};
}
// write meta file
await fsPromises.writeFile(path.resolve(collectionDataDir, 'meta'), JSON.stringify(meta), 'utf8');
}
async packDumpedDir(fileName: string) {
const dirname = this.backUpStorageDir();
await mkdirp(dirname);
const filePath = path.resolve(dirname, fileName);
const output = fs.createWriteStream(filePath);
const archive = archiver('zip', {
zlib: { level: 9 },
});
// Create a promise that resolves when the 'close' event is fired
const onClose = new Promise((resolve, reject) => {
output.on('close', function () {
console.log('dumped file size: ' + humanFileSize(archive.pointer(), true));
resolve(true);
});
output.on('end', function () {
console.log('Data has been drained');
});
archive.on('warning', function (err) {
if (err.code === 'ENOENT') {
// log warning
} else {
// throw error
reject(err);
}
});
archive.on('error', function (err) {
reject(err);
});
});
archive.pipe(output);
archive.directory(this.workDir, false);
// Finalize the archive
await archive.finalize();
// Wait for the 'close' event
await onClose;
return {
filePath,
dirname,
};
}
}
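
Note (not part of the diff): a sketch of the asynchronous dump flow implemented above, assuming an initialized `app`. runDumpTask writes a `<fileName>.lock` marker before dumping, so Dumper.getFileStatus reports `in_progress` until the task settles and the lock file is cleaned up:

const dumper = new Dumper(app);
// kick off a background dump; the returned value is the generated backup file name
const fileName = await dumper.runDumpTask({ groups: new Set(['meta', 'config', 'business']) });
await Dumper.getTaskPromise(fileName); // resolves once the dump finishes
const status = await Dumper.getFileStatus(dumper.backUpFilePath(fileName));
// status is now a BackUpStatusOk: { name, createdAt, fileSize, status: 'ok' }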

View File

@@ -0,0 +1,6 @@
export class RestoreCheckError extends Error {
constructor(message) {
super(message);
this.name = 'RestoreCheckError';
}
}

View File

@@ -58,9 +58,18 @@ export class FieldValueWriter {
}
}
function isJSONObjectOrArrayString(str) {
try {
const parsed = JSON.parse(str);
return typeof parsed === 'object' && parsed !== null;
} catch (e) {
return false;
}
}
FieldValueWriter.registerWriter([DataTypes.JSON.toString(), DataTypes.JSONB.toString()], (val) => {
try {
return lodash.isString(val) ? JSON.parse(val) : val;
return isJSONObjectOrArrayString(val) ? JSON.parse(val) : val;
} catch (err) {
if (err instanceof SyntaxError && err.message.includes('Unexpected')) {
return val;

View File

@@ -0,0 +1,212 @@
import { Dumper } from '../dumper';
import { DumpRulesGroupType } from '@nocobase/database';
import fs from 'fs';
import { koaMulter as multer } from '@nocobase/utils';
import os from 'os';
import path from 'path';
import fsPromises from 'fs/promises';
import { Restorer } from '../restorer';
import { DEFAULT_PAGE, DEFAULT_PER_PAGE } from '@nocobase/actions';
export default {
name: 'backupFiles',
middleware: async (ctx, next) => {
if (ctx.action.actionName !== 'upload') {
return next();
}
const storage = multer.diskStorage({
destination: os.tmpdir(),
filename: function (req, file, cb) {
const randomName = Date.now().toString() + Math.random().toString().slice(2); // generate a random file name
cb(null, randomName);
},
});
const upload = multer({ storage }).single('file');
return upload(ctx, next);
},
actions: {
async list(ctx, next) {
const { page = DEFAULT_PAGE, pageSize = DEFAULT_PER_PAGE } = ctx.action.params;
const dumper = new Dumper(ctx.app);
const backupFiles = await dumper.allBackUpFilePaths({
includeInProgress: true,
});
// handle pagination
const count = backupFiles.length;
const rows = await Promise.all(
backupFiles.slice((page - 1) * pageSize, page * pageSize).map(async (file) => {
// if file is lock file, remove lock extension
return await Dumper.getFileStatus(file.endsWith('.lock') ? file.replace('.lock', '') : file);
}),
);
ctx.body = {
count,
rows,
page: Number(page),
pageSize: Number(pageSize),
totalPage: Math.ceil(count / pageSize),
};
await next();
},
async get(ctx, next) {
const { filterByTk } = ctx.action.params;
const dumper = new Dumper(ctx.app);
const filePath = dumper.backUpFilePath(filterByTk);
async function sendError(message, status = 404) {
ctx.body = { status: 'error', message };
ctx.status = status;
}
try {
const fileState = await Dumper.getFileStatus(filePath);
if (fileState.status !== 'ok') {
await sendError(`Backup file ${filterByTk} not found`);
} else {
const restorer = new Restorer(ctx.app, {
backUpFilePath: filePath,
});
const restoreMeta = await restorer.parseBackupFile();
ctx.body = {
...fileState,
meta: restoreMeta,
};
}
} catch (e) {
if (e.code === 'ENOENT') {
await sendError(`Backup file ${filterByTk} not found`);
}
}
await next();
},
/**
* create dump task
* @param ctx
* @param next
*/
async create(ctx, next) {
const data = <
{
dataTypes: string[];
}
>ctx.request.body;
const dumper = new Dumper(ctx.app);
const taskId = await dumper.runDumpTask({
groups: new Set(data.dataTypes) as Set<DumpRulesGroupType>,
});
ctx.body = {
key: taskId,
};
await next();
},
/**
* download backup file
* @param ctx
* @param next
*/
async download(ctx, next) {
const { filterByTk } = ctx.action.params;
const dumper = new Dumper(ctx.app);
const filePath = dumper.backUpFilePath(filterByTk);
const fileState = await Dumper.getFileStatus(filePath);
if (fileState.status !== 'ok') {
throw new Error(`Backup file ${filterByTk} not found`);
}
ctx.attachment(filePath);
ctx.body = fs.createReadStream(filePath);
await next();
},
async restore(ctx, next) {
const { dataTypes, filterByTk, key } = ctx.action.params.values;
const filePath = (() => {
if (key) {
const tmpDir = os.tmpdir();
return path.resolve(tmpDir, key);
}
if (filterByTk) {
const dumper = new Dumper(ctx.app);
return dumper.backUpFilePath(filterByTk);
}
})();
if (!filePath) {
throw new Error(`Backup file ${filterByTk} not found`);
}
const args = ['restore', '-f', filePath];
for (const dataType of dataTypes) {
args.push('-g', dataType);
}
await ctx.app.runCommand(...args);
await next();
},
async destroy(ctx, next) {
const { filterByTk } = ctx.action.params;
const dumper = new Dumper(ctx.app);
const filePath = dumper.backUpFilePath(filterByTk);
// remove the backup file from the storage directory
await fsPromises.unlink(filePath);
ctx.body = {
status: 'ok',
};
await next();
},
async upload(ctx, next) {
const file = ctx.file;
const fileName = file.filename;
const restorer = new Restorer(ctx.app, {
backUpFilePath: file.path,
});
const restoreMeta = await restorer.parseBackupFile();
ctx.body = {
key: fileName,
meta: restoreMeta,
};
await next();
},
async dumpableCollections(ctx, next) {
ctx.withoutDataWrapping = true;
const dumper = new Dumper(ctx.app);
ctx.body = await dumper.dumpableCollectionsGroupByGroup();
await next();
},
},
};
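
Note (not part of the diff): the end-to-end HTTP flow through these actions, as exercised by the backup-files tests earlier in this diff (a sketch using the mock-server agent):

// create -> returns the key of the background dump task
const createRes = await app.agent().resource('backupFiles').create({ dataTypes: ['meta', 'config', 'business'] });
const key = createRes.body.data.key;
await Dumper.getTaskPromise(key); // wait for the dump to finish
// restore the file that was just produced
await app.agent().resource('backupFiles').restore({
  values: { filterByTk: key, dataTypes: ['meta', 'config', 'business'] },
});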

View File

@@ -0,0 +1,405 @@
import decompress from 'decompress';
import fs from 'fs';
import fsPromises from 'fs/promises';
import path from 'path';
import { AppMigrator, AppMigratorOptions } from './app-migrator';
import { readLines } from './utils';
import { Application } from '@nocobase/server';
import { DataTypes, DumpRulesGroupType } from '@nocobase/database';
import lodash, { isPlainObject } from 'lodash';
import { FieldValueWriter } from './field-value-writer';
import * as Topo from '@hapi/topo';
import { RestoreCheckError } from './errors/restore-check-error';
import semver from 'semver';
type RestoreOptions = {
groups: Set<DumpRulesGroupType>;
};
export class Restorer extends AppMigrator {
direction = 'restore' as const;
backUpFilePath: string;
decompressed = false;
importedCollections: string[] = [];
constructor(
app: Application,
options: AppMigratorOptions & {
backUpFilePath?: string;
},
) {
super(app, options);
const { backUpFilePath } = options;
if (backUpFilePath) {
this.setBackUpFilePath(backUpFilePath);
}
}
static sortCollectionsByInherits(
collections: Array<{
name: string;
inherits: string[];
}>,
): any {
const sorter = new Topo.Sorter();
for (const collection of collections) {
const options: any = {
group: collection.name,
};
if (collection.inherits?.length) {
options.after = collection.inherits;
}
sorter.add(collection, options);
}
return sorter.sort();
}
setBackUpFilePath(backUpFilePath: string) {
if (path.isAbsolute(backUpFilePath)) {
this.backUpFilePath = backUpFilePath;
} else if (path.basename(backUpFilePath) === backUpFilePath) {
const dirname = path.resolve(process.cwd(), 'storage', 'duplicator');
this.backUpFilePath = path.resolve(dirname, backUpFilePath);
} else {
this.backUpFilePath = path.resolve(process.cwd(), backUpFilePath);
}
}
async parseBackupFile() {
await this.decompressBackup(this.backUpFilePath);
return await this.getImportMeta();
}
async restore(options: RestoreOptions) {
await this.decompressBackup(this.backUpFilePath);
await this.checkMeta();
await this.importCollections(options);
await this.importDb(options);
await this.upgradeApp();
await this.clearWorkDir();
}
async getImportMeta() {
const metaFile = path.resolve(this.workDir, 'meta');
return JSON.parse(await fsPromises.readFile(metaFile, 'utf8')) as any;
}
async checkMeta() {
const meta = await this.getImportMeta();
if (meta['dialectOnly'] && !this.app.db.inDialect(meta['dialect'])) {
throw new RestoreCheckError(`this backup file can only be imported into a ${meta['dialect']} database`);
}
const checkEnv = (envName: string) => {
const valueInPackage = meta[envName] || '';
const valueInEnv = process.env[envName] || '';
if (valueInPackage && valueInEnv !== valueInPackage) {
throw new RestoreCheckError(`to use this backup file, please set ${envName}=${valueInPackage}`);
}
};
for (const envName of ['DB_UNDERSCORED', 'DB_SCHEMA', 'COLLECTION_MANAGER_SCHEMA', 'DB_TABLE_PREFIX']) {
checkEnv(envName);
}
const version = meta['version'];
if (semver.lt(version, '0.18.0-alpha.2')) {
throw new RestoreCheckError(`this backup file was created in nocobase ${version}; backup files older than 0.18.0-alpha.2 cannot be restored`);
}
}
async importCollections(options: RestoreOptions) {
const importCollection = async (collectionName: string) => {
await this.importCollection({
name: collectionName,
});
};
const { dumpableCollectionsGroupByGroup, delayCollections } = await this.parseBackupFile();
// import plugins
await importCollection('applicationPlugins');
await this.app.reload();
// import required collections
const metaCollections = dumpableCollectionsGroupByGroup.required;
for (const collection of metaCollections) {
if (collection.name === 'applicationPlugins') {
continue;
}
if (delayCollections.includes(collection.name)) {
continue;
}
await importCollection(collection.name);
}
options.groups.delete('required');
// import other groups
const importGroups = [...options.groups];
for (const group of importGroups) {
const collections = dumpableCollectionsGroupByGroup[group];
if (!collections) {
this.app.log.warn(`group ${group} not found`);
continue;
}
for (const collection of Restorer.sortCollectionsByInherits(collections)) {
await importCollection(collection.name);
}
}
await this.app.reload();
await (this.app.db.getRepository('collections') as any).load();
// sync new plugins and new collections from backup file
await this.app.db.sync();
for (const collectionName of delayCollections) {
const delayRestore = this.app.db.getCollection(collectionName).options.dumpRules['delayRestore'];
await delayRestore(this);
}
await this.emitAsync('restoreCollectionsFinished');
}
async decompressBackup(backupFilePath: string) {
if (!this.decompressed) await decompress(backupFilePath, this.workDir);
}
async readCollectionMeta(collectionName: string) {
const dir = this.workDir;
const collectionMetaPath = path.resolve(dir, 'collections', collectionName, 'meta');
const metaContent = await fsPromises.readFile(collectionMetaPath, 'utf8');
return JSON.parse(metaContent);
}
async importCollection(options: {
name: string;
insert?: boolean;
clear?: boolean;
rowCondition?: (row: any) => boolean;
}) {
const app = this.app;
const db = app.db;
const collectionName = options.name;
if (!collectionName) {
throw new Error('collection name is required');
}
const dir = this.workDir;
const collectionDataPath = path.resolve(dir, 'collections', collectionName, 'data');
const collectionMetaPath = path.resolve(dir, 'collections', collectionName, 'meta');
try {
await fsPromises.stat(collectionMetaPath);
} catch (e) {
app.logger.info(`${collectionName} has no meta`);
return;
}
const metaContent = await fsPromises.readFile(collectionMetaPath, 'utf8');
const meta = JSON.parse(metaContent);
let addSchemaTableName: any = meta.tableName;
if (!this.app.db.inDialect('postgres') && isPlainObject(addSchemaTableName)) {
addSchemaTableName = addSchemaTableName.tableName;
}
const columns = meta['columns'];
if (columns.length == 0) {
app.logger.info(`${collectionName} has no columns`);
return;
}
const fieldAttributes = lodash.mapValues(meta.attributes, (attr) => {
if (attr.isCollectionField) {
const fieldClass = db.fieldTypes.get(attr.type);
if (!fieldClass) throw new Error(`field type ${attr.type} not found`);
return new fieldClass(attr.typeOptions, {
database: db,
});
}
return undefined;
});
const rawAttributes = lodash.mapValues(meta.attributes, (attr, key) => {
if (attr.isCollectionField) {
const field = fieldAttributes[key];
return {
...field.toSequelize(),
field: attr.field,
};
}
const DataTypeClass = DataTypes[db.options.dialect as string][attr.type] || DataTypes[attr.type];
const obj = {
...attr,
type: new DataTypeClass(),
};
if (attr.defaultValue && ['JSON', 'JSONB', 'JSONTYPE'].includes(attr.type)) {
obj.defaultValue = JSON.stringify(attr.defaultValue);
}
return obj;
});
if (options.clear !== false) {
// drop table
await db.sequelize.getQueryInterface().dropTable(addSchemaTableName, {
cascade: true,
});
// create table
await db.sequelize.getQueryInterface().createTable(addSchemaTableName, rawAttributes);
if (meta.inherits) {
for (const inherit of lodash.uniq(meta.inherits)) {
const parentMeta = await this.readCollectionMeta(inherit as string);
const sql = `ALTER TABLE ${app.db.utils.quoteTable(addSchemaTableName)} INHERIT ${app.db.utils.quoteTable(
parentMeta.tableName,
)};`;
await db.sequelize.query(sql);
}
}
}
// read file content from collection data
const rows = await readLines(collectionDataPath);
if (rows.length == 0) {
app.logger.info(`${collectionName} has no data to import`);
this.importedCollections.push(collectionName);
return;
}
const rowsWithMeta = rows
.map((row) =>
JSON.parse(row)
.map((val, index) => [columns[index], val])
.reduce((carry, [column, val]) => {
const field = fieldAttributes[column];
carry[column] = field ? FieldValueWriter.write(field, val) : val;
return carry;
}, {}),
)
.filter((row) => {
if (options.rowCondition) {
return options.rowCondition(row);
}
return true;
});
if (rowsWithMeta.length == 0) {
app.logger.info(`${collectionName} has no data to import`);
this.importedCollections.push(collectionName);
return;
}
const insertGeneratorAttributes = lodash.mapKeys(rawAttributes, (value, key) => {
return value.field;
});
//@ts-ignore
const sql = db.sequelize.queryInterface.queryGenerator.bulkInsertQuery(
addSchemaTableName,
rowsWithMeta,
{},
insertGeneratorAttributes,
);
if (options.insert === false) {
return sql;
}
await app.db.sequelize.query(sql, {
type: 'INSERT',
});
app.logger.info(`${collectionName} imported with ${rowsWithMeta.length} rows`);
if (meta.autoIncrement) {
const queryInterface = app.db.queryInterface;
await queryInterface.setAutoIncrementVal({
tableInfo: isPlainObject(meta.tableName)
? meta.tableName
: {
schema: 'public',
tableName: meta.tableName,
},
columnName: meta.autoIncrement.fieldName,
seqName: meta.autoIncrement.seqName,
currentVal: meta.autoIncrement.currentVal,
});
}
this.importedCollections.push(collectionName);
}
async importDb(options: RestoreOptions) {
const sqlContentPath = path.resolve(this.workDir, 'sql-content.json');
// if sql-content.json does not exist, skip the sql import
if (!fs.existsSync(sqlContentPath)) {
return;
}
// read the dumped sql entries from sql-content.json
const sqlData = JSON.parse(await fsPromises.readFile(sqlContentPath, 'utf8'));
const sqlContent = Object.keys(sqlData)
.filter((key) => options.groups.has(sqlData[key].group))
.reduce((acc, key) => {
acc[key] = sqlData[key];
return acc;
}, {});
const queries = Object.values(
sqlContent as {
[key: string]: {
sql: string | string[];
group: DumpRulesGroupType;
};
},
);
for (const sqlData of queries) {
try {
this.app.log.info(`import sql: ${sqlData.sql}`);
for (const sql of lodash.castArray(sqlData.sql)) {
await this.app.db.sequelize.query(sql);
}
} catch (e) {
if (e.name === 'SequelizeDatabaseError') {
this.app.logger.error(e.message);
} else {
throw e;
}
}
}
}
async upgradeApp() {
await this.app.runCommand('upgrade');
}
}
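
Note (not part of the diff): a restore-only sketch, assuming an initialized `app` and an existing backup file. parseBackupFile decompresses the archive into the temporary workDir and returns the meta written by Dumper.dumpMeta, so it can be inspected before committing to a restore:

const restorer = new Restorer(app, { backUpFilePath: '/path/to/backup.nbdump' });
const meta = await restorer.parseBackupFile(); // { version, dialect, dumpableCollectionsGroupByGroup, ... }
await restorer.restore({ groups: new Set(['required', 'custom']) }); // also runs `upgrade` and clears the workDir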

View File

@@ -0,0 +1,13 @@
import { Plugin } from '@nocobase/server';
import backupFilesResourcer from './resourcers/backup-files';
import addRestoreCommand from './commands/restore-command';
export default class Duplicator extends Plugin {
beforeLoad() {
addRestoreCommand(this.app);
}
async load() {
this.app.resourcer.define(backupFilesResourcer);
}
}
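
Note (not part of the diff): beforeLoad registers the restore command so it is available before the plugin's resources load. Enabling the plugin follows the usual pm flow (a sketch; the test app above registers it under the name `duplicator`):

await app.runCommand('pm', 'enable', 'duplicator');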

View File

@@ -0,0 +1,433 @@
export default {
info: {
title: 'NocoBase API - Backup & Restore plugin',
},
tags: [],
paths: {
'/backupFiles:create': {
post: {
summary: 'Create a new backup file',
requestBody: {
required: true,
content: {
'application/json': {
schema: {
$ref: '#/components/schemas/DumpOptions',
},
},
},
},
responses: {
'200': {
description: 'Dump successful',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
key: {
type: 'string',
},
},
},
},
},
},
},
},
},
'/backupFiles:list': {
get: {
summary: 'Get backup file list',
parameters: [
{
name: 'page',
in: 'query',
description: 'Page number of item to retrieve',
required: false,
schema: {
type: 'integer',
format: 'int32',
default: 1,
},
},
{
name: 'pageSize',
in: 'query',
description: 'Number of item to retrieve per page',
required: false,
schema: {
type: 'integer',
format: 'int32',
default: 10,
},
},
],
responses: {
'200': {
description: 'A paged array of backup statuses',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
data: {
type: 'array',
items: {
oneOf: [
{
$ref: '#/components/schemas/BackUpStatusOk',
},
{
$ref: '#/components/schemas/BackUpStatusDoing',
},
],
},
},
meta: {
type: 'object',
properties: {
page: {
type: 'integer',
format: 'int32',
},
pageSize: {
type: 'integer',
format: 'int32',
},
count: {
type: 'integer',
format: 'int64',
},
totalPage: {
type: 'integer',
format: 'int32',
},
},
},
},
required: ['data', 'meta'],
},
},
},
},
},
},
},
'/backupFiles:get': {
get: {
summary: 'Get backup file info',
parameters: [
{
name: 'filterByTk',
in: 'query',
required: true,
schema: {
type: 'string',
},
},
],
responses: {
'200': {
description: 'Status of the backup operation',
content: {
'application/json': {
schema: {
oneOf: [
{
$ref: '#/components/schemas/BackUpStatusOk',
},
{
$ref: '#/components/schemas/BackUpStatusDoing',
},
],
},
},
},
},
},
},
},
'/backupFiles:download': {
get: {
summary: 'Download a backup file',
parameters: [
{
name: 'filterByTk',
in: 'query',
required: true,
schema: {
type: 'string',
},
},
],
responses: {
'200': {
description: 'Download successful',
content: {
'application/octet-stream': {
schema: {
type: 'string',
format: 'binary',
},
},
},
},
},
},
},
'/backupFiles:destroy': {
post: {
summary: 'Destroy a backup file',
parameters: [
{
name: 'filterByTk',
in: 'query',
required: true,
schema: {
type: 'string',
},
},
],
responses: {
'200': {
description: 'Destroy successful',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
status: {
type: 'string',
},
},
},
},
},
},
},
},
},
'/backupFiles:upload': {
post: {
summary: 'Upload a backup file',
requestBody: {
required: true,
content: {
'multipart/form-data': {
schema: {
type: 'object',
properties: {
file: {
type: 'string',
format: 'binary',
},
},
},
},
},
},
responses: {
200: {
description: 'Upload successful',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
key: {
type: 'string',
},
},
},
},
},
},
},
},
},
'/backupFiles:restore': {
post: {
summary: 'Restore from a backup file',
requestBody: {
required: true,
content: {
'application/json': {
schema: {
type: 'object',
properties: {
filterByTk: {
type: 'string',
},
dataTypes: {
type: 'array',
items: {
$ref: '#/components/schemas/DumpDataType',
},
uniqueItems: true,
},
key: {
type: 'string',
},
},
oneOf: [
{
required: ['filterByTk', 'dataTypes'],
},
{
required: ['key', 'dataTypes'],
},
],
},
},
},
},
responses: {
'200': {
description: 'Restore successful',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
status: {
type: 'string',
},
},
},
},
},
},
},
},
},
'/backupFiles:dumpableCollections': {
get: {
summary: 'Get dumpable collections',
responses: {
'200': {
description: 'A list of dumpable collections',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
meta: {
type: 'array',
items: {
type: 'object',
properties: {
name: {
type: 'string',
},
title: {
type: 'string',
},
},
},
},
config: {
type: 'array',
items: {
type: 'object',
properties: {
name: {
type: 'string',
},
title: {
type: 'string',
},
},
},
},
business: {
type: 'array',
items: {
type: 'object',
properties: {
name: {
type: 'string',
},
title: {
type: 'string',
},
},
},
},
},
required: ['meta', 'config', 'business'],
},
},
},
},
},
},
},
},
components: {
schemas: {
BackUpStatusOk: {
type: 'object',
properties: {
name: {
type: 'string',
},
createdAt: {
type: 'string',
format: 'date-time',
},
fileSize: {
type: 'string',
},
status: {
type: 'string',
enum: ['ok'],
},
},
required: ['name', 'createdAt', 'fileSize', 'status'],
},
BackUpStatusDoing: {
type: 'object',
properties: {
name: {
type: 'string',
},
inProgress: {
type: 'boolean',
enum: [true],
},
status: {
type: 'string',
enum: ['in_progress'],
},
},
required: ['name', 'inProgress', 'status'],
},
DumpDataType: {
type: 'string',
enum: ['meta', 'config', 'business'],
},
DumpOptions: {
type: 'object',
properties: {
dataTypes: {
type: 'array',
items: {
$ref: '#/components/schemas/DumpDataType',
},
uniqueItems: true,
},
},
required: ['dataTypes'],
},
},
},
};
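
Note (not part of the diff): a sketch of calling the documented endpoints over plain HTTP. The `http://localhost:13000/api` base URL and the bearer token are assumptions, not part of this spec:

const token = process.env.NOCOBASE_TOKEN; // assumed API token
const res = await fetch('http://localhost:13000/api/backupFiles:create', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
  body: JSON.stringify({ dataTypes: ['meta', 'config', 'business'] }),
});
const { key } = (await res.json()).data;
// then poll /api/backupFiles:get?filterByTk=<key> until status is 'ok', and download via /api/backupFiles:download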

View File

@@ -1,9 +1,9 @@
import { defineCollection } from '@nocobase/database';
export default defineCollection({
namespace: 'charts.chartsQueries',
duplicator: 'optional',
dumpRules: 'required',
name: 'chartsQueries',
shared: true,
fields: [
{
name: 'title',

View File

@@ -30,9 +30,7 @@ export class ChartsPlugin extends Plugin {
}
async load() {
await this.db.import({
directory: resolve(__dirname, 'collections'),
});
await this.importCollections(resolve(__dirname, 'collections'));
this.app.resourcer.registerActionHandlers({
'chartsQueries:getData': getData,

View File

@@ -1,10 +1,8 @@
import { defineCollection } from '@nocobase/database';
export default defineCollection({
namespace: 'china-region.china-region',
duplicator: 'skip',
dumpRules: 'skipped',
name: 'chinaRegions',
title: '中国行政区划',
autoGenId: false,
fields: [
// Using the region code as the id might save space, but codes can be up to 12 digits, which would only fit in a bigint(64)

View File

@@ -16,9 +16,7 @@ export class PluginChinaRegion extends Plugin {
}
async load() {
await this.db.import({
directory: resolve(__dirname, 'collections'),
});
await this.importCollections(resolve(__dirname, 'collections'));
this.app.acl.allow('chinaRegions', 'list', 'loggedIn');

View File

@@ -1,4 +1,4 @@
import Database, { Collection as DBCollection, HasManyRepository } from '@nocobase/database';
import Database, { Collection as DBCollection, CollectionGroupManager, HasManyRepository } from '@nocobase/database';
import Application from '@nocobase/server';
import { createApp } from '.';
import CollectionManagerPlugin, { CollectionRepository } from '../index';
@@ -120,6 +120,52 @@ describe('collections repository', () => {
});
});
it('should create collection with optional duplicator option', async () => {
await Collection.repository.create({
values: {
name: 'tests',
dumpRules: {
group: 'business',
},
fields: [
{
type: 'string',
name: 'title',
},
],
},
context: {},
});
const testsCollection = db.getCollection('tests');
const duplicator = CollectionGroupManager.unifyDumpRules(testsCollection.options.dumpRules);
expect(duplicator.group).toEqual('business');
});
it('should create collection with required duplicator option', async () => {
await Collection.repository.create({
values: {
name: 'tests',
dumpRules: {
group: 'required',
},
fields: [
{
type: 'string',
name: 'title',
},
],
},
context: {},
});
const testsCollection = db.getCollection('tests');
const duplicator = CollectionGroupManager.unifyDumpRules(testsCollection.options.dumpRules);
expect(duplicator.group).toEqual('required');
});
it('should create collection with sortable option', async () => {
await Collection.repository.create({
values: {
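
The tests above only pin down that unifyDumpRules keeps the group of an object-form rule. A plausible reading, assuming the string shorthands normalize into the same object form, is sketched below; this is an illustrative re-implementation, not the actual CollectionGroupManager code:

type DumpRules = string | { group: string };

// Illustrative normalizer: string shorthands become { group: <string> },
// object-form rules pass through unchanged.
function unifyDumpRulesSketch(rules: DumpRules): { group: string } {
  return typeof rules === 'string' ? { group: rules } : rules;
}

unifyDumpRulesSketch({ group: 'business' }); // { group: 'business' }, as the first test expects
unifyDumpRulesSketch('required');            // { group: 'required' }, assumed behaviour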

View File

@@ -1,11 +1,10 @@
 import { CollectionOptions } from '@nocobase/database';
 export default {
-  namespace: 'collection-manager.collections',
-  duplicator: {
-    dumpable: 'required',
-    with: 'collectionCategory',
+  dumpRules: {
+    group: 'required',
   },
   shared: true,
   name: 'collectionCategories',
   autoGenId: true,
   sortable: true,

View File

@@ -1,10 +1,9 @@
 import { CollectionOptions } from '@nocobase/database';
 export default {
-  namespace: 'collection-manager.collections',
-  duplicator: 'required',
+  dumpRules: 'required',
   shared: true,
   name: 'collections',
   title: '数据表配置',
   sortable: 'sort',
   autoGenId: false,
   model: 'CollectionModel',

View File

@@ -1,8 +1,8 @@
 import { CollectionOptions } from '@nocobase/database';
 export default {
-  namespace: 'collection-manager.collections',
-  duplicator: 'required',
+  dumpRules: 'required',
   shared: true,
   name: 'fields',
   autoGenId: false,
   model: 'FieldModel',

View File

@@ -22,10 +22,16 @@ export class CollectionModel extends MagicAttributeModel {
     let collection: Collection;
     const collectionOptions = {
-      namespace: 'collections.business',
+      origin: 'plugin:collection-manager',
       ...this.get(),
       fields: [],
     };
+    if (!this.db.inDialect('postgres') && collectionOptions.schema) {
+      delete collectionOptions.schema;
+    }
     if (this.db.hasCollection(name)) {
       collection = this.db.getCollection(name);
@@ -39,6 +45,10 @@ export class CollectionModel extends MagicAttributeModel {
       collection.updateOptions(collectionOptions);
     } else {
+      if (!collectionOptions.dumpRules) {
+        lodash.set(collectionOptions, 'dumpRules.group', 'custom');
+      }
       collection = this.db.collection(collectionOptions);
     }
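
Taken together, the two hunks above mean that a user-created collection without an explicit dump rule lands in the 'custom' group, and a schema option only survives on PostgreSQL. A condensed, standalone sketch of that defaulting logic, with a hypothetical trimmed-down options type:

import lodash from 'lodash';

interface SketchOptions {
  origin?: string;
  schema?: string;
  dumpRules?: { group?: string };
}

function applyDefaults(options: SketchOptions, dialect: string): SketchOptions {
  // schema is a PostgreSQL-only concept here; drop it on other dialects
  if (dialect !== 'postgres' && options.schema) {
    delete options.schema;
  }
  // user-created collections default into the 'custom' dump group
  if (!options.dumpRules) {
    lodash.set(options, 'dumpRules.group', 'custom');
  }
  return options;
}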

View File

@@ -361,8 +361,8 @@ export class CollectionManagerPlugin extends Plugin {
     this.app.db.extendCollection({
       name: 'collectionCategory',
-      namespace: 'collection-manager',
-      duplicator: 'required',
+      dumpRules: 'required',
+      origin: `plugin:${this.name}`,
     });
   }
 }

View File

@@ -1,10 +1,8 @@
-import { CollectionOptions } from '@nocobase/client';
+import { defineCollection } from '@nocobase/database';
-export default {
-  namespace: 'custom-requests',
-  duplicator: 'optional',
+export default defineCollection({
+  dumpRules: 'required',
   name: 'customRequests',
   title: '{{t("Custom request")}}',
   fields: [
     {
       type: 'uid',
@@ -27,4 +25,4 @@
       name: 'options', // all of the configured request parameters go here
     },
   ],
-} as CollectionOptions;
+});

View File

@@ -1,8 +1,6 @@
-import { CollectionOptions } from '@nocobase/client';
+import { defineCollection } from '@nocobase/database';
-export default {
-  namespace: 'custom-requests',
-  duplicator: 'optional',
+export default defineCollection({
+  dumpRules: 'required',
   name: 'customRequestsRoles',
   title: '{{t("Custom request")}}',
-} as CollectionOptions;
+});

View File

@@ -24,9 +24,7 @@ export class CustomRequestPlugin extends Plugin {
   }
   async load() {
-    await this.db.import({
-      directory: resolve(__dirname, './collections'),
-    });
+    await this.importCollections(resolve(__dirname, 'collections'));
     this.app.resource({
       name: 'customRequests',

View File

@@ -1,7 +0,0 @@
-import { Plugin } from '@nocobase/client';
-export class DuplicatorPlugin extends Plugin {
-  async load() {}
-}
-export default DuplicatorPlugin;

View File

@@ -1,8 +0,0 @@
-{
-  "Select Import data": "Select Import data",
-  "Select Import Plugins": "Select Import Plugins",
-  "Select User Collections": "Select User Collections",
-  "Basic Data": "Basic Data",
-  "Optional Data": "Optional Data",
-  "User Data": "User Data"
-}

View File

@@ -1,8 +0,0 @@
-{
-  "Select Import data": "请选择导入数据",
-  "Select Import Plugins": "请选择导入插件",
-  "Select User Collections": "请选择用户数据",
-  "Basic Data": "基础数据",
-  "Optional Data": "可选数据",
-  "User Data": "用户数据"
-}

View File

@@ -1,73 +0,0 @@
-import { mockServer, MockServer } from '@nocobase/test';
-import path from 'path';
-describe('duplicator api', () => {
-  let app: MockServer;
-  beforeEach(async () => {
-    app = mockServer();
-    app.plugin((await import('../server')).default, { name: 'duplicator' });
-    app.plugin('error-handler');
-    app.plugin('collection-manager');
-    await app.loadAndInstall({ clean: true });
-  });
-  afterEach(async () => {
-    await app.destroy();
-  });
-  it('should get collection groups', async () => {
-    await app.db.getRepository('collections').create({
-      values: {
-        name: 'test_collection',
-        title: '测试Collection',
-        fields: [
-          {
-            name: 'test_field1',
-            type: 'string',
-          },
-        ],
-      },
-      context: {},
-    });
-    const collectionGroupsResponse = await app.agent().resource('duplicator').dumpableCollections();
-    expect(collectionGroupsResponse.status).toBe(200);
-    const data = collectionGroupsResponse.body;
-    expect(data['requiredGroups']).toBeTruthy();
-    expect(data['optionalGroups']).toBeTruthy();
-    expect(data['userCollections']).toBeTruthy();
-  });
-  it('should request dump api', async () => {
-    const dumpResponse = await app.agent().post('/duplicator:dump').send({
-      selectedCollectionGroups: [],
-      selectedUserCollections: [],
-    });
-    expect(dumpResponse.status).toBe(200);
-  });
-  it('should request restore api', async () => {
-    const packageInfoResponse = await app
-      .agent()
-      .post('/duplicator:upload')
-      .attach('file', path.resolve(__dirname, './fixtures/dump.nbdump.fixture'));
-    console.log(packageInfoResponse.body);
-    expect(packageInfoResponse.status).toBe(200);
-    const data = packageInfoResponse.body.data;
-    expect(data['key']).toBeTruthy();
-    expect(data['meta']).toBeTruthy();
-    const restoreResponse = await app.agent().post('/duplicator:restore').send({
-      restoreKey: data['key'],
-      selectedOptionalGroups: [],
-      selectedUserCollections: [],
-    });
-    expect(restoreResponse.status).toBe(200);
-  });
-});

View File

@@ -1,37 +0,0 @@
-import { mockServer, MockServer } from '@nocobase/test';
-import { CollectionGroupManager } from '../collection-group-manager';
-describe('collection group manager', () => {
-  let app: MockServer;
-  beforeEach(async () => {
-    app = mockServer({
-      plugins: ['error-handler', 'collection-manager'],
-    });
-    await app.loadAndInstall({
-      clean: true,
-    });
-  });
-  afterEach(async () => {
-    await app.destroy();
-  });
-  it('should list collection groups from db collections', async () => {
-    const collectionGroups = CollectionGroupManager.getGroups(app);
-    expect(collectionGroups.map((i) => i.function)).toMatchObject([
-      'migration',
-      'applicationPlugins',
-      'applicationVersion',
-      'collections',
-    ]);
-    expect(collectionGroups.find((i) => i.function === 'collections')).toMatchObject({
-      namespace: 'collection-manager',
-      function: 'collections',
-      collections: ['collectionCategory', 'collectionCategories', 'collections', 'fields'],
-      dumpable: 'required',
-    });
-  });
-});

View File

@@ -1,35 +0,0 @@
-import { MockServer } from '@nocobase/test';
-import createApp from './index';
-import { Dumper } from '../dumper';
-describe('dumper', () => {
-  let app: MockServer;
-  beforeEach(async () => {
-    app = await createApp();
-  });
-  afterEach(async () => {
-    await app.destroy();
-  });
-  it('should get collection groups', async () => {
-    await app.db.getRepository('collections').create({
-      values: {
-        name: 'test_collection',
-        fields: [
-          {
-            name: 'test_field1',
-            type: 'string',
-          },
-        ],
-      },
-      context: {},
-    });
-    const dump = new Dumper(app);
-    const dumpableCollections = await dump.dumpableCollections();
-    expect((dumpableCollections.requiredGroups || []).length).toBeGreaterThan(0);
-    expect(dumpableCollections.userCollections[0]['name']).toEqual('test_collection');
-  });
-});

View File

@@ -1,25 +0,0 @@
-import { Dumper } from '../dumper';
-import send from 'koa-send';
-import { getApp } from './get-app';
-export default async function dumpAction(ctx, next) {
-  const data = <
-    {
-      selectedOptionalGroupNames: string[];
-      selectedUserCollections: string[];
-      app?: string;
-    }
-  >ctx.request.body;
-  const app = await getApp(ctx, data.app);
-  const dumper = new Dumper(app);
-  const { filePath, dirname } = await dumper.dump(data);
-  await send(ctx, filePath.replace(dirname, ''), {
-    root: dirname,
-  });
-  await next();
-}
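
Although the action above is removed by this PR, its download mechanics deserve a note: koa-send receives the dump directory as root and the path relative to it, which both streams the file and confines resolution to that directory. A minimal standalone sketch of the same pattern, with hypothetical file names:

import Koa from 'koa';
import send from 'koa-send';
import path from 'path';

const app = new Koa();

app.use(async (ctx) => {
  const dirname = path.resolve(__dirname, 'backups');  // hypothetical dump directory
  const filePath = path.join(dirname, 'dump.nbdump');  // hypothetical dump file
  // Same pattern as the removed action: strip the root prefix and let
  // koa-send resolve the remainder underneath root.
  await send(ctx, filePath.replace(dirname, ''), { root: dirname });
});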

Some files were not shown because too many files have changed in this diff.