fix: optimize import performance
commit 83bca97db0
parent 05740672a0
@@ -3,6 +3,9 @@ import { LOG_TYPE_CREATE } from '../constants';
 
 export function afterCreate(app: Application) {
   return async (model, options) => {
+    if (options.logging === false) {
+      return;
+    }
     const db = app.db;
     const collection = db.getCollection(model.constructor.name);
     if (!collection || !collection.options.logging) {
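The guard added above makes the log-recording afterCreate hook opt-out per operation: any write that passes logging: false in its options returns before the extra log row is inserted, which is what the import loop further down relies on. A caller-side sketch, with a hypothetical posts collection (only the logging flag itself comes from this commit):

// The afterCreate hook patched above sees `options.logging === false` and returns
// immediately, so no operation-log row is written for this insert.
await app.db.getCollection('posts').repository.create({
  values: { title: 'imported row' },
  logging: false,
});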
@@ -3,6 +3,10 @@ import { Migration } from '@nocobase/server';
 
 export default class UpdateIdToBigIntMigrator extends Migration {
   async up() {
+    const result = await this.app.version.satisfies('<0.9.0-alpha.1');
+    if (!result) {
+      return;
+    }
     const db = this.app.db;
 
     await db.getCollection('fields').repository.update({
@@ -3,6 +3,10 @@ import { Migration } from '@nocobase/server';
 
 export default class UpdateIdToBigIntMigrator extends Migration {
   async up() {
+    const result = await this.app.version.satisfies('<0.9.0-alpha.1');
+    if (!result) {
+      return;
+    }
     const db = this.app.db;
 
     await db.getCollection('fields').repository.update({
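Both migration hunks add the same early return: this.app.version.satisfies('<0.9.0-alpha.1') checks the recorded application version, so the id-to-bigint field rewrite only runs when upgrading from a version older than 0.9.0-alpha.1 and is skipped otherwise. A minimal sketch of the guard pattern, with an illustrative class name:

import { Migration } from '@nocobase/server';

// Illustrative migration showing only the version guard used above.
export default class ExampleGuardedMigration extends Migration {
  async up() {
    // Skip the expensive work unless coming from a version older than 0.9.0-alpha.1.
    if (!(await this.app.version.satisfies('<0.9.0-alpha.1'))) {
      return;
    }
    // ...costly schema updates would follow here, as in UpdateIdToBigIntMigrator.
  }
}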
@@ -23,7 +23,7 @@ export async function importXlsx(ctx: Context, next: Next) {
   } = xlsx.parse(file.buffer);
   const failureData = originalList.splice(IMPORT_LIMIT_COUNT + 1);
   const titles = originalList.shift();
-  const legalList = [];
+  const legalList: any[] = [];
   if (originalList.length > 0 && titles?.length === columns.length) {
     // const results = (
     //   await Promise.allSettled<any>(
@@ -38,12 +38,12 @@ export async function importXlsx(ctx: Context, next: Next) {
     //     }),
     //   )
     // ).filter((item) => 'value' in item && item.value !== undefined);
-    const values = [];
+    const values: any[] = [];
     for (const item of originalList) {
       try {
         const transformResult = await transform({ ctx, record: item, columns, fields: collectionFields });
         values.push(transformResult);
-        legalList.push(cloneDeep(item));
+        legalList.push(cloneDeep<any>(item));
       } catch (error) {
         failureData.unshift([...item, error.message]);
       }
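These two hunks only tighten types: legalList and values become any[] and cloneDeep gets an explicit type argument, so the heterogeneous row tuples parsed from the sheet can be pushed without TypeScript inference errors; the per-row bookkeeping itself is unchanged. For orientation, a condensed sketch of that bookkeeping (names taken from the hunk, error shape assumed):

const values: any[] = [];     // rows already transformed into field values
const legalList: any[] = [];  // raw rows kept so later failures can be reported verbatim
for (const item of originalList) {
  try {
    values.push(await transform({ ctx, record: item, columns, fields: collectionFields }));
    legalList.push(cloneDeep<any>(item));
  } catch (error) {
    // a row that fails transformation goes back to failureData together with the reason
    failureData.unshift([...item, (error as Error).message]);
  }
}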
@@ -51,14 +51,24 @@
     //@ts-ignore
     // const values = results.map((r) => r.value);
     const result = await ctx.db.sequelize.transaction(async (transaction) => {
+      let sort: number = 0;
+      if (collection.options.sortable) {
+        sort = await repository.model.max<number, any>('sort', { transaction });
+      }
       for (const [index, val] of values.entries()) {
         if (val === undefined || val === null) {
           continue;
         }
         try {
+          let values = { ...val };
+          if (collection.options.sortable) {
+            sort += 1;
+            values['sort'] = sort;
+          }
           await repository.create({
-            values: { ...val },
+            values,
             transaction,
+            logging: false,
           });
         } catch (error) {
           const failData = legalList[index];
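This hunk is the core of the fix: when the collection is sortable, max('sort') is now queried once per transaction and subsequent positions are assigned in memory instead of per inserted row, and every create passes logging: false so the log hook patched in the first hunk returns immediately. A condensed sketch of the resulting loop (repository, collection and values come from the surrounding importXlsx code; the || 0 fallback for an empty table and the omitted error handling are simplifications of this sketch, not part of the commit):

let sort = 0;
if (collection.options.sortable) {
  // one query for the current maximum instead of one per inserted row
  sort = (await repository.model.max<number, any>('sort', { transaction })) || 0;
}
for (const val of values) {
  if (val === undefined || val === null) continue;
  const row: any = { ...val };
  if (collection.options.sortable) {
    row['sort'] = ++sort; // positions assigned in memory
  }
  await repository.create({
    values: row,
    transaction,
    logging: false, // the afterCreate log hook returns early, no per-row log insert
  });
}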