
fix: get data as batch

pull/9879/head
DarkPhoenix2704 13 hours ago
parent
commit e895c0d69d
  1. packages/nocodb/src/models/Comment.ts (31)
  2. packages/nocodb/src/modules/jobs/jobs/export-import/export.service.ts (16)

packages/nocodb/src/models/Comment.ts

@@ -44,13 +44,9 @@ export default class Comment implements CommentType {
   public static async listByModel(
     context: NcContext,
     fk_model_id: string,
+    pagination?: { limit: number; offset: number },
     ncMeta = Noco.ncMeta,
   ): Promise<Comment[]> {
-    const READ_BATCH_SIZE = 1000;
-    const allComments: Comment[] = [];
-    let fetchNextBatch = true;
-    for (let offset = 0; fetchNextBatch; offset += READ_BATCH_SIZE) {
     const comments = await ncMeta.metaList2(
       context.workspace_id,
       context.base_id,
@@ -60,33 +56,20 @@ export default class Comment implements CommentType {
         fk_model_id,
       },
       orderBy: {
-        created_at: 'asc'
+        id: 'asc'
       },
-      limit: READ_BATCH_SIZE + 1,
-      offset,
-      xcCondition:
-        {
+      limit: pagination?.limit,
+      offset: pagination?.offset,
+      xcCondition: {
         _or: [
           { is_deleted: { eq: null } },
-          { is_deleted: {
-            eq: true
-          } },
+          { is_deleted: { eq: true } },
         ]
       }
     }
   );
-      const batchComments = comments
-        .slice(0, READ_BATCH_SIZE)
-        .map(comment => new Comment(comment));
-      allComments.push(...batchComments);
-      fetchNextBatch = comments.length > READ_BATCH_SIZE;
-    }
-    return allComments;
+    return comments.map(comment => new Comment(comment));
   }

   public static async list(
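
Comment.listByModel now takes an optional pagination argument and returns a single page ordered by id, so callers that need every comment drive the paging themselves. A minimal sketch of that calling pattern, assuming a NcContext named ctx and a model id named modelId (these names and PAGE_SIZE are illustrative, not part of this commit):

// Sketch only: page through comments with the new optional pagination argument.
// ctx, modelId and PAGE_SIZE are placeholder names chosen for this example.
const PAGE_SIZE = 100;
const all: Comment[] = [];
for (let offset = 0; ; offset += PAGE_SIZE) {
  const page = await Comment.listByModel(ctx, modelId, {
    limit: PAGE_SIZE,
    offset,
  });
  all.push(...page);
  // A short page means there is nothing left to fetch.
  if (page.length < PAGE_SIZE) break;
}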

packages/nocodb/src/modules/jobs/jobs/export-import/export.service.ts

@@ -383,7 +383,21 @@ export class ExportService {
     const serializedComments = [];
     if (!excludeComments) {
-      const comments = await Comment.listByModel(context, model.id);
+      const READ_BATCH_SIZE = 100;
+      let comments: Comment[] = [];
+      let offset = 0;
+      while (true) {
+        const batchComments = await Comment.listByModel(context, model.id, {
+          limit: READ_BATCH_SIZE + 1,
+          offset
+        });
+        comments.push(...batchComments.slice(0, READ_BATCH_SIZE));
+        if (batchComments.length <= READ_BATCH_SIZE) break;
+        offset += READ_BATCH_SIZE;
+      }

       for (const comment of comments) {
         idMap.set(comment.id, `${idMap.get(model.id)}::${comment.id}`);
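
The export path requests READ_BATCH_SIZE + 1 rows per call: the extra row is sliced off before being kept and only signals whether another batch exists, which avoids a final empty request. The same probe pattern, pulled out into a generic helper as a sketch (readAll and fetchPage are hypothetical names, not part of this commit):

// Sketch of the "+1 probe" batching pattern used above, generalized.
// fetchPage is a hypothetical (limit, offset) => Promise<T[]> supplied by the caller.
async function readAll<T>(
  fetchPage: (limit: number, offset: number) => Promise<T[]>,
  batchSize = 100,
): Promise<T[]> {
  const rows: T[] = [];
  for (let offset = 0; ; offset += batchSize) {
    // Request one extra row so the result length tells us if another batch follows.
    const batch = await fetchPage(batchSize + 1, offset);
    rows.push(...batch.slice(0, batchSize));
    if (batch.length <= batchSize) break;
  }
  return rows;
}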
