
refactor: fetch data as batches

pull/8590/head
Pranav C, 6 months ago
commit 7ae8410388
2 changed files:
  4   packages/nocodb/src/meta/migrations/XcMigrationSourcev2.ts
  77  packages/nocodb/src/meta/migrations/v2/nc_047_comment_migration.ts

packages/nocodb/src/meta/migrations/XcMigrationSourcev2.ts (4 changes)

@@ -33,6 +33,7 @@ import * as nc_043_user_refresh_token from '~/meta/migrations/v2/nc_043_user_refresh_token';
import * as nc_044_view_column_index from '~/meta/migrations/v2/nc_044_view_column_index';
import * as nc_045_extensions from '~/meta/migrations/v2/nc_045_extensions';
import * as nc_046_comment_mentions from '~/meta/migrations/v2/nc_046_comment_mentions';
import * as nc_047_comment_migration from '~/meta/migrations/v2/nc_047_comment_migration';
// Create a custom migration source class
export default class XcMigrationSourcev2 {
@@ -77,6 +78,7 @@ export default class XcMigrationSourcev2 {
'nc_044_view_column_index',
'nc_045_extensions',
'nc_046_comment_mentions',
'nc_047_comment_migration',
]);
}
@@ -156,6 +158,8 @@ export default class XcMigrationSourcev2 {
return nc_045_extensions;
case 'nc_046_comment_mentions':
return nc_046_comment_mentions;
case 'nc_047_comment_migration':
return nc_047_comment_migration;
}
}
}

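The change to XcMigrationSourcev2.ts only registers the new migration with NocoDB's custom Knex migration source: the module is imported, its name is appended to the list returned by getMigrations(), and a switch case lets getMigration() resolve that name back to the module. For reference, a minimal sketch of that pattern follows; the class name and relative import path are illustrative, not the actual NocoDB layout.

// Minimal sketch of a custom Knex migration source with the new
// migration registered (illustrative names, not the real file).
import * as nc_047_comment_migration from './nc_047_comment_migration';

export default class ExampleMigrationSource {
  // Knex calls this to discover the migrations this source provides.
  getMigrations(): Promise<string[]> {
    return Promise.resolve([
      // ...earlier migrations elided...
      'nc_046_comment_mentions',
      'nc_047_comment_migration', // newly registered migration
    ]);
  }

  // Each migration is identified by its name, so return it unchanged.
  getMigrationName(migration: string): string {
    return migration;
  }

  // Resolve a migration name back to the module exposing up()/down().
  getMigration(migration: string): any {
    switch (migration) {
      // ...earlier cases elided...
      case 'nc_047_comment_migration':
        return nc_047_comment_migration;
    }
  }
}
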
packages/nocodb/src/meta/migrations/v2/nc_047_comment_migration.ts (77 changes)

@@ -4,12 +4,15 @@ import { MetaTable } from '~/utils/globals';
const logger = new Logger('nc_046_comment_mentions');
const BATCH_SIZE = 5000;
const READ_BATCH_SIZE = 1000;
const INSERT_BATCH_SIZE = 200;
const up = async (knex: Knex) => {
try {
logger.log('nc_047_comment_migration: Migration Started');
logger.log('Migration Started');
let fetchNextBatch = true;
for (let offset = 0; fetchNextBatch; offset += READ_BATCH_SIZE) {
const rows = await knex
.select(
`${MetaTable.AUDIT}.id`,
@@ -29,43 +32,49 @@ const up = async (knex: Knex) => {
MetaTable.USERS,
`${MetaTable.AUDIT}.user`,
`${MetaTable.USERS}.email`,
);
logger.log('nc_046_comment_mentions: Data from Audit Table fetched');
)
.offset(offset)
// increase limit by 1 to check if there are more rows
.limit(READ_BATCH_SIZE + 1);
if (!rows.length) {
logger.log(
'nc_046_comment_mentions: No Data Found to Migrate from Audit Table',
);
return;
}
const formattedRows = rows.map((row) => ({
id: row.id,
row_id: row.row_id,
comment: (row.description ?? '')
.substring((row.description ?? '').indexOf(':') + 1)
.trim(),
created_by: row.user_id,
created_by_email: row.user_email,
source_id: row.source_id,
base_id: row.base_id,
fk_model_id: row.fk_model_id,
created_at: row.created_at,
updated_at: row.updated_at,
}));
logger.log(
`Data from Audit Table fetched, batch: ${offset} - ${
offset + READ_BATCH_SIZE
}`,
);
logger.log('nc_046_comment_mentions: Data from Audit Table formatted');
const formattedRows = rows
// exclude the last row since it was used to check if there are more rows
.slice(0, READ_BATCH_SIZE)
.map((row) => ({
id: row.id,
row_id: row.row_id,
comment: (row.description ?? '')
.substring((row.description ?? '').indexOf(':') + 1)
.trim(),
created_by: row.user_id,
created_by_email: row.user_email,
source_id: row.source_id,
base_id: row.base_id,
fk_model_id: row.fk_model_id,
created_at: row.created_at,
updated_at: row.updated_at,
}));
return knex.batchInsert(MetaTable.COMMENTS, formattedRows, BATCH_SIZE);
logger.log('Data from Audit Table formatted');
logger.log(
'nc_047_comment_migration: Data migrated from Audit Table to Comments Table',
);
} catch (error) {
logger.error(
'nc_046_comment_mentions: Error while migrating data from Audit Table',
await knex.batchInsert(
MetaTable.COMMENTS,
formattedRows,
INSERT_BATCH_SIZE,
);
// check if there are more rows to fetch
fetchNextBatch = rows.length > READ_BATCH_SIZE;
}
logger.log(
'Data migrated from Audit Table to Comments Table',
);
};
const down = async (knex: Knex) => {

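The refactor replaces the single all-rows read with offset-based batches: each iteration fetches READ_BATCH_SIZE + 1 rows, the extra row is a cheap look-ahead that decides whether another iteration is needed, it is sliced off before formatting, and the batch is written with knex.batchInsert in chunks of INSERT_BATCH_SIZE rows. A self-contained sketch of that loop, using placeholder table and column names rather than NocoDB's audit/comments schema, could look like this:

import type { Knex } from 'knex';

const READ_BATCH_SIZE = 1000; // rows fetched per round trip
const INSERT_BATCH_SIZE = 200; // rows per INSERT issued by batchInsert

export async function copyInBatches(knex: Knex): Promise<void> {
  let fetchNextBatch = true;

  for (let offset = 0; fetchNextBatch; offset += READ_BATCH_SIZE) {
    // Fetch one extra row to learn whether another batch exists
    // without running a separate COUNT query.
    const rows = await knex('source_table')
      .select('id', 'payload')
      .orderBy('id') // explicit ordering keeps offset pagination stable
      .offset(offset)
      .limit(READ_BATCH_SIZE + 1);

    if (!rows.length) break;

    // Drop the look-ahead row before transforming the batch.
    const batch = rows.slice(0, READ_BATCH_SIZE).map((row) => ({
      id: row.id,
      payload: row.payload,
    }));

    // batchInsert splits the array into chunks of INSERT_BATCH_SIZE rows.
    await knex.batchInsert('target_table', batch, INSERT_BATCH_SIZE);

    // More rows exist only if the look-ahead row came back.
    fetchNextBatch = rows.length > READ_BATCH_SIZE;
  }
}

One difference from the migration above: the sketch adds an explicit orderBy so the OFFSET/LIMIT pages stay deterministic across iterations, whereas the migration pages over the Audit table without an explicit ORDER BY.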