
refactor: add option to pass chunk size in bulk insert method

Signed-off-by: Pranav C <pranavxc@gmail.com>
pull/2218/head
Pranav C 2 years ago
commit 771c1c543b
  1. packages/nocodb/src/lib/dataMapper/lib/sql/BaseModelSqlv2.ts (11 changes)
  2. packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts (10 changes)

packages/nocodb/src/lib/dataMapper/lib/sql/BaseModelSqlv2.ts (11 changes)

@@ -1513,7 +1513,14 @@ class BaseModelSqlv2 {
     }
   }
 
-  async bulkInsert(datas: any[]) {
+  async bulkInsert(
+    datas: any[],
+    {
+      chunkSize: _chunkSize = 100
+    }: {
+      chunkSize?: number;
+    } = {}
+  ) {
     try {
       const insertDatas = await Promise.all(
         datas.map(async d => {
@@ -1536,7 +1543,7 @@ class BaseModelSqlv2 {
 
       // fallbacks to `10` if database client is sqlite
       // to avoid `too many SQL variables` error
      // refer : https://www.sqlite.org/limits.html
-      const chunkSize = this.isSqlite ? 10 : 50;
+      const chunkSize = this.isSqlite ? 10 : _chunkSize;
       const response = await this.dbDriver
         .batchInsert(this.model.table_name, insertDatas, chunkSize)
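
The diff above swaps the hard-coded non-SQLite chunk size of 50 for a caller-supplied `chunkSize` option that defaults to 100, while keeping the SQLite cap of 10. A minimal, self-contained sketch of that resolution logic (the helper name is illustrative, not part of nocodb; the real logic sits inside `BaseModelSqlv2.bulkInsert`):

// Sketch only: `resolveChunkSize` is a hypothetical standalone helper.
function resolveChunkSize(
  isSqlite: boolean,
  { chunkSize: _chunkSize = 100 }: { chunkSize?: number } = {}
): number {
  // SQLite stays capped at 10 to avoid the `too many SQL variables` error;
  // other clients use the caller-supplied value, defaulting to 100.
  return isSqlite ? 10 : _chunkSize;
}

resolveChunkSize(false);                     // 100 (default)
resolveChunkSize(false, { chunkSize: 500 }); // 500 (caller-tuned)
resolveChunkSize(true, { chunkSize: 500 });  // 10 (SQLite cap still applies)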

packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts (10 changes)

@@ -126,7 +126,7 @@ export async function importLTARData({
   logDetailed: (string) => void;
   logBasic: (string) => void;
   api: Api<any>;
-  insertedAssocRef: { [assocId: string]: boolean };
+  insertedAssocRef: { [assocTableId: string]: boolean };
 }) {
   const assocTableMetas: Array<{
     modelMeta: { id?: string; title?: string };
@@ -139,6 +139,7 @@ export async function importLTARData({
   const modelMeta: any = await api.dbTable.read(table.id);
 
   for (const colMeta of modelMeta.columns) {
+    // skip columns which are not LTAR and Many to many
     if (
       colMeta.uidt !== UITypes.LinkToAnotherRecord ||
       colMeta.colOptions.type !== RelationTypes.MANY_TO_MANY
@@ -146,14 +147,17 @@ export async function importLTARData({
       continue;
     }
 
+    // skip if already inserted
     if (colMeta.colOptions.fk_mm_model_id in insertedAssocRef) continue;
 
+    // mark as inserted
     insertedAssocRef[colMeta.colOptions.fk_mm_model_id] = true;
 
     const assocModelMeta: TableType = (await api.dbTable.read(
       colMeta.colOptions.fk_mm_model_id
     )) as any;
 
+    // extract associative table and columns meta
     assocTableMetas.push({
       modelMeta: assocModelMeta,
       colMeta,
@@ -166,8 +170,11 @@ export async function importLTARData({
     });
   }
 
+  // Iterate over all related M2M associative table
   for (const assocMeta of assocTableMetas) {
     const insertData = [];
+
+    // extract insert data from records
     for (const record of allData) {
       const rec = record.fields;
@@ -180,6 +187,7 @@ export async function importLTARData({
       );
     }
 
+    // Insert datas as chunks of size `ASSOC_BULK_DATA_BATCH_SIZE`
     for (
       let i = 0;
       i < insertData.length / ASSOC_BULK_DATA_BATCH_SIZE;
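
The import helper above is the caller that benefits from the new option: it already splits the associative-table rows into batches of `ASSOC_BULK_DATA_BATCH_SIZE` before inserting them. A minimal sketch of that batching pattern, assuming illustrative names (`insertRows` and the constant's value are placeholders, not the exact nocodb wiring):

// Assumed value, for illustration only.
const ASSOC_BULK_DATA_BATCH_SIZE = 2000;

// Sketch only: `insertRows` stands in for whatever bulk-insert call the
// import pipeline uses; it accepts the optional `chunkSize` added above.
async function insertInBatches(
  insertData: Record<string, unknown>[],
  insertRows: (
    rows: Record<string, unknown>[],
    opts?: { chunkSize?: number }
  ) => Promise<void>
): Promise<void> {
  for (let i = 0; i < insertData.length; i += ASSOC_BULK_DATA_BATCH_SIZE) {
    const batch = insertData.slice(i, i + ASSOC_BULK_DATA_BATCH_SIZE);
    // forward the batch size so the insert side chunks rows consistently
    await insertRows(batch, { chunkSize: ASSOC_BULK_DATA_BATCH_SIZE });
  }
}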
