diff --git a/packages/nc-gui/components/import/ImportFromAirtable.vue b/packages/nc-gui/components/import/ImportFromAirtable.vue
index 1050809630..d22031d35f 100644
--- a/packages/nc-gui/components/import/ImportFromAirtable.vue
+++ b/packages/nc-gui/components/import/ImportFromAirtable.vue
@@ -169,7 +169,7 @@
mdi-loading mdi-spin
- Syncing
+ Importing
diff --git a/packages/nocodb/src/lib/noco/Noco.ts b/packages/nocodb/src/lib/noco/Noco.ts
index 5227638c47..5fe7de491d 100644
--- a/packages/nocodb/src/lib/noco/Noco.ts
+++ b/packages/nocodb/src/lib/noco/Noco.ts
@@ -211,7 +211,7 @@ export default class Noco {
this.router.use(cookieParser());
this.router.use(
bodyParser.json({
- limit: process.env.NC_REQUEST_BODY_SIZE || 1024 * 1024
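+        // '50mb' is a bytes() string understood by body-parser; presumably raised to fit the larger bulk-import payloads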
+ limit: process.env.NC_REQUEST_BODY_SIZE || '50mb'
})
);
this.router.use(morgan('tiny'));
diff --git a/packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts b/packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts
index 898e4e3ba5..999b5e2f71 100644
--- a/packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts
+++ b/packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts
@@ -1261,7 +1261,7 @@ export default async (
async function nocoBaseDataProcessing_v2(sDB, table, record) {
const recordHash = hash(record);
- const rec = record.fields;
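+    // shallow-copy fields so the column-name trimming below doesn't mutate the cached record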
+ const rec = { ...record.fields };
// kludge -
// trim spaces on either side of column name
@@ -2230,6 +2230,8 @@ export default async (
logBasic('Reading Records...');
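+  // cache fetched records per table id so the LTAR pass below can reuse them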
+ const recordsMap = {};
+
for (let i = 0; i < ncTblList.list.length; i++) {
const _perfStart = recordPerfStart();
const ncTbl = await api.dbTable.read(ncTblList.list[i].id);
@@ -2242,7 +2244,7 @@ export default async (
recordCnt = 0;
// await nocoReadData(syncDB, ncTbl);
- await importData({
+ recordsMap[ncTbl.id] = await importData({
projectName: syncDB.projectName,
table: ncTbl,
base,
@@ -2267,7 +2269,8 @@ export default async (
fields: null, //Object.values(tblLinkGroup).flat(),
logBasic,
insertedAssocRef,
- logDetailed
+ logDetailed,
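+        // reuse the records cached during data import instead of reading Airtable again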
+ records: recordsMap[ncTbl.id]
});
}
diff --git a/packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts b/packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts
index a15f5556f1..0dfb0dc870 100644
--- a/packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts
+++ b/packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts
@@ -1,52 +1,70 @@
import { AirtableBase } from 'airtable/lib/airtable_base';
import { Api, RelationTypes, TableType, UITypes } from 'nocodb-sdk';
-const BULK_DATA_BATCH_SIZE = 500;
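+// bigger batches mean fewer bulkCreate round-trips; the raised request body limit in Noco.ts covers the larger payloads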
+const BULK_DATA_BATCH_SIZE = 2000;
const ASSOC_BULK_DATA_BATCH_SIZE = 2000;
-async function readAndProcessData({
+async function readAllData({
table,
fields,
- base
-}: // logDetailed
-// logBasic = _str => ()
-{
+ base,
+ logBasic = _str => {},
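+  // stream records out: every triggerThreshold records, onThreshold is awaited with that batch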
+ triggerThreshold = BULK_DATA_BATCH_SIZE,
+ onThreshold = async _rec => {}
+}: {
table: { title?: string };
fields?;
base: AirtableBase;
logBasic?: (string) => void;
logDetailed?: (string) => void;
+ triggerThreshold?: number;
+ onThreshold?: (
+ records: Array<{ fields: any; id: string }>,
+ allRecords?: Array<{ fields: any; id: string }>
+  ) => Promise<void>;
}): Promise<Array<any>> {
return new Promise((resolve, reject) => {
const data = [];
+ let thresholdCbkData = [];
+
const selectParams: any = {
pageSize: 100
};
+
if (fields) selectParams.fields = fields;
base(table.title)
.select(selectParams)
.eachPage(
async function page(records, fetchNextPage) {
- // console.log(JSON.stringify(records, null, 2));
-
- // This function (`page`) will get called for each page of records.
- // records.forEach(record => callback(table, record));
- // logBasic(
- // `:: ${table.title} / ${fields} : ${recordCnt +
- // 1} ~ ${(recordCnt += 100)}`
- // );
data.push(...records);
+ thresholdCbkData.push(...records);
+
+ logBasic(
+ `:: Reading '${table.title}' data :: ${Math.max(
+ 1,
+ data.length - records.length
+ )} - ${data.length}`
+ );
+
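+          // hand a full batch to the caller while pagination continues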
+ if (thresholdCbkData.length >= triggerThreshold) {
+ await onThreshold(thresholdCbkData, data);
+ thresholdCbkData = [];
+ }
// To fetch the next page of records, call `fetchNextPage`.
// If there are more records, `page` will get called again.
// If there are no more records, `done` will get called.
fetchNextPage();
},
- function done(err) {
+ async function done(err) {
if (err) {
console.error(err);
- reject(err);
+ return reject(err);
+ }
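+          // flush any remainder smaller than the threshold before resolving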
+ if (thresholdCbkData.length) {
+ await onThreshold(thresholdCbkData, data);
+ thresholdCbkData = [];
}
resolve(data);
}
@@ -61,7 +79,8 @@ export async function importData({
api,
nocoBaseDataProcessing_v2,
sDB,
- logDetailed = _str => {}
+ logDetailed = _str => {},
+ logBasic = _str => {}
}: {
projectName: string;
table: { title?: string; id?: string };
@@ -72,38 +91,31 @@ export async function importData({
api: Api;
nocoBaseDataProcessing_v2;
sDB;
-}) {
+}): Promise<any> {
try {
- // get all data from a table
- const allData = [];
- const records = await readAndProcessData({
+ // @ts-ignore
+ const records = await readAllData({
table,
- base
- });
+ base,
+ logDetailed,
+ logBasic,
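+      // process and bulk-insert each batch as it streams in, instead of buffering the whole table first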
+ async onThreshold(records, allRecords) {
+ const allData = [];
+ for (let i = 0; i < records.length; i++) {
+ const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
+ allData.push(r);
+ }
- for (let i = 0; i < records.length; i++) {
- const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
- allData.push(r);
- }
+ logBasic(
+ `:: Importing '${table.title}' data :: ${allRecords.length -
+ records.length +
+ 1} - ${allRecords.length}`
+ );
+ await api.dbTableRow.bulkCreate('nc', projectName, table.id, allData);
+ }
+ });
- for (
- let i = 0;
- i < allData.length / BULK_DATA_BATCH_SIZE;
- i += BULK_DATA_BATCH_SIZE
- ) {
- logDetailed(
- `Importing '${table.title}' data :: ${i + 1} - ${Math.min(
- i + BULK_DATA_BATCH_SIZE,
- records.length
- )}`
- );
- await api.dbTableRow.bulkCreate(
- 'nc',
- projectName,
- table.id, // encodeURIComponent(table.title),
- allData.slice(i, BULK_DATA_BATCH_SIZE)
- );
- }
+ return records;
} catch (e) {
console.log(e);
}
@@ -116,9 +128,10 @@ export async function importLTARData({
api,
projectName,
insertedAssocRef = {},
- logDetailed = _str => {}
-}: // logBasic = _str => ()
-{
+ logDetailed = _str => {},
+ logBasic = _str => {},
+ records
+}: {
projectName: string;
table: { title?: string; id?: string };
fields;
@@ -127,6 +140,7 @@ export async function importLTARData({
logBasic: (string) => void;
api: Api;
insertedAssocRef: { [assocTableId: string]: boolean };
+ records?: Array<{ fields: any; id: string }>;
}) {
const assocTableMetas: Array<{
modelMeta: { id?: string; title?: string };
@@ -134,7 +148,15 @@ export async function importLTARData({
curCol: { title?: string };
refCol: { title?: string };
}> = [];
- const allData = await readAndProcessData({ table, fields, base });
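+  // prefer the records cached by importData; fall back to a fresh read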
+ const allData =
+ records ||
+ (await readAllData({
+ table,
+ fields,
+ base,
+ logDetailed,
+ logBasic
+ }));
const modelMeta: any = await api.dbTable.read(table.id);
@@ -193,8 +215,8 @@ export async function importLTARData({
i < insertData.length / ASSOC_BULK_DATA_BATCH_SIZE;
i += ASSOC_BULK_DATA_BATCH_SIZE
) {
- logDetailed(
- `Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
+ logBasic(
+ `:: Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
i + ASSOC_BULK_DATA_BATCH_SIZE,
allData.length
)}`