
feat: add log, update batch size

Signed-off-by: Pranav C <pranavxc@gmail.com>
Author: Pranav C (3 years ago)
Branch: pull/2218/head
Commit: c895b01438

Changed files:
  1. packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts (8 changes)
  2. packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts (79 changes)

packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts (8 changes)

@@ -13,7 +13,7 @@ import hash from 'object-hash';
 import dayjs from 'dayjs';
 import utc from 'dayjs/plugin/utc';
-import { importData, importLTARData } from './readAllATData';
+import { importData, importLTARData } from './readAndProcessData';
 dayjs.extend(utc);

@@ -2249,7 +2249,8 @@ export default async (
   api,
   logBasic,
   nocoBaseDataProcessing_v2,
-  sDB: syncDB
+  sDB: syncDB,
+  logDetailed
 });
 logDetailed(`Data inserted from ${ncTbl.title}`);

@@ -2265,7 +2266,8 @@ export default async (
   base,
   fields: null, //Object.values(tblLinkGroup).flat(),
   logBasic,
-  insertedAssocRef
+  insertedAssocRef,
+  logDetailed
 });
 }
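The job.ts changes thread the existing `logDetailed` callback into `importData` and `importLTARData` alongside `logBasic`. A minimal sketch of that pattern, assuming nothing beyond what the diff shows (names other than `logBasic`/`logDetailed` are hypothetical):

    type Logger = (msg: string) => void;

    // Helpers accept the logger in their options object and default it to a
    // no-op, so call sites that don't pass it keep working unchanged.
    async function importExample({
      tableTitle,
      logBasic = _str => {},
      logDetailed = _str => {}
    }: {
      tableTitle: string;
      logBasic?: Logger;
      logDetailed?: Logger;
    }): Promise<void> {
      logDetailed(`Importing '${tableTitle}' data`);
      // ... fetch and insert rows here ...
      logBasic(`Imported ${tableTitle}`);
    }

    // Caller side (as job.ts does): pass the job's logger through, e.g.
    // await importExample({ tableTitle: 'Tasks', logDetailed: console.log });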

packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAllATData.ts → packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts (79 changes)

@@ -1,16 +1,21 @@
 import { AirtableBase } from 'airtable/lib/airtable_base';
 import { Api, RelationTypes, TableType, UITypes } from 'nocodb-sdk';

-async function readAllATData({
+const BULK_DATA_BATCH_SIZE = 500;
+const ASSOC_BULK_DATA_BATCH_SIZE = 2000;
+
+async function readAndProcessData({
   table,
   fields,
   base
-}: // logBasic = _str => ()
+}: // logDetailed
+// logBasic = _str => ()
 {
   table: { title?: string };
   fields?;
   base: AirtableBase;
   logBasic?: (string) => void;
+  logDetailed?: (string) => void;
 }): Promise<Array<any>> {
   return new Promise((resolve, reject) => {
     const data = [];
@@ -55,14 +60,15 @@ export async function importData({
   base,
   api,
   nocoBaseDataProcessing_v2,
-  sDB
-}: // logBasic = _str => ()
-{
+  sDB,
+  logDetailed = _str => {}
+}: {
   projectName: string;
   table: { title?: string; id?: string };
   fields?;
   base: AirtableBase;
   logBasic: (string) => void;
+  logDetailed: (string) => void;
   api: Api<any>;
   nocoBaseDataProcessing_v2;
   sDB;
@@ -70,7 +76,7 @@ export async function importData({
   try {
     // get all data from a table
     const allData = [];
-    const records = await readAllATData({
+    const records = await readAndProcessData({
       table,
       base
     });
@@ -80,12 +86,22 @@ export async function importData({
       allData.push(r);
     }

-    for (let i = 0; i < allData.length / 2000; i += 2000) {
+    for (
+      let i = 0;
+      i < allData.length / BULK_DATA_BATCH_SIZE;
+      i += BULK_DATA_BATCH_SIZE
+    ) {
+      logDetailed(
+        `Importing '${table.title}' data :: ${i + 1} - ${Math.min(
+          i + BULK_DATA_BATCH_SIZE,
+          records.length
+        )}`
+      );
       await api.dbTableRow.bulkCreate(
         'nc',
         projectName,
         table.id, // encodeURIComponent(table.title),
-        allData.slice(i, 2000)
+        allData.slice(i, BULK_DATA_BATCH_SIZE)
       );
     }
   } catch (e) {
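For reference, the textbook chunking idiom bounds the loop index by the array length and passes an absolute end index to `slice` (its second argument is an exclusive index into the array, not a count). A minimal sketch under those semantics; `bulkCreate` below is a hypothetical stand-in for `api.dbTableRow.bulkCreate`:

    const BATCH_SIZE = 500;

    async function insertInBatches(
      rows: any[],
      bulkCreate: (chunk: any[]) => Promise<void>
    ): Promise<void> {
      for (let i = 0; i < rows.length; i += BATCH_SIZE) {
        // Each chunk covers rows[i .. i + BATCH_SIZE - 1]; slice clamps the
        // end index at rows.length, so the final partial batch is handled too.
        await bulkCreate(rows.slice(i, i + BATCH_SIZE));
      }
    }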
@@ -99,13 +115,15 @@ export async function importLTARData({
   base,
   api,
   projectName,
-  insertedAssocRef = {}
+  insertedAssocRef = {},
+  logDetailed = _str => {}
 }: // logBasic = _str => ()
 {
   projectName: string;
   table: { title?: string; id?: string };
   fields;
   base: AirtableBase;
+  logDetailed: (string) => void;
   logBasic: (string) => void;
   api: Api<any>;
   insertedAssocRef: { [assocId: string]: boolean };
@@ -116,7 +134,7 @@ export async function importLTARData({
     curCol: { title?: string };
     refCol: { title?: string };
   }> = [];
-  const allData = await readAllATData({ table, fields, base });
+  const allData = await readAndProcessData({ table, fields, base });
   const modelMeta: any = await api.dbTable.read(table.id);
@@ -162,40 +180,23 @@ export async function importLTARData({
         );
       }

-      for (let i = 0; i < insertData.length / 2000; i += 2000) {
+      for (
+        let i = 0;
+        i < insertData.length / ASSOC_BULK_DATA_BATCH_SIZE;
+        i += ASSOC_BULK_DATA_BATCH_SIZE
+      ) {
+        logDetailed(
+          `Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
+            i + ASSOC_BULK_DATA_BATCH_SIZE,
+            allData.length
+          )}`
+        );
         await api.dbTableRow.bulkCreate(
           'nc',
           projectName,
           assocMeta.modelMeta.id,
-          insertData.slice(i, 2000)
+          insertData.slice(i, ASSOC_BULK_DATA_BATCH_SIZE)
         );
       }
     }
   }
-  // for (const [key, value] of Object.entries(rec)) {
-  //   const refRowIdList: any = value;
-  //   const referenceColumnName = key;
-  //
-  //   if (refRowIdList.length) {
-  //     for (let i = 0; i < refRowIdList.length; i++) {
-  //       logDetailed(
-  //         `NC API: dbTableRow.nestedAdd ${record.id}/mm/${referenceColumnName}/${refRowIdList[0][i]}`
-  //       );
-  //
-  //       const _perfStart = recordPerfStart();
-  //       await api.dbTableRow.nestedAdd(
-  //         'noco',
-  //         projName,
-  //         table.id,
-  //         `${record.id}`,
-  //         'mm',
-  //         encodeURIComponent(referenceColumnName),
-  //         `${refRowIdList[i]}`
-  //       );
-  //       recordPerfStats(_perfStart, 'dbTableRow.nestedAdd');
-  //     }
-  //   }
-  // }
-  // }
-  // }
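The commented-out block deleted at the end of the file preserved the older per-row strategy: one `api.dbTableRow.nestedAdd` call per linked record. The helper now writes association rows with `bulkCreate` in `ASSOC_BULK_DATA_BATCH_SIZE` chunks instead, trading N round trips for roughly N / 2000. A rough sketch of the removed approach, with a hypothetical signature, kept only to show the contrast:

    async function linkPerRow(
      refRowIds: string[],
      nestedAdd: (refRowId: string) => Promise<void>
    ): Promise<void> {
      for (const refRowId of refRowIds) {
        await nestedAdd(refRowId); // one API round trip per link
      }
    }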