
wip: data and LTAR import

Signed-off-by: Pranav C <pranavxc@gmail.com>
feat/0523-export-schema
Pranav C authored 3 years ago, committed by Raju Udava
commit 0ce2da321b

4 changed files:
1. packages/nc-gui/components/import/ImportFromAirtable.vue (2 lines changed)
2. packages/nocodb/src/lib/noco/Noco.ts (2 lines changed)
3. packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts (9 lines changed)
4. packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts (124 lines changed)

packages/nc-gui/components/import/ImportFromAirtable.vue (2 lines changed)

@@ -169,7 +169,7 @@
       <v-icon color="green" size="15">
         mdi-loading mdi-spin
       </v-icon>
-      <span class="caption nc-text">Syncing
+      <span class="caption nc-text">Importing
       </span>
       <!-- <div class="nc-progress" />-->
     </div>

packages/nocodb/src/lib/noco/Noco.ts (2 lines changed)

@@ -211,7 +211,7 @@ export default class Noco {
     this.router.use(cookieParser());
     this.router.use(
       bodyParser.json({
-        limit: process.env.NC_REQUEST_BODY_SIZE || 1024 * 1024
+        limit: process.env.NC_REQUEST_BODY_SIZE || '50mb'
       })
     );
     this.router.use(morgan('tiny'));
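
Note on the `limit` change: `bodyParser.json` accepts either a numeric byte count or a human-readable size string (parsed via the `bytes` package), so the old `1024 * 1024` (1 MB) default and the new `'50mb'` are both valid; the larger default simply gives bulk import payloads more headroom. A minimal standalone sketch of the behavior (the Express app here is hypothetical, not NocoDB's):

```ts
import express from 'express';
import bodyParser from 'body-parser';

const app = express();

// Requests whose JSON body exceeds the limit are rejected with HTTP 413.
// NC_REQUEST_BODY_SIZE still takes precedence when set, e.g.
//   NC_REQUEST_BODY_SIZE=100mb
app.use(
  bodyParser.json({ limit: process.env.NC_REQUEST_BODY_SIZE || '50mb' })
);

app.listen(8080);
```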

packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts (9 lines changed)

@@ -1261,7 +1261,7 @@ export default async (
   async function nocoBaseDataProcessing_v2(sDB, table, record) {
     const recordHash = hash(record);
-    const rec = record.fields;
+    const rec = { ...record.fields };

     // kludge -
     // trim spaces on either side of column name
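
Why the spread matters: `nocoBaseDataProcessing_v2` mutates `rec` afterwards (the "kludge" trims column names), and with records now cached and replayed for the LTAR pass, mutating `record.fields` in place would corrupt the cached originals. A small illustration of the aliasing hazard (data hypothetical):

```ts
const record = { id: 'rec1', fields: { ' Name ': 'Ada' } };

// Old code: `rec` aliased record.fields, so trimming a key rewrote the
// cached record itself.
const rec = record.fields;
rec['Name'] = rec[' Name '];
delete rec[' Name '];
console.log(record.fields); // { Name: 'Ada' }: the original key is gone

// New code: a shallow copy keeps such mutations local.
const recCopy = { ...record.fields };
```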
@@ -2230,6 +2230,8 @@ export default async (
     logBasic('Reading Records...');

+    const recordsMap = {};
+
     for (let i = 0; i < ncTblList.list.length; i++) {
       const _perfStart = recordPerfStart();
       const ncTbl = await api.dbTable.read(ncTblList.list[i].id);
@@ -2242,7 +2244,7 @@ export default async (
       recordCnt = 0;
       // await nocoReadData(syncDB, ncTbl);

-      await importData({
+      recordsMap[ncTbl.id] = await importData({
         projectName: syncDB.projectName,
         table: ncTbl,
         base,
@@ -2267,7 +2269,8 @@ export default async (
         fields: null, //Object.values(tblLinkGroup).flat(),
         logBasic,
         insertedAssocRef,
-        logDetailed
+        logDetailed,
+        records: recordsMap[ncTbl.id]
       });
     }
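
Taken together these hunks make the sync read each Airtable table only once: `importData` now returns the records it fetched, the job caches them per table id in `recordsMap`, and the LTAR pass consumes the cache instead of calling the Airtable API again. A condensed sketch of the flow (option bags abridged; how the two passes are looped is not fully visible in the diff):

```ts
const recordsMap: Record<string, Array<{ id: string; fields: any }>> = {};

for (let i = 0; i < ncTblList.list.length; i++) {
  const ncTbl = await api.dbTable.read(ncTblList.list[i].id);

  // Data pass: streams rows into NocoDB and returns everything it read.
  recordsMap[ncTbl.id] = await importData({ table: ncTbl /* , ... */ });
}

for (const tblId of Object.keys(recordsMap)) {
  const ncTbl = await api.dbTable.read(tblId);

  // LTAR pass: reuses the cached records; no second Airtable read.
  await importLTARData({ table: ncTbl, records: recordsMap[tblId] /* , ... */ });
}
```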

packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts (124 lines changed)

@@ -1,52 +1,70 @@
 import { AirtableBase } from 'airtable/lib/airtable_base';
 import { Api, RelationTypes, TableType, UITypes } from 'nocodb-sdk';

-const BULK_DATA_BATCH_SIZE = 500;
+const BULK_DATA_BATCH_SIZE = 2000;
 const ASSOC_BULK_DATA_BATCH_SIZE = 2000;

-async function readAndProcessData({
+async function readAllData({
   table,
   fields,
-  base
-}: // logDetailed
-// logBasic = _str => ()
-{
+  base,
+  logBasic = _str => {},
+  triggerThreshold = BULK_DATA_BATCH_SIZE,
+  onThreshold = async _rec => {}
+}: {
   table: { title?: string };
   fields?;
   base: AirtableBase;
   logBasic?: (string) => void;
   logDetailed?: (string) => void;
+  triggerThreshold?: number;
+  onThreshold?: (
+    records: Array<{ fields: any; id: string }>,
+    allRecords?: Array<{ fields: any; id: string }>
+  ) => Promise<void>;
 }): Promise<Array<any>> {
   return new Promise((resolve, reject) => {
     const data = [];
+    let thresholdCbkData = [];
     const selectParams: any = {
       pageSize: 100
     };
     if (fields) selectParams.fields = fields;
     base(table.title)
       .select(selectParams)
       .eachPage(
         async function page(records, fetchNextPage) {
-          // console.log(JSON.stringify(records, null, 2));
-          // This function (`page`) will get called for each page of records.
-          // records.forEach(record => callback(table, record));
-          // logBasic(
-          //   `:: ${table.title} / ${fields} : ${recordCnt +
-          //     1} ~ ${(recordCnt += 100)}`
-          // );
           data.push(...records);
+          thresholdCbkData.push(...records);
+
+          logBasic(
+            `:: Reading '${table.title}' data :: ${Math.max(
+              1,
+              data.length - records.length
+            )} - ${data.length}`
+          );
+
+          if (thresholdCbkData.length >= triggerThreshold) {
+            await onThreshold(thresholdCbkData, data);
+            thresholdCbkData = [];
+          }
+
           // To fetch the next page of records, call `fetchNextPage`.
           // If there are more records, `page` will get called again.
           // If there are no more records, `done` will get called.
           fetchNextPage();
         },
-        function done(err) {
+        async function done(err) {
           if (err) {
             console.error(err);
-            reject(err);
+            return reject(err);
           }
+          if (thresholdCbkData.length) {
+            await onThreshold(thresholdCbkData, data);
+            thresholdCbkData = [];
+          }
           resolve(data);
         }
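
The heart of the rewrite is this threshold-flush pattern: every page lands in both the full result (`data`) and a side buffer (`thresholdCbkData`); once the buffer reaches `triggerThreshold` records, and again for any remainder in `done`, `onThreshold` is awaited with the pending chunk plus everything read so far, then the buffer resets. Because `fetchNextPage()` is only called after that `await`, downstream inserts naturally backpressure the Airtable reads. The same pattern as a generic sketch, decoupled from Airtable (the async-iterable page source is hypothetical):

```ts
type Rec = { id: string; fields: any };

async function readAllPaged(
  pages: AsyncIterable<Rec[]>, // any paged source, e.g. an API client
  triggerThreshold = 2000,
  onThreshold: (chunk: Rec[], all: Rec[]) => Promise<void> = async () => {}
): Promise<Rec[]> {
  const all: Rec[] = [];
  let pending: Rec[] = [];

  for await (const page of pages) {
    all.push(...page);
    pending.push(...page);

    // Flush a full batch downstream before pulling the next page,
    // so the consumer backpressures the producer.
    if (pending.length >= triggerThreshold) {
      await onThreshold(pending, all);
      pending = [];
    }
  }

  // Flush whatever is left over as a final partial batch.
  if (pending.length) await onThreshold(pending, all);
  return all;
}
```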
@@ -61,7 +79,8 @@ export async function importData({
   api,
   nocoBaseDataProcessing_v2,
   sDB,
-  logDetailed = _str => {}
+  logDetailed = _str => {},
+  logBasic = _str => {}
 }: {
   projectName: string;
   table: { title?: string; id?: string };
@@ -72,38 +91,31 @@ export async function importData({
   api: Api<any>;
   nocoBaseDataProcessing_v2;
   sDB;
-}) {
+}): Promise<any> {
   try {
-    // get all data from a table
-    const allData = [];
-    const records = await readAndProcessData({
+    // @ts-ignore
+    const records = await readAllData({
       table,
-      base
-    });
-
-    for (let i = 0; i < records.length; i++) {
-      const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
-      allData.push(r);
-    }
-
-    for (
-      let i = 0;
-      i < allData.length / BULK_DATA_BATCH_SIZE;
-      i += BULK_DATA_BATCH_SIZE
-    ) {
-      logDetailed(
-        `Importing '${table.title}' data :: ${i + 1} - ${Math.min(
-          i + BULK_DATA_BATCH_SIZE,
-          records.length
-        )}`
-      );
-      await api.dbTableRow.bulkCreate(
-        'nc',
-        projectName,
-        table.id, // encodeURIComponent(table.title),
-        allData.slice(i, BULK_DATA_BATCH_SIZE)
-      );
-    }
+      base,
+      logDetailed,
+      logBasic,
+      async onThreshold(records, allRecords) {
+        const allData = [];
+        for (let i = 0; i < records.length; i++) {
+          const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
+          allData.push(r);
+        }
+
+        logBasic(
+          `:: Importing '${table.title}' data :: ${allRecords.length -
+            records.length +
+            1} - ${allRecords.length}`
+        );
+
+        await api.dbTableRow.bulkCreate('nc', projectName, table.id, allData);
+      }
+    });
+
+    return records;
   } catch (e) {
     console.log(e);
   }
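
Beyond streaming, this rewrite also retires a buggy batching loop: the old code compared the loop index against a batch count (`allData.length / BULK_DATA_BATCH_SIZE`) while stepping by `BULK_DATA_BATCH_SIZE`, and sliced with a fixed end index (`allData.slice(i, BULK_DATA_BATCH_SIZE)`), so for any table larger than one batch only the first `BULK_DATA_BATCH_SIZE` rows were ever inserted. For reference, a corrected buffered version would read:

```ts
for (let i = 0; i < allData.length; i += BULK_DATA_BATCH_SIZE) {
  logDetailed(
    `Importing '${table.title}' data :: ${i + 1} - ${Math.min(
      i + BULK_DATA_BATCH_SIZE,
      allData.length
    )}`
  );
  await api.dbTableRow.bulkCreate(
    'nc',
    projectName,
    table.id,
    allData.slice(i, i + BULK_DATA_BATCH_SIZE) // end index, not batch size
  );
}
```

The new code sidesteps the issue entirely by inserting each chunk inside `onThreshold` as it arrives.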
@@ -116,9 +128,10 @@ export async function importLTARData({
   api,
   projectName,
   insertedAssocRef = {},
-  logDetailed = _str => {}
-}: // logBasic = _str => ()
-{
+  logDetailed = _str => {},
+  logBasic = _str => {},
+  records
+}: {
   projectName: string;
   table: { title?: string; id?: string };
   fields;
@@ -127,6 +140,7 @@ export async function importLTARData({
   logBasic: (string) => void;
   api: Api<any>;
   insertedAssocRef: { [assocTableId: string]: boolean };
+  records?: Array<{ fields: any; id: string }>;
 }) {
   const assocTableMetas: Array<{
     modelMeta: { id?: string; title?: string };
@@ -134,7 +148,15 @@ export async function importLTARData({
     curCol: { title?: string };
     refCol: { title?: string };
   }> = [];
-  const allData = await readAndProcessData({ table, fields, base });
+
+  const allData =
+    records ||
+    (await readAllData({
+      table,
+      fields,
+      base,
+      logDetailed,
+      logBasic
+    }));

   const modelMeta: any = await api.dbTable.read(table.id);
@@ -193,8 +215,8 @@ export async function importLTARData({
       i < insertData.length / ASSOC_BULK_DATA_BATCH_SIZE;
       i += ASSOC_BULK_DATA_BATCH_SIZE
     ) {
-      logDetailed(
-        `Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
+      logBasic(
+        `:: Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
           i + ASSOC_BULK_DATA_BATCH_SIZE,
           allData.length
         )}`
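
With the optional `records` parameter, `importLTARData` short-circuits to the cached rows and only falls back to `readAllData` when no cache is supplied, so a full sync reads each table from Airtable exactly once. The two call shapes, abridged:

```ts
// With the cache produced by the data pass: no Airtable read.
await importLTARData({ table, records: recordsMap[table.id] /* , ... */ });

// Without a cache (records undefined): falls back to readAllData().
await importLTARData({ table /* , ... */ });
```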
