Browse Source

wip: data and LTAR import

Signed-off-by: Pranav C <pranavxc@gmail.com>
feat/0523-export-schema
Pranav C 3 years ago committed by Raju Udava
parent
commit
0ce2da321b
  1. 2
      packages/nc-gui/components/import/ImportFromAirtable.vue
  2. 2
      packages/nocodb/src/lib/noco/Noco.ts
  3. 9
      packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts
  4. 114
      packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts

2
packages/nc-gui/components/import/ImportFromAirtable.vue

@@ -169,7 +169,7 @@
<v-icon color="green" size="15">
mdi-loading mdi-spin
</v-icon>
<span class="caption nc-text">Syncing
<span class="caption nc-text">Importing
</span>
<!-- <div class="nc-progress" />-->
</div>

2
packages/nocodb/src/lib/noco/Noco.ts

@@ -211,7 +211,7 @@ export default class Noco {
this.router.use(cookieParser());
this.router.use(
bodyParser.json({
limit: process.env.NC_REQUEST_BODY_SIZE || 1024 * 1024
limit: process.env.NC_REQUEST_BODY_SIZE || '50mb'
})
);
this.router.use(morgan('tiny'));

9
packages/nocodb/src/lib/noco/meta/api/sync/helpers/job.ts

@@ -1261,7 +1261,7 @@ export default async (
async function nocoBaseDataProcessing_v2(sDB, table, record) {
const recordHash = hash(record);
const rec = record.fields;
const rec = { ...record.fields };
// kludge -
// trim spaces on either side of column name
@@ -2230,6 +2230,8 @@ export default async (
logBasic('Reading Records...');
const recordsMap = {};
for (let i = 0; i < ncTblList.list.length; i++) {
const _perfStart = recordPerfStart();
const ncTbl = await api.dbTable.read(ncTblList.list[i].id);
@@ -2242,7 +2244,7 @@
recordCnt = 0;
// await nocoReadData(syncDB, ncTbl);
await importData({
recordsMap[ncTbl.id] = await importData({
projectName: syncDB.projectName,
table: ncTbl,
base,
@@ -2267,7 +2269,8 @@ export default async (
fields: null, //Object.values(tblLinkGroup).flat(),
logBasic,
insertedAssocRef,
logDetailed
logDetailed,
records: recordsMap[ncTbl.id]
});
}

114
packages/nocodb/src/lib/noco/meta/api/sync/helpers/readAndProcessData.ts

@@ -1,52 +1,70 @@
import { AirtableBase } from 'airtable/lib/airtable_base';
import { Api, RelationTypes, TableType, UITypes } from 'nocodb-sdk';
const BULK_DATA_BATCH_SIZE = 500;
const BULK_DATA_BATCH_SIZE = 2000;
const ASSOC_BULK_DATA_BATCH_SIZE = 2000;
async function readAndProcessData({
async function readAllData({
table,
fields,
base
}: // logDetailed
// logBasic = _str => ()
{
base,
logBasic = _str => {},
triggerThreshold = BULK_DATA_BATCH_SIZE,
onThreshold = async _rec => {}
}: {
table: { title?: string };
fields?;
base: AirtableBase;
logBasic?: (string) => void;
logDetailed?: (string) => void;
triggerThreshold?: number;
onThreshold?: (
records: Array<{ fields: any; id: string }>,
allRecords?: Array<{ fields: any; id: string }>
) => Promise<void>;
}): Promise<Array<any>> {
return new Promise((resolve, reject) => {
const data = [];
let thresholdCbkData = [];
const selectParams: any = {
pageSize: 100
};
if (fields) selectParams.fields = fields;
base(table.title)
.select(selectParams)
.eachPage(
async function page(records, fetchNextPage) {
// console.log(JSON.stringify(records, null, 2));
// This function (`page`) will get called for each page of records.
// records.forEach(record => callback(table, record));
// logBasic(
// `:: ${table.title} / ${fields} : ${recordCnt +
// 1} ~ ${(recordCnt += 100)}`
// );
data.push(...records);
thresholdCbkData.push(...records);
logBasic(
`:: Reading '${table.title}' data :: ${Math.max(
1,
data.length - records.length
)} - ${data.length}`
);
if (thresholdCbkData.length >= triggerThreshold) {
await onThreshold(thresholdCbkData, data);
thresholdCbkData = [];
}
// To fetch the next page of records, call `fetchNextPage`.
// If there are more records, `page` will get called again.
// If there are no more records, `done` will get called.
fetchNextPage();
},
function done(err) {
async function done(err) {
if (err) {
console.error(err);
reject(err);
return reject(err);
}
if (thresholdCbkData.length) {
await onThreshold(thresholdCbkData, data);
thresholdCbkData = [];
}
resolve(data);
}
@@ -61,7 +79,8 @@ export async function importData({
api,
nocoBaseDataProcessing_v2,
sDB,
logDetailed = _str => {}
logDetailed = _str => {},
logBasic = _str => {}
}: {
projectName: string;
table: { title?: string; id?: string };
@@ -72,38 +91,31 @@ export async function importData({
api: Api<any>;
nocoBaseDataProcessing_v2;
sDB;
}) {
}): Promise<any> {
try {
// get all data from a table
const allData = [];
const records = await readAndProcessData({
// @ts-ignore
const records = await readAllData({
table,
base
});
base,
logDetailed,
logBasic,
async onThreshold(records, allRecords) {
const allData = [];
for (let i = 0; i < records.length; i++) {
const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
allData.push(r);
}
for (
let i = 0;
i < allData.length / BULK_DATA_BATCH_SIZE;
i += BULK_DATA_BATCH_SIZE
) {
logDetailed(
`Importing '${table.title}' data :: ${i + 1} - ${Math.min(
i + BULK_DATA_BATCH_SIZE,
records.length
)}`
);
await api.dbTableRow.bulkCreate(
'nc',
projectName,
table.id, // encodeURIComponent(table.title),
allData.slice(i, BULK_DATA_BATCH_SIZE)
logBasic(
`:: Importing '${table.title}' data :: ${allRecords.length -
records.length +
1} - ${allRecords.length}`
);
await api.dbTableRow.bulkCreate('nc', projectName, table.id, allData);
}
});
return records;
} catch (e) {
console.log(e);
}
@@ -116,9 +128,10 @@ export async function importLTARData({
api,
projectName,
insertedAssocRef = {},
logDetailed = _str => {}
}: // logBasic = _str => ()
{
logDetailed = _str => {},
logBasic = _str => {},
records
}: {
projectName: string;
table: { title?: string; id?: string };
fields;
@@ -127,6 +140,7 @@ export async function importLTARData({
logBasic: (string) => void;
api: Api<any>;
insertedAssocRef: { [assocTableId: string]: boolean };
records?: Array<{ fields: any; id: string }>;
}) {
const assocTableMetas: Array<{
modelMeta: { id?: string; title?: string };
@@ -134,7 +148,15 @@ export async function importLTARData({
curCol: { title?: string };
refCol: { title?: string };
}> = [];
const allData = await readAndProcessData({ table, fields, base });
const allData =
records ||
(await readAllData({
table,
fields,
base,
logDetailed,
logBasic
}));
const modelMeta: any = await api.dbTable.read(table.id);
@@ -193,8 +215,8 @@ export async function importLTARData({
i < insertData.length / ASSOC_BULK_DATA_BATCH_SIZE;
i += ASSOC_BULK_DATA_BATCH_SIZE
) {
logDetailed(
`Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
logBasic(
`:: Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
i + ASSOC_BULK_DATA_BATCH_SIZE,
allData.length
)}`

Loading…
Cancel
Save