@@ -1,52 +1,70 @@
 import { AirtableBase } from 'airtable/lib/airtable_base';
 import { Api, RelationTypes, TableType, UITypes } from 'nocodb-sdk';
 
-const BULK_DATA_BATCH_SIZE = 500;
+const BULK_DATA_BATCH_SIZE = 2000;
 const ASSOC_BULK_DATA_BATCH_SIZE = 2000;
 
-async function readAndProcessData({
+async function readAllData({
   table,
   fields,
-  base
-}: // logDetailed
-// logBasic = _str => ()
-{
+  base,
+  logBasic = _str => {},
+  triggerThreshold = BULK_DATA_BATCH_SIZE,
+  onThreshold = async _rec => {}
+}: {
   table: { title?: string };
   fields?;
   base: AirtableBase;
   logBasic?: (string) => void;
   logDetailed?: (string) => void;
+  triggerThreshold?: number;
+  onThreshold?: (
+    records: Array<{ fields: any; id: string }>,
+    allRecords?: Array<{ fields: any; id: string }>
+  ) => Promise<void>;
 }): Promise<Array<any>> {
   return new Promise((resolve, reject) => {
     const data = [];
+    let thresholdCbkData = [];
+
     const selectParams: any = {
       pageSize: 100
     };
 
     if (fields) selectParams.fields = fields;
 
     base(table.title)
       .select(selectParams)
       .eachPage(
         async function page(records, fetchNextPage) {
-          // console.log(JSON.stringify(records, null, 2));
-
-          // This function (`page`) will get called for each page of records.
-          // records.forEach(record => callback(table, record));
-          // logBasic(
-          //   `:: ${table.title} / ${fields} : ${recordCnt +
-          //   1} ~ ${(recordCnt += 100)}`
-          // );
-
           data.push(...records);
+          thresholdCbkData.push(...records);
+
+          logBasic(
+            `:: Reading '${table.title}' data :: ${Math.max(
+              1,
+              data.length - records.length
+            )} - ${data.length}`
+          );
+
+          if (thresholdCbkData.length >= triggerThreshold) {
+            await onThreshold(thresholdCbkData, data);
+            thresholdCbkData = [];
+          }
+
           // To fetch the next page of records, call `fetchNextPage`.
           // If there are more records, `page` will get called again.
           // If there are no more records, `done` will get called.
           fetchNextPage();
         },
-        function done(err) {
+        async function done(err) {
           if (err) {
             console.error(err);
-            reject(err);
+            return reject(err);
           }
+
+          if (thresholdCbkData.length) {
+            await onThreshold(thresholdCbkData, data);
+            thresholdCbkData = [];
+          }
           resolve(data);
         }
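The core of `readAllData` above is a buffer-and-flush pattern: every Airtable page is appended to both `data` and `thresholdCbkData`, and whenever the buffer reaches `triggerThreshold` it is handed to `onThreshold` and cleared, with one final flush in `done` for the leftover partial batch. A minimal standalone sketch of that pattern (illustrative only — `streamInBatches`, `Rec`, and the `fetchPages` stand-in for Airtable's `select().eachPage()` are assumptions, not part of this diff):

```ts
// Illustrative sketch of the buffer-and-flush pattern used by readAllData above.
// `fetchPages` and the record shape are hypothetical stand-ins for the Airtable
// page stream; only the flushing logic mirrors the diff.
type Rec = { id: string; fields: any };

async function streamInBatches(
  fetchPages: () => AsyncIterable<Rec[]>,
  triggerThreshold: number,
  onThreshold: (batch: Rec[], all: Rec[]) => Promise<void>
): Promise<Rec[]> {
  const all: Rec[] = [];
  let buffer: Rec[] = [];

  for await (const page of fetchPages()) {
    all.push(...page);
    buffer.push(...page);

    // Flush a full batch as soon as the buffer crosses the threshold.
    if (buffer.length >= triggerThreshold) {
      await onThreshold(buffer, all);
      buffer = [];
    }
  }

  // Final flush for the partial batch left over after the last page.
  if (buffer.length) {
    await onThreshold(buffer, all);
  }
  return all;
}
```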
|
|
@@ -61,7 +79,8 @@ export async function importData({
   api,
   nocoBaseDataProcessing_v2,
   sDB,
-  logDetailed = _str => {}
+  logDetailed = _str => {},
+  logBasic = _str => {}
 }: {
   projectName: string;
   table: { title?: string; id?: string };
@@ -72,38 +91,31 @@ export async function importData({
   api: Api<any>;
   nocoBaseDataProcessing_v2;
   sDB;
-}) {
+}): Promise<any> {
   try {
     // get all data from a table
     // @ts-ignore
-    const allData = [];
-    const records = await readAndProcessData({
+    const records = await readAllData({
       table,
-      base
-    });
-
-    for (let i = 0; i < records.length; i++) {
-      const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
-      allData.push(r);
-    }
-
-    for (
-      let i = 0;
-      i < allData.length / BULK_DATA_BATCH_SIZE;
-      i += BULK_DATA_BATCH_SIZE
-    ) {
-      logDetailed(
-        `Importing '${table.title}' data :: ${i + 1} - ${Math.min(
-          i + BULK_DATA_BATCH_SIZE,
-          records.length
-        )}`
-      );
-      await api.dbTableRow.bulkCreate(
-        'nc',
-        projectName,
-        table.id, // encodeURIComponent(table.title),
-        allData.slice(i, BULK_DATA_BATCH_SIZE)
-      );
-    }
+      base,
+      logDetailed,
+      logBasic,
+      async onThreshold(records, allRecords) {
+        const allData = [];
+        for (let i = 0; i < records.length; i++) {
+          const r = await nocoBaseDataProcessing_v2(sDB, table, records[i]);
+          allData.push(r);
+        }
+
+        logBasic(
+          `:: Importing '${table.title}' data :: ${allRecords.length -
+            records.length +
+            1} - ${allRecords.length}`
+        );
+        await api.dbTableRow.bulkCreate('nc', projectName, table.id, allData);
+      }
+    });
+
+    return records;
   } catch (e) {
     console.log(e);
   }
@@ -116,9 +128,10 @@ export async function importLTARData({
   api,
   projectName,
   insertedAssocRef = {},
-  logDetailed = _str => {}
-}: // logBasic = _str => ()
-{
+  logDetailed = _str => {},
+  logBasic = _str => {},
+  records
+}: {
   projectName: string;
   table: { title?: string; id?: string };
   fields;
@@ -127,6 +140,7 @@ export async function importLTARData({
   logBasic: (string) => void;
   api: Api<any>;
   insertedAssocRef: { [assocTableId: string]: boolean };
+  records?: Array<{ fields: any; id: string }>;
 }) {
   const assocTableMetas: Array<{
     modelMeta: { id?: string; title?: string };
@@ -134,7 +148,15 @@ export async function importLTARData({
     curCol: { title?: string };
     refCol: { title?: string };
   }> = [];
-  const allData = await readAndProcessData({ table, fields, base });
+  const allData =
+    records ||
+    (await readAllData({
+      table,
+      fields,
+      base,
+      logDetailed,
+      logBasic
+    }));
 
   const modelMeta: any = await api.dbTable.read(table.id);
 
@@ -193,8 +215,8 @@ export async function importLTARData({
       i < insertData.length / ASSOC_BULK_DATA_BATCH_SIZE;
       i += ASSOC_BULK_DATA_BATCH_SIZE
     ) {
-      logDetailed(
-        `Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
+      logBasic(
+        `:: Importing '${table.title}' LTAR data :: ${i + 1} - ${Math.min(
           i + ASSOC_BULK_DATA_BATCH_SIZE,
           allData.length
         )}`
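Read together, the two import paths are meant to chain: `importData` now streams each flushed batch straight into `api.dbTableRow.bulkCreate` and still resolves with everything it read, and that same array can be handed to `importLTARData` via the new `records` parameter so the table is not read from Airtable a second time. A hedged sketch of that call flow (the `syncTable` wrapper, the import path, and the option plumbing are assumptions, not shown in this diff):

```ts
// Hypothetical glue code: the option object mirrors the signatures in the diff;
// base, api, sDB, etc. are assumed to be created by the surrounding sync job.
import { importData, importLTARData } from './readAndProcessData'; // assumed path

async function syncTable(opts: {
  projectName: string;
  table: { title?: string; id?: string };
  fields?: string[];
  base: any; // AirtableBase
  api: any; // Api<any>
  nocoBaseDataProcessing_v2: any;
  sDB: any;
  insertedAssocRef: { [assocTableId: string]: boolean };
  logBasic: (s: string) => void;
  logDetailed: (s: string) => void;
}) {
  // Streams batches into bulkCreate via onThreshold and returns all rows read.
  const records = await importData(opts);

  // Reuse those rows so the LTAR pass skips a second readAllData call.
  await importLTARData({ ...opts, records });
}
```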
|
|
|