mirror of https://github.com/nocodb/nocodb
mertmit
1 year ago
96 changed files with 17843 additions and 8282 deletions
@ -0,0 +1,100 @@
|
||||
import type { Socket } from 'socket.io-client' |
||||
import io from 'socket.io-client' |
||||
import { defineNuxtPlugin, useGlobal, watch } from '#imports' |
||||
|
||||
export default defineNuxtPlugin(async (nuxtApp) => { |
||||
const { appInfo } = $(useGlobal()) |
||||
|
||||
let socket: Socket | null = null |
||||
let messageIndex = 0 |
||||
|
||||
const init = async (token: string) => { |
||||
try { |
||||
if (socket) socket.disconnect() |
||||
|
||||
const url = new URL(appInfo.ncSiteUrl, window.location.href.split(/[?#]/)[0]) |
||||
|
||||
socket = io(`${url.href}jobs`, { |
||||
extraHeaders: { 'xc-auth': token }, |
||||
}) |
||||
|
||||
socket.on('connect_error', (e) => { |
||||
console.error(e) |
||||
socket?.disconnect() |
||||
}) |
||||
} catch {} |
||||
} |
||||
|
||||
if (nuxtApp.$state.signedIn.value) { |
||||
await init(nuxtApp.$state.token.value) |
||||
} |
||||
|
||||
const send = (name: string, data: any) => { |
||||
if (socket) { |
||||
const _id = messageIndex++ |
||||
socket.emit(name, { _id, data }) |
||||
return _id |
||||
} |
||||
} |
||||
|
||||
const jobs = { |
||||
subscribe( |
||||
job: { id: string; name: string } | any, |
||||
subscribedCb?: () => void, |
||||
statusCb?: (status: 'active' | 'completed' | 'failed' | 'refresh', error?: any) => void, |
||||
logCb?: (data: { message: string }) => void, |
||||
) { |
||||
const logFn = (data: { id: string; name: string; data: { message: string } }) => { |
||||
if (data.id === job.id) { |
||||
if (logCb) logCb(data.data) |
||||
} |
||||
} |
||||
const statusFn = (data: any) => { |
||||
if (data.id === job.id) { |
||||
if (statusCb) statusCb(data.status, data.error) |
||||
if (data.status === 'completed' || data.status === 'failed') { |
||||
socket?.off('status', statusFn) |
||||
socket?.off('log', logFn) |
||||
} |
||||
} |
||||
} |
||||
|
||||
const _id = send('subscribe', job) |
||||
|
||||
const subscribeFn = (data: { _id: number; name: string; id: string }) => { |
||||
if (data._id === _id) { |
||||
if (data.id !== job.id || data.name !== job.name) { |
||||
job.id = data.id |
||||
job.name = data.name |
||||
} |
||||
if (subscribedCb) subscribedCb() |
||||
socket?.on('log', logFn) |
||||
socket?.on('status', statusFn) |
||||
socket?.off('subscribed', subscribeFn) |
||||
} |
||||
} |
||||
socket?.on('subscribed', subscribeFn) |
||||
}, |
||||
getStatus(name: string, id: string): Promise<string> { |
||||
return new Promise((resolve) => { |
||||
if (socket) { |
||||
const _id = send('status', { name, id }) |
||||
const tempFn = (data: any) => { |
||||
if (data._id === _id) { |
||||
resolve(data.status) |
||||
socket?.off('status', tempFn) |
||||
} |
||||
} |
||||
socket.on('status', tempFn) |
||||
} |
||||
}) |
||||
}, |
||||
} |
||||
|
||||
watch((nuxtApp.$state as ReturnType<typeof useGlobal>).token, (newToken, oldToken) => { |
||||
if (newToken && newToken !== oldToken) init(newToken) |
||||
else if (!newToken) socket?.disconnect() |
||||
}) |
||||
|
||||
nuxtApp.provide('jobs', jobs) |
||||
}) |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,19 @@
|
||||
import { Router } from 'express'; |
||||
import ncMetaAclMw from '../../meta/helpers/ncMetaAclMw'; |
||||
import { exportService } from '../../services'; |
||||
import type { Request, Response } from 'express'; |
||||
|
||||
export async function exportBase(req: Request, res: Response) { |
||||
res.json( |
||||
await exportService.exportBase({ baseId: req.params.baseId, path: req.body.path }) |
||||
); |
||||
} |
||||
|
||||
// Router for project/base scoped export endpoints; mergeParams keeps
// :projectId/:baseId from any parent router available here.
const router = Router({ mergeParams: true });

// POST /api/v1/db/meta/export/:projectId/:baseId
// Guarded by the meta ACL middleware under the 'exportBase' permission.
router.post(
  '/api/v1/db/meta/export/:projectId/:baseId',
  ncMetaAclMw(exportBase, 'exportBase')
);

export default router;
@ -0,0 +1,39 @@
|
||||
import { Router } from 'express'; |
||||
import ncMetaAclMw from '../../meta/helpers/ncMetaAclMw'; |
||||
import { importService } from '../../services'; |
||||
import type { Request, Response } from 'express'; |
||||
|
||||
export async function importModels(req: Request, res: Response) { |
||||
const { body, ...rest } = req; |
||||
res.json( |
||||
await importService.importModels({ |
||||
user: (req as any).user, |
||||
projectId: req.params.projectId, |
||||
baseId: req.params.baseId, |
||||
data: Array.isArray(body) ? body : body.models, |
||||
req: rest, |
||||
}) |
||||
); |
||||
} |
||||
|
||||
export async function importBase(req: Request, res: Response) { |
||||
const { body, ...rest } = req; |
||||
res.json( |
||||
await importService.importBase({ |
||||
user: (req as any).user, |
||||
projectId: req.params.projectId, |
||||
baseId: req.params.baseId, |
||||
src: body.src, |
||||
req: rest, |
||||
}) |
||||
); |
||||
} |
||||
|
||||
// Router for project/base scoped import endpoints; mergeParams keeps
// :projectId/:baseId from any parent router available here.
const router = Router({ mergeParams: true });

// POST /api/v1/db/meta/import/:projectId/:baseId
// Guarded by the meta ACL middleware under the 'importBase' permission.
// NOTE(review): only importBase is routed here; importModels appears to be
// invoked elsewhere — confirm against the service wiring.
router.post(
  '/api/v1/db/meta/import/:projectId/:baseId',
  ncMetaAclMw(importBase, 'importBase')
);

export default router;
@ -0,0 +1,7 @@
|
||||
import exportController from './export.ctl'; |
||||
import importController from './import.ctl'; |
||||
|
||||
// Aggregates the sync controllers (export/import) into a single default
// export for registration by the parent router/module.
export default {
  exportController,
  importController,
};
@ -0,0 +1,493 @@
|
||||
import { NcError } from './../../meta/helpers/catchError'; |
||||
import { UITypes, ViewTypes } from 'nocodb-sdk'; |
||||
import { Project, Base, Model, View, LinkToAnotherRecordColumn } from '../../models'; |
||||
import { dataService } from '..'; |
||||
import { getViewAndModelByAliasOrId } from '../dbData/helpers'; |
||||
import { Readable } from 'stream'; |
||||
import NcPluginMgrv2 from '../../meta/helpers/NcPluginMgrv2'; |
||||
import { unparse } from 'papaparse'; |
||||
import { IStorageAdapterV2 } from 'nc-plugin'; |
||||
|
||||
/* |
||||
{ |
||||
"entity": "project", |
||||
"bases": [ |
||||
### current scope |
||||
{ |
||||
"entity": "base", |
||||
"models": [ |
||||
{ |
||||
"entity": "model", |
||||
"model": {}, |
||||
"views": [] |
||||
} |
||||
] |
||||
} |
||||
### end current scope |
||||
] |
||||
} |
||||
*/ |
||||
|
||||
async function generateBaseIdMap(base: Base, idMap: Map<string, string>) { |
||||
idMap.set(base.project_id, base.project_id); |
||||
idMap.set(base.id, `${base.project_id}::${base.id}`); |
||||
const models = await base.getModels(); |
||||
|
||||
for (const md of models) { |
||||
idMap.set(md.id, `${base.project_id}::${base.id}::${md.id}`); |
||||
await md.getColumns(); |
||||
for (const column of md.columns) { |
||||
idMap.set(column.id, `${idMap.get(md.id)}::${column.id}`); |
||||
} |
||||
} |
||||
|
||||
return models; |
||||
} |
||||
|
||||
/**
 * Serializes the given models (columns, col-options, views, filters, sorts)
 * into an export-friendly structure. All db ids are rewritten to structured
 * ids of the form `projectId::baseId::modelId[::columnId]` via `idMap`, so
 * the export is portable across installations.
 *
 * Returns the serialized model list, or the NcError.badRequest result when
 * a model id cannot be resolved.
 *
 * NOTE(review): this mutates the loaded model/view objects in place
 * (colOptions, view.filter.children, view.sorts, view.view) — callers must
 * not reuse them afterwards expecting db ids.
 */
async function serializeModels(param: { modelId: string[] }) {
  const serializedModels = [];

  // db id to structured id
  const idMap = new Map<string, string>();

  // caches so each project/base is fetched at most once
  const projects: Project[] = []
  const bases: Base[] = []
  const modelsMap = new Map<string, Model[]>();

  for (const modelId of param.modelId) {
    const model = await Model.get(modelId);

    if (!model) return NcError.badRequest(`Model not found for id '${modelId}'`);

    const fndProject = projects.find(p => p.id === model.project_id)
    const project = fndProject || await Project.get(model.project_id);

    const fndBase = bases.find(b => b.id === model.base_id)
    const base = fndBase || await Base.get(model.base_id);

    if (!fndProject) projects.push(project);
    if (!fndBase) bases.push(base);

    // populate structured ids for the whole base once per base
    if (!modelsMap.has(base.id)) {
      modelsMap.set(base.id, await generateBaseIdMap(base, idMap));
    }

    await model.getColumns();
    await model.getViews();

    // rewrite column options: fk references -> structured ids,
    // volatile/db-local fields dropped
    for (const column of model.columns) {
      await column.getColOptions();
      if (column.colOptions) {
        for (const [k, v] of Object.entries(column.colOptions)) {
          switch (k) {
            case 'fk_mm_child_column_id':
            case 'fk_mm_parent_column_id':
            case 'fk_mm_model_id':
            case 'fk_parent_column_id':
            case 'fk_child_column_id':
            case 'fk_related_model_id':
            case 'fk_relation_column_id':
            case 'fk_lookup_column_id':
            case 'fk_rollup_column_id':
              column.colOptions[k] = idMap.get(v as string);
              break;
            case 'options':
              // select options keep only their content, not db identity
              for (const o of column.colOptions['options']) {
                delete o.id;
                delete o.fk_column_id;
              }
              break;
            case 'formula':
              // rewrite every {{colId}} token inside the formula text
              column.colOptions[k] = column.colOptions[k].replace(/(?<=\{\{).*?(?=\}\})/gm, (match) => idMap.get(match));
              break;
            case 'id':
            case 'created_at':
            case 'updated_at':
            case 'fk_column_id':
              delete column.colOptions[k];
              break;
          }
        }
      }
    }

    for (const view of model.views) {
      idMap.set(view.id, `${idMap.get(model.id)}::${view.id}`);
      await view.getColumns();
      await view.getFilters();
      await view.getSorts();

      // serialize filters with structured ids; group filters drop
      // comparison fields that only apply to leaf filters
      if (view.filter) {
        const export_filters = []
        for (const fl of view.filter.children) {
          const tempFl = {
            id: `${idMap.get(view.id)}::${fl.id}`,
            fk_column_id: idMap.get(fl.fk_column_id),
            fk_parent_id: fl.fk_parent_id,
            is_group: fl.is_group,
            logical_op: fl.logical_op,
            comparison_op: fl.comparison_op,
            comparison_sub_op: fl.comparison_sub_op,
            value: fl.value,
          }
          if (tempFl.is_group) {
            delete tempFl.comparison_op;
            delete tempFl.comparison_sub_op;
            delete tempFl.value;
          }
          export_filters.push(tempFl)
        }
        view.filter.children = export_filters;
      }

      // serialize sorts: only the referenced column and direction matter
      if (view.sorts) {
        const export_sorts = []
        for (const sr of view.sorts) {
          const tempSr = {
            fk_column_id: idMap.get(sr.fk_column_id),
            direction: sr.direction,
          }
          export_sorts.push(tempSr)
        }
        view.sorts = export_sorts;
      }

      // rewrite view-type-specific options (grid/gallery/kanban/form row)
      if (view.view) {
        for (const [k, v] of Object.entries(view.view)) {
          switch (k) {
            case 'fk_column_id':
            case 'fk_cover_image_col_id':
            case 'fk_grp_col_id':
              view.view[k] = idMap.get(v as string);
              break;
            case 'meta':
              if (view.type === ViewTypes.KANBAN) {
                // kanban meta is keyed by column id and holds per-option
                // entries referencing columns; rewrite both levels
                // (inner k/v intentionally shadow the outer loop vars)
                const meta = JSON.parse(view.view.meta as string) as Record<string, any>;
                for (const [k, v] of Object.entries(meta)) {
                  const colId = idMap.get(k as string);
                  for (const op of v) {
                    op.fk_column_id = idMap.get(op.fk_column_id);
                    delete op.id;
                  }
                  meta[colId] = v;
                  delete meta[k];
                }
                view.view.meta = meta;
              }
              break;
            case 'created_at':
            case 'updated_at':
            case 'fk_view_id':
            case 'project_id':
            case 'base_id':
            case 'uuid':
              // db-local / volatile fields are not exported
              delete view.view[k];
              break;
          }
        }
      }
    }

    serializedModels.push({
      entity: 'model',
      model: {
        id: idMap.get(model.id),
        prefix: project.prefix,
        title: model.title,
        // table name stored without the project prefix so it can be
        // re-prefixed on import
        table_name: clearPrefix(model.table_name, project.prefix),
        meta: model.meta,
        columns: model.columns.map((column) => ({
          id: idMap.get(column.id),
          ai: column.ai,
          column_name: column.column_name,
          cc: column.cc,
          cdf: column.cdf,
          meta: column.meta,
          pk: column.pk,
          order: column.order,
          rqd: column.rqd,
          system: column.system,
          uidt: column.uidt,
          title: column.title,
          un: column.un,
          unique: column.unique,
          colOptions: column.colOptions,
        })),
      },
      views: model.views.map((view) => ({
        id: idMap.get(view.id),
        is_default: view.is_default,
        type: view.type,
        meta: view.meta,
        order: view.order,
        title: view.title,
        show: view.show,
        show_system_fields: view.show_system_fields,
        filter: view.filter,
        sorts: view.sorts,
        lock_type: view.lock_type,
        columns: view.columns.map((column) => {
          // strip db-local identity fields; keep only the column link
          // (as a structured id) plus display options
          const {
            id,
            fk_view_id,
            fk_column_id,
            project_id,
            base_id,
            created_at,
            updated_at,
            uuid,
            ...rest
          } = column as any;
          return {
            fk_column_id: idMap.get(fk_column_id),
            ...rest,
          };
        }),
        view: view.view,
      })),
    });
  }

  return serializedModels;
}
||||
|
||||
/**
 * Exports one model's rows to `<path>/<modelId>.csv` via the storage
 * adapter, and — when the model has many-to-many links — its link rows to
 * `<path>/<modelId>_links.csv`. Data is read page-by-page (see
 * recursiveRead) and streamed into the upload. On failure both target
 * files are deleted and the error is rethrown.
 */
async function exportModelData(param: {
  storageAdapter: IStorageAdapterV2;
  path: string;
  projectId: string;
  modelId: string;
  viewId?: string;
}) {
  const { model, view } = await getViewAndModelByAliasOrId({
    projectName: param.projectId,
    tableName: param.modelId,
    viewName: param.viewId,
  });

  await model.getColumns();

  // only mm links get a separate links CSV
  const hasLink = model.columns.some((c) => c.uidt === UITypes.LinkToAnotherRecord && c.colOptions?.type === 'mm');

  // LTAR column id -> primary-key title of the related table, used to pull
  // pk values out of nested link records (mm and bt relations; hm skipped)
  const pkMap = new Map<string, string>();

  for (const column of model.columns.filter((c) => c.uidt === UITypes.LinkToAnotherRecord && c.colOptions?.type !== 'hm')) {
    const relatedTable = await (
      (await column.getColOptions()) as LinkToAnotherRecordColumn
    ).getRelatedTable();

    await relatedTable.getColumns();

    pkMap.set(column.id, relatedTable.primaryKey.title);
  }

  // push-mode streams: recursiveRead feeds them, the adapter consumes them
  const readableStream = new Readable({
    read() {},
  });

  const readableLinkStream = new Readable({
    read() {},
  });

  readableStream.setEncoding('utf8');

  readableLinkStream.setEncoding('utf8');

  const storageAdapter = param.storageAdapter;

  const uploadPromise = storageAdapter.fileCreateByStream(
    `${param.path}/${model.id}.csv`,
    readableStream
  );

  const uploadLinkPromise = hasLink
    ? storageAdapter.fileCreateByStream(
        `${param.path}/${model.id}_links.csv`,
        readableLinkStream
      )
    : Promise.resolve();

  // page size for the recursive data reads
  const limit = 100;
  let offset = 0;

  const primaryKey = model.columns.find((c) => c.pk);

  // Rewrites one page of rows in place: keys become structured column ids
  // (projectId::baseId::modelId::columnId); mm link values are split out
  // into `linkData` rows keyed by the row's pk.
  const formatData = (data: any) => {
    const linkData = [];
    for (const row of data) {
      const pkValue = primaryKey ? row[primaryKey.title] : undefined;
      const linkRow = {};
      for (const [k, v] of Object.entries(row)) {
        const col = model.columns.find((c) => c.title === k);
        if (col) {
          if (col.pk) linkRow['pk'] = pkValue;
          const colId = `${col.project_id}::${col.base_id}::${col.fk_model_id}::${col.id}`;
          switch(col.uidt) {
            case UITypes.LinkToAnotherRecord:
              if (col.system || col.colOptions.type === 'hm') break;
              const pkList = [];

              const links = Array.isArray(v) ? v : [v];

              // collect the related-row pk values from the nested records
              for (const link of links) {
                if (link) {
                  for (const [k, val] of Object.entries(link)) {
                    if (k === pkMap.get(col.id)) {
                      pkList.push(val);
                    }
                  }
                }
              }

              if (col.colOptions.type === 'mm') {
                linkRow[colId] = pkList.join(',');
              } else {
                // presumably a belongs-to link: single pk stored inline
                row[colId] = pkList[0];
              }
              break;
            case UITypes.Attachment:
              try {
                row[colId] = JSON.stringify(v);
              } catch (e) {
                row[colId] = v;
              }
              break;
            case UITypes.ForeignKey:
            case UITypes.Formula:
            case UITypes.Lookup:
            case UITypes.Rollup:
            case UITypes.Rating:
            case UITypes.Barcode:
              // skip these types
              break;
            default:
              row[colId] = v;
              break;
          }
          delete row[k];
        }
      }
      linkData.push(linkRow);
    }
    return { data, linkData };
  }

  try {
    await recursiveRead(formatData, readableStream, readableLinkStream, model, view, offset, limit, true);
    await uploadPromise;
    await uploadLinkPromise;
  } catch (e) {
    // best-effort cleanup of partially written files
    await storageAdapter.fileDelete(`${param.path}/${model.id}.csv`);
    await storageAdapter.fileDelete(`${param.path}/${model.id}_links.csv`);
    console.error(e);
    throw e;
  }

  return true;
}
||||
|
||||
async function recursiveRead( |
||||
formatter: Function, |
||||
stream: Readable, |
||||
linkStream: Readable, |
||||
model: Model, |
||||
view: View, |
||||
offset: number, |
||||
limit: number, |
||||
header = false |
||||
): Promise<void> { |
||||
return new Promise((resolve, reject) => { |
||||
dataService |
||||
.getDataList({ model, view, query: { limit, offset } }) |
||||
.then((result) => { |
||||
try { |
||||
if (!header) { |
||||
stream.push('\r\n'); |
||||
linkStream.push('\r\n'); |
||||
} |
||||
const { data, linkData } = formatter(result.list); |
||||
stream.push(unparse(data, { header })); |
||||
linkStream.push(unparse(linkData, { header })); |
||||
if (result.pageInfo.isLastPage) { |
||||
stream.push(null); |
||||
linkStream.push(null); |
||||
resolve(); |
||||
} else { |
||||
recursiveRead(formatter, stream, linkStream, model, view, offset + limit, limit).then(resolve); |
||||
} |
||||
} catch (e) { |
||||
reject(e); |
||||
} |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
function clearPrefix(text: string, prefix?: string) { |
||||
if (!prefix || prefix.length === 0) return text; |
||||
return text.replace(new RegExp(`^${prefix}_?`), ''); |
||||
} |
||||
|
||||
export async function exportBaseSchema(param: { baseId: string }) { |
||||
const base = await Base.get(param.baseId); |
||||
|
||||
if (!base) return NcError.badRequest(`Base not found for id '${param.baseId}'`); |
||||
|
||||
const project = await Project.get(base.project_id); |
||||
|
||||
const models = (await base.getModels()).filter((m) => !m.mm && m.type === 'table'); |
||||
|
||||
const exportedModels = await serializeModels({ modelId: models.map(m => m.id) }); |
||||
|
||||
const exportData = { id: `${project.id}::${base.id}`, entity: 'base', models: exportedModels }; |
||||
|
||||
return exportData; |
||||
} |
||||
|
||||
export async function exportBase(param: { path: string; baseId: string }) { |
||||
const base = await Base.get(param.baseId); |
||||
|
||||
if (!base) return NcError.badRequest(`Base not found for id '${param.baseId}'`); |
||||
|
||||
const project = await Project.get(base.project_id); |
||||
|
||||
const models = (await base.getModels()).filter((m) => !m.mm && m.type === 'table'); |
||||
|
||||
const exportedModels = await serializeModels({ modelId: models.map(m => m.id) }); |
||||
|
||||
const exportData = { id: `${project.id}::${base.id}`, entity: 'base', models: exportedModels }; |
||||
|
||||
const storageAdapter = await NcPluginMgrv2.storageAdapter(); |
||||
|
||||
const destPath = `export/${project.id}/${base.id}/${param.path}/schema.json`; |
||||
|
||||
try { |
||||
|
||||
const readableStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
readableStream.setEncoding('utf8'); |
||||
|
||||
readableStream.push(JSON.stringify(exportData)); |
||||
|
||||
readableStream.push(null); |
||||
|
||||
await storageAdapter.fileCreateByStream( |
||||
destPath, |
||||
readableStream |
||||
); |
||||
|
||||
for (const model of models) { |
||||
await exportModelData({ |
||||
storageAdapter, |
||||
path: `export/${project.id}/${base.id}/${param.path}/data`, |
||||
projectId: project.id, |
||||
modelId: model.id, |
||||
}); |
||||
} |
||||
|
||||
} catch (e) { |
||||
console.error(e); |
||||
return NcError.internalServerError('Error while exporting base'); |
||||
} |
||||
|
||||
return true; |
||||
} |
@ -0,0 +1,844 @@
|
||||
import type { ViewCreateReqType } from 'nocodb-sdk'; |
||||
import { UITypes, ViewTypes } from 'nocodb-sdk'; |
||||
import { tableService, gridViewService, filterService, viewColumnService, gridViewColumnService, sortService, formViewService, galleryViewService, kanbanViewService, formViewColumnService, columnService, bulkDataService } from '..'; |
||||
import { NcError } from '../../meta/helpers/catchError'; |
||||
import { Project, Base, User, View, Model, Column, LinkToAnotherRecordColumn } from '../../models'; |
||||
import NcPluginMgrv2 from '../../meta/helpers/NcPluginMgrv2'; |
||||
import papaparse from 'papaparse'; |
||||
|
||||
/**
 * Imports serialized models (as produced by the export service) into the
 * target project/base in four passes:
 *   1. create tables with their "static" columns,
 *   2. create link (LTAR mm/hm) columns and reconcile the auto-created
 *      reverse columns on the related tables,
 *   3. create Lookup/Rollup/Formula columns in dependency order,
 *   4. create views with their filters, sorts and column options.
 * Returns the structured-id -> new-db-id map built along the way, or an
 * NcError.badRequest result when the project/base cannot be found.
 */
export async function importModels(param: {
  user: User;
  projectId: string;
  baseId: string;
  data: { models: { model: any; views: any[] }[] } | { model: any; views: any[] }[];
  req: any;
}) {

  // structured id to db id
  const idMap = new Map<string, string>();

  const project = await Project.get(param.projectId);

  if (!project) return NcError.badRequest(`Project not found for id '${param.projectId}'`);

  const base = await Base.get(param.baseId);

  if (!base) return NcError.badRequest(`Base not found for id '${param.baseId}'`);

  // structured model id -> created table
  const tableReferences = new Map<string, Model>();
  // exported mm junction-model id -> newly created junction-model id;
  // also marks which mm relations were already created (each mm link
  // appears on both sides of the relation)
  const linkMap = new Map<string, string>();

  // normalize input: accept either a bare array or `{ models: [...] }`
  param.data = Array.isArray(param.data) ? param.data : param.data.models;

  // create tables with static columns
  for (const data of param.data) {
    const modelData = data.model;

    // virtual/relational columns are deferred to later passes
    const reducedColumnSet = modelData.columns.filter(
      (a) =>
        a.uidt !== UITypes.LinkToAnotherRecord &&
        a.uidt !== UITypes.Lookup &&
        a.uidt !== UITypes.Rollup &&
        a.uidt !== UITypes.Formula &&
        a.uidt !== UITypes.ForeignKey
    );

    // create table with static columns
    const table = await tableService.tableCreate({
      projectId: project.id,
      baseId: base.id,
      user: param.user,
      table: withoutId({
        ...modelData,
        columns: reducedColumnSet.map((a) => withoutId(a)),
      }),
    });

    idMap.set(modelData.id, table.id);

    // map column id's with new created column id's
    for (const col of table.columns) {
      const colRef = modelData.columns.find(
        (a) => a.column_name === col.column_name
      );
      idMap.set(colRef.id, col.id);
    }

    tableReferences.set(modelData.id, table);
  }

  const referencedColumnSet = []

  // create columns with reference to other columns
  for (const data of param.data) {
    const modelData = data.model;
    const table = tableReferences.get(modelData.id);

    const linkedColumnSet = modelData.columns.filter(
      (a) => a.uidt === UITypes.LinkToAnotherRecord
    );

    // create columns with reference to other columns
    for (const col of linkedColumnSet) {
      if (col.colOptions) {
        const colOptions = col.colOptions;
        if (col.uidt === UITypes.LinkToAnotherRecord && idMap.has(colOptions.fk_related_model_id)) {
          if (colOptions.type === 'mm') {
            // only create each mm relation once; the second side is
            // reconciled below via linkMap
            if (!linkMap.has(colOptions.fk_mm_model_id)) {
              // delete col.column_name as it is not required and will cause ajv error (null for LTAR)
              delete col.column_name;

              // NOTE(review): parentId is derived from fk_child_column_id
              // and childId from fk_parent_column_id — looks intentional
              // for the mm direction, but confirm against columnAdd
              const freshModelData = await columnService.columnAdd({
                tableId: table.id,
                column: withoutId({
                  ...col,
                  ...{
                    parentId: idMap.get(getParentIdentifier(colOptions.fk_child_column_id)),
                    childId: idMap.get(getParentIdentifier(colOptions.fk_parent_column_id)),
                    type: colOptions.type,
                    virtual: colOptions.virtual,
                    ur: colOptions.ur,
                    dr: colOptions.dr,
                  },
                }),
                req: param.req,
              });

              // locate the newly created column on this table by title and
              // remember the new junction model id
              for (const nColumn of freshModelData.columns) {
                if (nColumn.title === col.title) {
                  idMap.set(col.id, nColumn.id);
                  linkMap.set(colOptions.fk_mm_model_id, nColumn.colOptions.fk_mm_model_id);
                  break;
                }
              }

              // reconcile the auto-created reverse column on the other side
              // of the relation (may be this same table for self-relations)
              const childModel = getParentIdentifier(colOptions.fk_parent_column_id) === modelData.id ? freshModelData : await Model.get(idMap.get(getParentIdentifier(colOptions.fk_parent_column_id)));

              if (getParentIdentifier(colOptions.fk_parent_column_id) !== modelData.id) await childModel.getColumns();

              const childColumn = param.data.find(a => a.model.id === getParentIdentifier(colOptions.fk_parent_column_id)).model.columns.find(a => a.colOptions?.fk_mm_model_id === colOptions.fk_mm_model_id && a.id !== col.id);

              for (const nColumn of childModel.columns) {
                if (nColumn?.colOptions?.fk_mm_model_id === linkMap.get(colOptions.fk_mm_model_id) && nColumn.id !== idMap.get(col.id)) {
                  idMap.set(childColumn.id, nColumn.id);

                  // rename the auto-created reverse column to its exported title
                  if (nColumn.title !== childColumn.title) {
                    await columnService.columnUpdate({
                      columnId: nColumn.id,
                      column: {
                        ...nColumn,
                        column_name: childColumn.title,
                        title: childColumn.title,
                      },
                    });
                  }
                  break;
                }
              }
            }
          } else if (colOptions.type === 'hm') {
            // delete col.column_name as it is not required and will cause ajv error (null for LTAR)
            delete col.column_name;

            const freshModelData = await columnService.columnAdd({
              tableId: table.id,
              column: withoutId({
                ...col,
                ...{
                  parentId: idMap.get(getParentIdentifier(colOptions.fk_parent_column_id)),
                  childId: idMap.get(getParentIdentifier(colOptions.fk_child_column_id)),
                  type: colOptions.type,
                  virtual: colOptions.virtual,
                  ur: colOptions.ur,
                  dr: colOptions.dr,
                },
              }),
              req: param.req,
            });

            // locate the created column and record the new fk column ids
            for (const nColumn of freshModelData.columns) {
              if (nColumn.title === col.title) {
                idMap.set(col.id, nColumn.id);
                idMap.set(colOptions.fk_parent_column_id, nColumn.colOptions.fk_parent_column_id);
                idMap.set(colOptions.fk_child_column_id, nColumn.colOptions.fk_child_column_id);
                break;
              }
            }

            // reconcile the auto-created belongs-to column on the related model
            const childModel = colOptions.fk_related_model_id === modelData.id ? freshModelData : await Model.get(idMap.get(colOptions.fk_related_model_id));

            if (colOptions.fk_related_model_id !== modelData.id) await childModel.getColumns();

            const childColumn = param.data
              .find((a) => a.model.id === colOptions.fk_related_model_id)
              .model.columns.find(
                (a) =>
                  a.colOptions?.fk_parent_column_id ===
                    colOptions.fk_parent_column_id &&
                  a.colOptions?.fk_child_column_id ===
                    colOptions.fk_child_column_id &&
                  a.id !== col.id
              );

            for (const nColumn of childModel.columns) {
              if (nColumn.id !== idMap.get(col.id) && nColumn.colOptions?.fk_parent_column_id === idMap.get(colOptions.fk_parent_column_id) && nColumn.colOptions?.fk_child_column_id === idMap.get(colOptions.fk_child_column_id)) {
                idMap.set(childColumn.id, nColumn.id);

                // rename the auto-created reverse column to its exported title
                if (nColumn.title !== childColumn.title) {
                  await columnService.columnUpdate({
                    columnId: nColumn.id,
                    column: {
                      ...nColumn,
                      column_name: childColumn.title,
                      title: childColumn.title,
                    },
                  });
                }
                break;
              }
            }
          }
        }
      }
    }

    // collect columns that reference other columns for the next pass
    referencedColumnSet.push(...modelData.columns.filter(
      (a) =>
        a.uidt === UITypes.Lookup ||
        a.uidt === UITypes.Rollup ||
        a.uidt === UITypes.Formula
    ));
  }

  const sortedReferencedColumnSet = [];

  // sort referenced columns to avoid referencing before creation
  for (const col of referencedColumnSet) {
    const relatedColIds = [];
    if (col.colOptions?.fk_lookup_column_id) {
      relatedColIds.push(col.colOptions.fk_lookup_column_id);
    }
    if (col.colOptions?.fk_rollup_column_id) {
      relatedColIds.push(col.colOptions.fk_rollup_column_id);
    }
    if (col.colOptions?.formula) {
      // formula dependencies are the {{colId}} tokens in the formula text
      relatedColIds.push(...col.colOptions.formula.match(/(?<=\{\{).*?(?=\}\})/gm));
    }

    // find the last related column in the sorted array
    let fnd = undefined;
    for (let i = sortedReferencedColumnSet.length - 1; i >= 0; i--) {
      if (relatedColIds.includes(sortedReferencedColumnSet[i].id)) {
        fnd = sortedReferencedColumnSet[i];
        break;
      }
    }

    // insert after its last dependency, or at the front when independent
    if (!fnd) {
      sortedReferencedColumnSet.unshift(col);
    } else {
      sortedReferencedColumnSet.splice(sortedReferencedColumnSet.indexOf(fnd) + 1, 0, col);
    }
  }

  // create referenced columns
  for (const col of sortedReferencedColumnSet) {
    const { colOptions, ...flatCol } = col;
    if (col.uidt === UITypes.Lookup) {
      // skip lookups whose relation column was never created
      if (!idMap.get(colOptions.fk_relation_column_id)) continue;
      const freshModelData = await columnService.columnAdd({
        tableId: idMap.get(getParentIdentifier(col.id)),
        column: withoutId({
          ...flatCol,
          ...{
            fk_lookup_column_id: idMap.get(colOptions.fk_lookup_column_id),
            fk_relation_column_id: idMap.get(colOptions.fk_relation_column_id),
          },
        }),
        req: param.req,
      });

      for (const nColumn of freshModelData.columns) {
        if (nColumn.title === col.title) {
          idMap.set(col.id, nColumn.id);
          break;
        }
      }
    } else if (col.uidt === UITypes.Rollup) {
      // skip rollups whose relation column was never created
      if (!idMap.get(colOptions.fk_relation_column_id)) continue;
      const freshModelData = await columnService.columnAdd({
        tableId: idMap.get(getParentIdentifier(col.id)),
        column: withoutId({
          ...flatCol,
          ...{
            fk_rollup_column_id: idMap.get(colOptions.fk_rollup_column_id),
            fk_relation_column_id: idMap.get(colOptions.fk_relation_column_id),
            rollup_function: colOptions.rollup_function,
          },
        }),
        req: param.req,
      });

      for (const nColumn of freshModelData.columns) {
        if (nColumn.title === col.title) {
          idMap.set(col.id, nColumn.id);
          break;
        }
      }
    } else if (col.uidt === UITypes.Formula) {
      // formula_raw still contains structured ids; presumably the service
      // re-resolves column references on create — confirm in columnAdd
      const freshModelData = await columnService.columnAdd({
        tableId: idMap.get(getParentIdentifier(col.id)),
        column: withoutId({
          ...flatCol,
          ...{
            formula_raw: colOptions.formula_raw,
          },
        }),
        req: param.req,
      });

      for (const nColumn of freshModelData.columns) {
        if (nColumn.title === col.title) {
          idMap.set(col.id, nColumn.id);
          break;
        }
      }
    }
  }

  // create views
  for (const data of param.data) {
    const modelData = data.model;
    const viewsData = data.views;

    const table = tableReferences.get(modelData.id);

    // get default view
    await table.getViews();

    for (const view of viewsData) {
      const viewData = withoutId({
        ...view,
      });

      const vw = await createView(idMap, table, viewData, table.views);

      if (!vw) continue;

      idMap.set(view.id, vw.id);

      // create filters
      const filters = view.filter.children;

      for (const fl of filters) {
        const fg = await filterService.filterCreate({
          viewId: vw.id,
          filter: withoutId({
            ...fl,
            fk_column_id: idMap.get(fl.fk_column_id),
            fk_parent_id: idMap.get(fl.fk_parent_id),
          }),
        });

        idMap.set(fl.id, fg.id);
      }

      // create sorts
      for (const sr of view.sorts) {
        await sortService.sortCreate({
          viewId: vw.id,
          sort: withoutId({
            ...sr,
            fk_column_id: idMap.get(sr.fk_column_id),
          }),
        })
      }

      // update view columns
      const vwColumns = await viewColumnService.columnList({ viewId: vw.id })

      // match each created view column back to its exported counterpart
      // (reverseGet maps new db id -> structured id)
      for (const cl of vwColumns) {
        const fcl = view.columns.find(a => a.fk_column_id === reverseGet(idMap, cl.fk_column_id))
        if (!fcl) continue;
        await viewColumnService.columnUpdate({
          viewId: vw.id,
          columnId: cl.id,
          column: {
            show: fcl.show,
            order: fcl.order,
          },
        })
      }

      // apply view-type-specific column options
      switch (vw.type) {
        case ViewTypes.GRID:
          for (const cl of vwColumns) {
            const fcl = view.columns.find(a => a.fk_column_id === reverseGet(idMap, cl.fk_column_id))
            if (!fcl) continue;
            const { fk_column_id, ...rest } = fcl;
            await gridViewColumnService.gridColumnUpdate({
              gridViewColumnId: cl.id,
              grid: {
                ...withoutNull(rest),
              },
            })
          }
          break;
        case ViewTypes.FORM:
          for (const cl of vwColumns) {
            const fcl = view.columns.find(a => a.fk_column_id === reverseGet(idMap, cl.fk_column_id))
            if (!fcl) continue;
            const { fk_column_id, ...rest } = fcl;
            await formViewColumnService.columnUpdate({
              formViewColumnId: cl.id,
              formViewColumn: {
                ...withoutNull(rest),
              },
            })
          }
          break;
        case ViewTypes.GALLERY:
        case ViewTypes.KANBAN:
          // no per-column options applied for these view types
          break;
      }
    }
  }

  return idMap;
}
||||
|
||||
async function createView(idMap: Map<string, string>, md: Model, vw: Partial<View>, views: View[]): Promise<View> { |
||||
if (vw.is_default) { |
||||
const view = views.find((a) => a.is_default); |
||||
if (view) { |
||||
const gridData = withoutNull(vw.view); |
||||
if (gridData) { |
||||
await gridViewService.gridViewUpdate({ |
||||
viewId: view.id, |
||||
grid: gridData, |
||||
}); |
||||
} |
||||
} |
||||
return view; |
||||
} |
||||
|
||||
switch (vw.type) { |
||||
case ViewTypes.GRID: |
||||
const gview = await gridViewService.gridViewCreate({ |
||||
tableId: md.id, |
||||
grid: vw as ViewCreateReqType, |
||||
}); |
||||
const gridData = withoutNull(vw.view); |
||||
if (gridData) { |
||||
await gridViewService.gridViewUpdate({ |
||||
viewId: gview.id, |
||||
grid: gridData, |
||||
}); |
||||
} |
||||
return gview; |
||||
case ViewTypes.FORM: |
||||
const fview = await formViewService.formViewCreate({ |
||||
tableId: md.id, |
||||
body: vw as ViewCreateReqType, |
||||
}); |
||||
const formData = withoutNull(vw.view); |
||||
if (formData) { |
||||
await formViewService.formViewUpdate({ |
||||
formViewId: fview.id, |
||||
form: formData, |
||||
}); |
||||
} |
||||
return fview; |
||||
case ViewTypes.GALLERY: |
||||
const glview = await galleryViewService.galleryViewCreate({ |
||||
tableId: md.id, |
||||
gallery: vw as ViewCreateReqType, |
||||
}); |
||||
const galleryData = withoutNull(vw.view); |
||||
if (galleryData) { |
||||
for (const [k, v] of Object.entries(galleryData)) { |
||||
switch (k) { |
||||
case 'fk_cover_image_col_id': |
||||
galleryData[k] = idMap.get(v as string); |
||||
break; |
||||
} |
||||
} |
||||
await galleryViewService.galleryViewUpdate({ |
||||
galleryViewId: glview.id, |
||||
gallery: galleryData, |
||||
}); |
||||
} |
||||
return glview; |
||||
case ViewTypes.KANBAN: |
||||
const kview = await kanbanViewService.kanbanViewCreate({ |
||||
tableId: md.id, |
||||
kanban: vw as ViewCreateReqType, |
||||
}); |
||||
const kanbanData = withoutNull(vw.view); |
||||
if (kanbanData) { |
||||
for (const [k, v] of Object.entries(kanbanData)) { |
||||
switch (k) { |
||||
case 'fk_grp_col_id': |
||||
case 'fk_cover_image_col_id': |
||||
kanbanData[k] = idMap.get(v as string); |
||||
break; |
||||
case 'meta': |
||||
const meta = {}; |
||||
for (const [mk, mv] of Object.entries(v as any)) { |
||||
const tempVal = []; |
||||
for (const vl of mv as any) { |
||||
if (vl.fk_column_id) { |
||||
tempVal.push({ |
||||
...vl, |
||||
fk_column_id: idMap.get(vl.fk_column_id), |
||||
}); |
||||
} else { |
||||
delete vl.fk_column_id; |
||||
tempVal.push({ |
||||
...vl, |
||||
id: "uncategorized", |
||||
}); |
||||
} |
||||
} |
||||
meta[idMap.get(mk)] = tempVal; |
||||
} |
||||
kanbanData[k] = meta; |
||||
break; |
||||
} |
||||
} |
||||
await kanbanViewService.kanbanViewUpdate({ |
||||
kanbanViewId: kview.id, |
||||
kanban: kanbanData, |
||||
}); |
||||
} |
||||
return kview; |
||||
} |
||||
|
||||
return null |
||||
} |
||||
|
||||
function withoutNull(obj: any) { |
||||
const newObj = {}; |
||||
let found = false; |
||||
for (const [key, value] of Object.entries(obj)) { |
||||
if (value !== null) { |
||||
newObj[key] = value; |
||||
found = true; |
||||
} |
||||
} |
||||
if (!found) return null; |
||||
return newObj; |
||||
} |
||||
|
||||
function reverseGet(map: Map<string, string>, vl: string) { |
||||
for (const [key, value] of map.entries()) { |
||||
if (vl === value) { |
||||
return key; |
||||
} |
||||
} |
||||
return undefined |
||||
} |
||||
|
||||
function withoutId(obj: any) { |
||||
const { id, ...rest } = obj; |
||||
return rest; |
||||
} |
||||
|
||||
function getParentIdentifier(id: string) { |
||||
const arr = id.split('::'); |
||||
arr.pop(); |
||||
return arr.join('::'); |
||||
} |
||||
|
||||
function getEntityIdentifier(id: string) { |
||||
const arr = id.split('::'); |
||||
return arr.pop(); |
||||
} |
||||
|
||||
function findWithIdentifier(map: Map<string, any>, id: string) { |
||||
for (const key of map.keys()) { |
||||
if (getEntityIdentifier(key) === id) { |
||||
return map.get(key); |
||||
} |
||||
} |
||||
return undefined; |
||||
} |
||||
|
||||
export async function importBase(param: { |
||||
user: User; |
||||
projectId: string; |
||||
baseId: string; |
||||
src: { type: 'local' | 'url' | 'file'; path?: string; url?: string; file?: any }; |
||||
req: any; |
||||
}) { |
||||
const { user, projectId, baseId, src, req } = param; |
||||
|
||||
const debug = req.params.debug === 'true'; |
||||
|
||||
const debugLog = (...args: any[]) => { |
||||
if (!debug) return; |
||||
console.log(...args); |
||||
} |
||||
|
||||
let start = process.hrtime(); |
||||
|
||||
let elapsedTime = function(label?: string){ |
||||
const elapsedS = (process.hrtime(start)[0]).toFixed(3); |
||||
const elapsedMs = process.hrtime(start)[1] / 1000000; |
||||
if (label) debugLog(`${label}: ${elapsedS}s ${elapsedMs}ms`); |
||||
start = process.hrtime(); |
||||
} |
||||
|
||||
switch (src.type) { |
||||
case 'local': |
||||
const path = src.path.replace(/\/$/, ''); |
||||
|
||||
const storageAdapter = await NcPluginMgrv2.storageAdapter(); |
||||
|
||||
try { |
||||
const schema = JSON.parse(await storageAdapter.fileRead(`${path}/schema.json`)); |
||||
|
||||
elapsedTime('read schema'); |
||||
|
||||
// store fk_mm_model_id (mm) to link once
|
||||
const handledLinks = []; |
||||
|
||||
const idMap = await importModels({ |
||||
user, |
||||
projectId, |
||||
baseId, |
||||
data: schema, |
||||
req, |
||||
}); |
||||
|
||||
elapsedTime('import models'); |
||||
|
||||
if (idMap) { |
||||
const files = await storageAdapter.getDirectoryList(`${path}/data`); |
||||
const dataFiles = files.filter((file) => !file.match(/_links\.csv$/)); |
||||
const linkFiles = files.filter((file) => file.match(/_links\.csv$/)); |
||||
|
||||
for (const file of dataFiles) { |
||||
const readStream = await storageAdapter.fileReadByStream( |
||||
`${path}/data/${file}` |
||||
); |
||||
|
||||
const headers: string[] = []; |
||||
let chunk = []; |
||||
|
||||
const modelId = findWithIdentifier( |
||||
idMap, |
||||
file.replace(/\.csv$/, '') |
||||
); |
||||
|
||||
const model = await Model.get(modelId); |
||||
|
||||
debugLog(`Importing ${model.title}...`); |
||||
|
||||
await new Promise(async (resolve) => { |
||||
papaparse.parse(readStream, { |
||||
newline: '\r\n', |
||||
step: async function (results, parser) { |
||||
if (!headers.length) { |
||||
parser.pause(); |
||||
for (const header of results.data) { |
||||
const id = idMap.get(header); |
||||
if (id) { |
||||
const col = await Column.get({ |
||||
base_id: baseId, |
||||
colId: id, |
||||
}); |
||||
if (col.colOptions?.type === 'bt') { |
||||
const childCol = await Column.get({ |
||||
base_id: baseId, |
||||
colId: col.colOptions.fk_child_column_id, |
||||
}); |
||||
headers.push(childCol.column_name); |
||||
} else { |
||||
headers.push(col.column_name); |
||||
} |
||||
} else { |
||||
debugLog(header); |
||||
} |
||||
} |
||||
parser.resume(); |
||||
} else { |
||||
if (results.errors.length === 0) { |
||||
const row = {}; |
||||
for (let i = 0; i < headers.length; i++) { |
||||
if (results.data[i] !== '') { |
||||
row[headers[i]] = results.data[i]; |
||||
} |
||||
} |
||||
chunk.push(row); |
||||
if (chunk.length > 100) { |
||||
parser.pause(); |
||||
elapsedTime('before import chunk'); |
||||
try { |
||||
await bulkDataService.bulkDataInsert({ |
||||
projectName: projectId, |
||||
tableName: modelId, |
||||
body: chunk, |
||||
cookie: null, |
||||
chunkSize: chunk.length + 1, |
||||
foreign_key_checks: false, |
||||
raw: true, |
||||
}); |
||||
} catch (e) { |
||||
debugLog(`${model.title} import throwed an error!`); |
||||
console.log(e); |
||||
} |
||||
chunk = []; |
||||
elapsedTime('after import chunk'); |
||||
parser.resume(); |
||||
} |
||||
} |
||||
} |
||||
}, |
||||
complete: async function () { |
||||
if (chunk.length > 0) { |
||||
elapsedTime('before import chunk'); |
||||
try { |
||||
await bulkDataService.bulkDataInsert({ |
||||
projectName: projectId, |
||||
tableName: modelId, |
||||
body: chunk, |
||||
cookie: null, |
||||
chunkSize: chunk.length + 1, |
||||
foreign_key_checks: false, |
||||
raw: true, |
||||
}); |
||||
} catch (e) { |
||||
debugLog(chunk); |
||||
console.log(e); |
||||
} |
||||
chunk = []; |
||||
elapsedTime('after import chunk'); |
||||
} |
||||
resolve(null); |
||||
}, |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
// reset timer
|
||||
elapsedTime(); |
||||
|
||||
for (const file of linkFiles) { |
||||
const readStream = await storageAdapter.fileReadByStream( |
||||
`${path}/data/${file}` |
||||
); |
||||
|
||||
const headers: string[] = []; |
||||
const mmParentChild: any = {}; |
||||
let chunk: Record<string, any[]> = {}; // colId: { rowId, childId }[]
|
||||
|
||||
const modelId = findWithIdentifier( |
||||
idMap, |
||||
file.replace(/_links\.csv$/, '') |
||||
); |
||||
const model = await Model.get(modelId); |
||||
|
||||
let pkIndex = -1; |
||||
|
||||
debugLog(`Linking ${model.title}...`); |
||||
|
||||
await new Promise(async (resolve) => { |
||||
papaparse.parse(readStream, { |
||||
newline: '\r\n', |
||||
step: async function (results, parser) { |
||||
if (!headers.length) { |
||||
parser.pause(); |
||||
for (const header of results.data) { |
||||
if (header === 'pk') { |
||||
headers.push(null); |
||||
pkIndex = headers.length - 1; |
||||
continue; |
||||
} |
||||
const id = idMap.get(header); |
||||
if (id) { |
||||
const col = await Column.get({ |
||||
base_id: baseId, |
||||
colId: id, |
||||
}); |
||||
if ( |
||||
col.uidt === UITypes.LinkToAnotherRecord && |
||||
col.colOptions.fk_mm_model_id && |
||||
handledLinks.includes(col.colOptions.fk_mm_model_id) |
||||
) { |
||||
headers.push(null); |
||||
} else { |
||||
if ( |
||||
col.uidt === UITypes.LinkToAnotherRecord && |
||||
col.colOptions.fk_mm_model_id && |
||||
!handledLinks.includes( |
||||
col.colOptions.fk_mm_model_id |
||||
) |
||||
) { |
||||
const colOptions = await col.getColOptions<LinkToAnotherRecordColumn>(); |
||||
|
||||
const vChildCol = await colOptions.getMMChildColumn(); |
||||
const vParentCol = await colOptions.getMMParentColumn(); |
||||
|
||||
mmParentChild[col.colOptions.fk_mm_model_id] = { |
||||
parent: vParentCol.column_name, |
||||
child: vChildCol.column_name, |
||||
} |
||||
|
||||
handledLinks.push(col.colOptions.fk_mm_model_id); |
||||
} |
||||
headers.push(col.colOptions.fk_mm_model_id); |
||||
chunk[col.colOptions.fk_mm_model_id] = [] |
||||
} |
||||
} |
||||
} |
||||
parser.resume(); |
||||
} else { |
||||
if (results.errors.length === 0) { |
||||
for (let i = 0; i < headers.length; i++) { |
||||
if (!headers[i]) continue; |
||||
|
||||
const mm = mmParentChild[headers[i]]; |
||||
|
||||
for (const rel of results.data[i].split(',')) { |
||||
if (rel.trim() === '') continue; |
||||
chunk[headers[i]].push({ [mm.parent]: rel, [mm.child]: results.data[pkIndex] }); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
}, |
||||
complete: async function () { |
||||
for (const [k, v] of Object.entries(chunk)) { |
||||
try { |
||||
elapsedTime('prepare link chunk'); |
||||
await bulkDataService.bulkDataInsert({ |
||||
projectName: projectId, |
||||
tableName: k, |
||||
body: v, |
||||
cookie: null, |
||||
chunkSize: 1000, |
||||
foreign_key_checks: false, |
||||
raw: true, |
||||
}); |
||||
elapsedTime('insert link chunk'); |
||||
} catch (e) { |
||||
console.log(e); |
||||
} |
||||
} |
||||
resolve(null); |
||||
}, |
||||
}); |
||||
}); |
||||
} |
||||
} |
||||
} catch (e) { |
||||
throw new Error(e); |
||||
} |
||||
break; |
||||
case 'url': |
||||
break; |
||||
case 'file': |
||||
break; |
||||
} |
||||
} |
@ -1,100 +0,0 @@
|
||||
FROM golang:alpine3.14 as lt |
||||
|
||||
WORKDIR /usr/src/ |
||||
|
||||
RUN apk add --no-cache git make musl-dev gcc |
||||
|
||||
# build litestream |
||||
RUN git clone https://github.com/benbjohnson/litestream.git litestream |
||||
RUN cd litestream ; go install ./cmd/litestream |
||||
|
||||
RUN cp $GOPATH/bin/litestream /usr/src/lt |
||||
|
||||
|
||||
|
||||
FROM node:12 as builder |
||||
WORKDIR /usr/src/app |
||||
|
||||
# Copy application dependency manifests to the container image. |
||||
# A wildcard is used to ensure both package.json AND package-lock.json are copied. |
||||
# Copying this separately prevents re-running npm ci on every code change. |
||||
COPY ./package*.json ./ |
||||
COPY ./docker/main.js ./docker/main.js |
||||
#COPY ./docker/start.sh /usr/src/appEntry/start.sh |
||||
COPY ./docker/start-litestream.sh /usr/src/appEntry/start.sh |
||||
# install production dependencies, |
||||
# reduce node_module size with modclean & removing sqlite deps, |
||||
# package built code into app.tar.gz & add execute permission to start.sh |
||||
RUN npm ci --production --quiet |
||||
RUN npx modclean --patterns="default:*" --ignore="nc-lib-gui/**,dayjs/**,express-status-monitor/**" --run |
||||
RUN rm -rf ./node_modules/sqlite3/deps |
||||
RUN tar -czf ../appEntry/app.tar.gz ./* |
||||
RUN chmod +x /usr/src/appEntry/start.sh |
||||
|
||||
|
||||
|
||||
|
||||
FROM alpine:3.14 |
||||
|
||||
#ENV AWS_ACCESS_KEY_ID= |
||||
#ENV AWS_SECRET_ACCESS_KEY= |
||||
#ENV AWS_BUCKET= |
||||
|
||||
|
||||
|
||||
#WORKDIR /usr/src/ |
||||
# |
||||
## Install go lang |
||||
#RUN apk add --no-cache git make musl-dev go |
||||
# |
||||
## Configure Go |
||||
#ENV GOROOT /usr/lib/go |
||||
#ENV GOPATH /go |
||||
#ENV PATH /go/bin:$PATH |
||||
# |
||||
#RUN mkdir -p ${GOPATH}/src ${GOPATH}/bin |
||||
# |
||||
## build litestream |
||||
# |
||||
#RUN git clone https://github.com/benbjohnson/litestream.git litestream |
||||
#RUN cd litestream ; go install ./cmd/litestream |
||||
|
||||
|
||||
# Bug fix for segfault ( Convert PT_GNU_STACK program header into PT_PAX_FLAGS ) |
||||
#RUN apk --update --no-cache add paxctl \ |
||||
# && paxctl -cm $(which node) |
||||
|
||||
WORKDIR /usr/src/app |
||||
|
||||
ENV NC_DOCKER 0.6 |
||||
ENV PORT 8080 |
||||
ENV NC_TOOL_DIR=/usr/app/data/ |
||||
|
||||
|
||||
# Copy application dependency manifests to the container image. |
||||
# A wildcard is used to ensure both package.json AND package-lock.json are copied. |
||||
# Copying this separately prevents re-running npm install on every code change. |
||||
#COPY ./build/ ./build/ |
||||
#COPY ./docker/main.js ./docker/main.js |
||||
#COPY ./package.json ./ |
||||
|
||||
RUN apk --update --no-cache add \ |
||||
nodejs \ |
||||
tar |
||||
|
||||
# Copy litestream binary build |
||||
COPY --from=lt /usr/src/lt /usr/src/appEntry/litestream |
||||
# Copy packaged production code & main entry file |
||||
COPY --from=builder /usr/src/appEntry/ /usr/src/appEntry/ |
||||
|
||||
|
||||
# Run the web service on container startup. |
||||
#CMD [ "node", "docker/index.js" ] |
||||
ENTRYPOINT ["sh", "/usr/src/appEntry/start.sh"] |
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -1,6 +0,0 @@
|
||||
export abstract class NocoSyncSourceAdapter { |
||||
public abstract init(): Promise<void>; |
||||
public abstract destProjectWrite(): Promise<any>; |
||||
public abstract destSchemaWrite(): Promise<any>; |
||||
public abstract destDataWrite(): Promise<any>; |
||||
} |
@ -1,7 +0,0 @@
|
||||
export abstract class NocoSyncSourceAdapter { |
||||
public abstract init(): Promise<void>; |
||||
public abstract srcSchemaGet(): Promise<any>; |
||||
public abstract srcDataLoad(): Promise<any>; |
||||
public abstract srcDataListen(): Promise<any>; |
||||
public abstract srcDataPoll(): Promise<any>; |
||||
} |
File diff suppressed because it is too large
Load Diff
@ -1,21 +0,0 @@
|
||||
import { Test } from '@nestjs/testing'; |
||||
import { ImportService } from '../../services/import.service'; |
||||
import { ImportController } from './import.controller'; |
||||
import type { TestingModule } from '@nestjs/testing'; |
||||
|
||||
describe('ImportController', () => { |
||||
let controller: ImportController; |
||||
|
||||
beforeEach(async () => { |
||||
const module: TestingModule = await Test.createTestingModule({ |
||||
controllers: [ImportController], |
||||
providers: [ImportService], |
||||
}).compile(); |
||||
|
||||
controller = module.get<ImportController>(ImportController); |
||||
}); |
||||
|
||||
it('should be defined', () => { |
||||
expect(controller).toBeDefined(); |
||||
}); |
||||
}); |
@ -1,148 +0,0 @@
|
||||
import { Controller, HttpCode, Post, Request, UseGuards } from '@nestjs/common'; |
||||
import { forwardRef, Inject } from '@nestjs/common'; |
||||
import { ModuleRef } from '@nestjs/core'; |
||||
import { GlobalGuard } from '../../guards/global/global.guard'; |
||||
import { NcError } from '../../helpers/catchError'; |
||||
import { ExtractProjectIdMiddleware } from '../../middlewares/extract-project-id/extract-project-id.middleware'; |
||||
import { SyncSource } from '../../models'; |
||||
import NocoJobs from '../../jobs/NocoJobs'; |
||||
import { SocketService } from '../../services/socket.service'; |
||||
import airtableSyncJob from './helpers/job'; |
||||
import type { AirtableSyncConfig } from './helpers/job'; |
||||
|
||||
import type { Server } from 'socket.io'; |
||||
|
||||
const AIRTABLE_IMPORT_JOB = 'AIRTABLE_IMPORT_JOB'; |
||||
const AIRTABLE_PROGRESS_JOB = 'AIRTABLE_PROGRESS_JOB'; |
||||
|
||||
enum SyncStatus { |
||||
PROGRESS = 'PROGRESS', |
||||
COMPLETED = 'COMPLETED', |
||||
FAILED = 'FAILED', |
||||
} |
||||
|
||||
const initJob = (sv: Server, jobs: { [p: string]: { last_message: any } }) => { |
||||
// add importer job handler and progress notification job handler
|
||||
NocoJobs.jobsMgr.addJobWorker(AIRTABLE_IMPORT_JOB, airtableSyncJob); |
||||
NocoJobs.jobsMgr.addJobWorker( |
||||
AIRTABLE_PROGRESS_JOB, |
||||
({ payload, progress }) => { |
||||
sv.to(payload?.id).emit('progress', { |
||||
msg: progress?.msg, |
||||
level: progress?.level, |
||||
status: progress?.status, |
||||
}); |
||||
|
||||
if (payload?.id in jobs) { |
||||
jobs[payload?.id].last_message = { |
||||
msg: progress?.msg, |
||||
level: progress?.level, |
||||
status: progress?.status, |
||||
}; |
||||
} |
||||
}, |
||||
); |
||||
|
||||
NocoJobs.jobsMgr.addProgressCbk(AIRTABLE_IMPORT_JOB, (payload, progress) => { |
||||
NocoJobs.jobsMgr.add(AIRTABLE_PROGRESS_JOB, { |
||||
payload, |
||||
progress: { |
||||
msg: progress?.msg, |
||||
level: progress?.level, |
||||
status: progress?.status, |
||||
}, |
||||
}); |
||||
}); |
||||
NocoJobs.jobsMgr.addSuccessCbk(AIRTABLE_IMPORT_JOB, (payload) => { |
||||
NocoJobs.jobsMgr.add(AIRTABLE_PROGRESS_JOB, { |
||||
payload, |
||||
progress: { |
||||
msg: 'Complete!', |
||||
status: SyncStatus.COMPLETED, |
||||
}, |
||||
}); |
||||
delete jobs[payload?.id]; |
||||
}); |
||||
NocoJobs.jobsMgr.addFailureCbk(AIRTABLE_IMPORT_JOB, (payload, error: any) => { |
||||
NocoJobs.jobsMgr.add(AIRTABLE_PROGRESS_JOB, { |
||||
payload, |
||||
progress: { |
||||
msg: error?.message || 'Failed due to some internal error', |
||||
status: SyncStatus.FAILED, |
||||
}, |
||||
}); |
||||
delete jobs[payload?.id]; |
||||
}); |
||||
}; |
||||
@Controller() |
||||
@UseGuards(ExtractProjectIdMiddleware, GlobalGuard) |
||||
export class ImportController { |
||||
constructor( |
||||
private readonly socketService: SocketService, |
||||
@Inject(forwardRef(() => ModuleRef)) private readonly moduleRef: ModuleRef, |
||||
) {} |
||||
|
||||
@Post('/api/v1/db/meta/import/airtable') |
||||
@HttpCode(200) |
||||
importAirtable(@Request() req) { |
||||
NocoJobs.jobsMgr.add(AIRTABLE_IMPORT_JOB, { |
||||
id: req.query.id, |
||||
...req.body, |
||||
}); |
||||
return {}; |
||||
} |
||||
|
||||
@Post('/api/v1/db/meta/syncs/:syncId/trigger') |
||||
@HttpCode(200) |
||||
async triggerSync(@Request() req) { |
||||
if (req.params.syncId in this.socketService.jobs) { |
||||
NcError.badRequest('Sync already in progress'); |
||||
} |
||||
|
||||
const syncSource = await SyncSource.get(req.params.syncId); |
||||
|
||||
const user = await syncSource.getUser(); |
||||
|
||||
// Treat default baseUrl as siteUrl from req object
|
||||
let baseURL = (req as any).ncSiteUrl; |
||||
|
||||
// if environment value avail use it
|
||||
// or if it's docker construct using `PORT`
|
||||
if (process.env.NC_DOCKER) { |
||||
baseURL = `http://localhost:${process.env.PORT || 8080}`; |
||||
} |
||||
|
||||
setTimeout(() => { |
||||
NocoJobs.jobsMgr.add<AirtableSyncConfig>(AIRTABLE_IMPORT_JOB, { |
||||
id: req.params.syncId, |
||||
...(syncSource?.details || {}), |
||||
projectId: syncSource.project_id, |
||||
baseId: syncSource.base_id, |
||||
authToken: '', |
||||
baseURL, |
||||
user: user, |
||||
moduleRef: this.moduleRef, |
||||
}); |
||||
}, 1000); |
||||
|
||||
this.socketService.jobs[req.params.syncId] = { |
||||
last_message: { |
||||
msg: 'Sync started', |
||||
}, |
||||
}; |
||||
return {}; |
||||
} |
||||
|
||||
@Post('/api/v1/db/meta/syncs/:syncId/abort') |
||||
@HttpCode(200) |
||||
async abortImport(@Request() req) { |
||||
if (req.params.syncId in this.socketService.jobs) { |
||||
delete this.socketService.jobs[req.params.syncId]; |
||||
} |
||||
return {}; |
||||
} |
||||
|
||||
async onModuleInit() { |
||||
initJob(this.socketService.io, this.socketService.jobs); |
||||
} |
||||
} |
@ -0,0 +1,82 @@
|
||||
import type { Base } from '../models'; |
||||
|
||||
export async function generateBaseIdMap( |
||||
base: Base, |
||||
idMap: Map<string, string>, |
||||
) { |
||||
idMap.set(base.project_id, base.project_id); |
||||
idMap.set(base.id, `${base.project_id}::${base.id}`); |
||||
const models = await base.getModels(); |
||||
|
||||
for (const md of models) { |
||||
idMap.set(md.id, `${base.project_id}::${base.id}::${md.id}`); |
||||
await md.getColumns(); |
||||
for (const column of md.columns) { |
||||
idMap.set(column.id, `${idMap.get(md.id)}::${column.id}`); |
||||
} |
||||
} |
||||
|
||||
return models; |
||||
} |
||||
|
||||
export function clearPrefix(text: string, prefix?: string) { |
||||
if (!prefix || prefix.length === 0) return text; |
||||
return text.replace(new RegExp(`^${prefix}_?`), ''); |
||||
} |
||||
|
||||
export function withoutNull(obj: any) { |
||||
const newObj = {}; |
||||
let found = false; |
||||
for (const [key, value] of Object.entries(obj)) { |
||||
if (value !== null) { |
||||
newObj[key] = value; |
||||
found = true; |
||||
} |
||||
} |
||||
if (!found) return null; |
||||
return newObj; |
||||
} |
||||
|
||||
export function reverseGet(map: Map<string, string>, vl: string) { |
||||
for (const [key, value] of map.entries()) { |
||||
if (vl === value) { |
||||
return key; |
||||
} |
||||
} |
||||
return undefined; |
||||
} |
||||
|
||||
export function withoutId(obj: any) { |
||||
const { id, ...rest } = obj; |
||||
return rest; |
||||
} |
||||
|
||||
export function getParentIdentifier(id: string) { |
||||
const arr = id.split('::'); |
||||
arr.pop(); |
||||
return arr.join('::'); |
||||
} |
||||
|
||||
export function getEntityIdentifier(id: string) { |
||||
const arr = id.split('::'); |
||||
return arr.pop(); |
||||
} |
||||
|
||||
export function findWithIdentifier(map: Map<string, any>, id: string) { |
||||
for (const key of map.keys()) { |
||||
if (getEntityIdentifier(key) === id) { |
||||
return map.get(key); |
||||
} |
||||
} |
||||
return undefined; |
||||
} |
||||
|
||||
export function generateUniqueName(name: string, names: string[]) { |
||||
let newName = name; |
||||
let i = 1; |
||||
while (names.includes(newName)) { |
||||
newName = `${name}_${i}`; |
||||
i++; |
||||
} |
||||
return newName; |
||||
} |
@ -0,0 +1,7 @@
|
||||
export const JOBS_QUEUE = 'jobs'; |
||||
|
||||
export enum JobTypes { |
||||
DuplicateBase = 'duplicate-base', |
||||
DuplicateModel = 'duplicate-model', |
||||
AtImport = 'at-import', |
||||
} |
@ -0,0 +1,77 @@
|
||||
import { InjectQueue } from '@nestjs/bull'; |
||||
import { Controller, HttpCode, Post, Request, UseGuards } from '@nestjs/common'; |
||||
import { Queue } from 'bull'; |
||||
import { GlobalGuard } from '../../../guards/global/global.guard'; |
||||
import { ExtractProjectIdMiddleware } from '../../../middlewares/extract-project-id/extract-project-id.middleware'; |
||||
import { SyncSource } from '../../../models'; |
||||
import { NcError } from '../../../helpers/catchError'; |
||||
import { QueueService } from '../fallback-queue.service'; |
||||
import { JobsService } from '../jobs.service'; |
||||
import { JOBS_QUEUE, JobTypes } from '../../../interface/Jobs'; |
||||
|
||||
@Controller() |
||||
@UseGuards(ExtractProjectIdMiddleware, GlobalGuard) |
||||
export class AtImportController { |
||||
activeQueue; |
||||
constructor( |
||||
@InjectQueue(JOBS_QUEUE) private readonly jobsQueue: Queue, |
||||
private readonly fallbackQueueService: QueueService, |
||||
private readonly jobsService: JobsService, |
||||
) { |
||||
this.activeQueue = process.env.NC_REDIS_URL |
||||
? this.jobsQueue |
||||
: this.fallbackQueueService; |
||||
} |
||||
|
||||
@Post('/api/v1/db/meta/import/airtable') |
||||
@HttpCode(200) |
||||
async importAirtable(@Request() req) { |
||||
const job = await this.activeQueue.add(JobTypes.AtImport, { |
||||
...req.body, |
||||
}); |
||||
|
||||
return { id: job.id, name: job.name }; |
||||
} |
||||
|
||||
@Post('/api/v1/db/meta/syncs/:syncId/trigger') |
||||
@HttpCode(200) |
||||
async triggerSync(@Request() req) { |
||||
const jobs = await this.jobsService.jobList(JobTypes.AtImport); |
||||
const fnd = jobs.find((j) => j.data.syncId === req.params.syncId); |
||||
|
||||
if (fnd) { |
||||
NcError.badRequest('Sync already in progress'); |
||||
} |
||||
|
||||
const syncSource = await SyncSource.get(req.params.syncId); |
||||
|
||||
const user = await syncSource.getUser(); |
||||
|
||||
// Treat default baseUrl as siteUrl from req object
|
||||
let baseURL = (req as any).ncSiteUrl; |
||||
|
||||
// if environment value avail use it
|
||||
// or if it's docker construct using `PORT`
|
||||
if (process.env.NC_DOCKER) { |
||||
baseURL = `http://localhost:${process.env.PORT || 8080}`; |
||||
} |
||||
|
||||
const job = await this.activeQueue.add(JobTypes.AtImport, { |
||||
syncId: req.params.syncId, |
||||
...(syncSource?.details || {}), |
||||
projectId: syncSource.project_id, |
||||
baseId: syncSource.base_id, |
||||
authToken: '', |
||||
baseURL, |
||||
user: user, |
||||
}); |
||||
|
||||
return { id: job.id, name: job.name }; |
||||
} |
||||
|
||||
@Post('/api/v1/db/meta/syncs/:syncId/abort') |
||||
@HttpCode(200) |
||||
async abortImport(@Request() req) { |
||||
return {}; |
||||
} |
||||
} |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,222 @@
|
||||
import { Readable } from 'stream'; |
||||
import sqlite3 from 'sqlite3'; |
||||
|
||||
class EntityMap { |
||||
initialized: boolean; |
||||
cols: string[]; |
||||
db: any; |
||||
|
||||
constructor(...args) { |
||||
this.initialized = false; |
||||
this.cols = args.map((arg) => processKey(arg)); |
||||
this.db = new Promise((resolve, reject) => { |
||||
const db = new sqlite3.Database(':memory:'); |
||||
|
||||
const colStatement = |
||||
this.cols.length > 0 |
||||
? this.cols.join(' TEXT, ') + ' TEXT' |
||||
: 'mappingPlaceholder TEXT'; |
||||
db.run(`CREATE TABLE mapping (${colStatement})`, (err) => { |
||||
if (err) { |
||||
console.log(err); |
||||
reject(err); |
||||
} |
||||
resolve(db); |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
async init() { |
||||
if (!this.initialized) { |
||||
this.db = await this.db; |
||||
this.initialized = true; |
||||
} |
||||
} |
||||
|
||||
destroy() { |
||||
if (this.initialized && this.db) { |
||||
this.db.close(); |
||||
} |
||||
} |
||||
|
||||
async addRow(row) { |
||||
if (!this.initialized) { |
||||
throw 'Please initialize first!'; |
||||
} |
||||
|
||||
const cols = Object.keys(row).map((key) => processKey(key)); |
||||
const colStatement = cols.map((key) => `'${key}'`).join(', '); |
||||
const questionMarks = cols.map(() => '?').join(', '); |
||||
|
||||
const promises = []; |
||||
|
||||
for (const col of cols.filter((col) => !this.cols.includes(col))) { |
||||
promises.push( |
||||
new Promise((resolve, reject) => { |
||||
this.db.run(`ALTER TABLE mapping ADD '${col}' TEXT;`, (err) => { |
||||
if (err) { |
||||
console.log(err); |
||||
reject(err); |
||||
} |
||||
this.cols.push(col); |
||||
resolve(true); |
||||
}); |
||||
}), |
||||
); |
||||
} |
||||
|
||||
await Promise.all(promises); |
||||
|
||||
const values = Object.values(row).map((val) => { |
||||
if (typeof val === 'object') { |
||||
return `JSON::${JSON.stringify(val)}`; |
||||
} |
||||
return val; |
||||
}); |
||||
|
||||
return new Promise((resolve, reject) => { |
||||
this.db.run( |
||||
`INSERT INTO mapping (${colStatement}) VALUES (${questionMarks})`, |
||||
values, |
||||
(err) => { |
||||
if (err) { |
||||
console.log(err); |
||||
reject(err); |
||||
} |
||||
resolve(true); |
||||
}, |
||||
); |
||||
}); |
||||
} |
||||
|
||||
getRow(col, val, res = []): Promise<Record<string, any>> { |
||||
if (!this.initialized) { |
||||
throw 'Please initialize first!'; |
||||
} |
||||
return new Promise((resolve, reject) => { |
||||
col = processKey(col); |
||||
res = res.map((r) => processKey(r)); |
||||
this.db.get( |
||||
`SELECT ${ |
||||
res.length ? res.join(', ') : '*' |
||||
} FROM mapping WHERE ${col} = ?`,
|
||||
[val], |
||||
(err, rs) => { |
||||
if (err) { |
||||
console.log(err); |
||||
reject(err); |
||||
} |
||||
if (rs) { |
||||
rs = processResponseRow(rs); |
||||
} |
||||
resolve(rs); |
||||
}, |
||||
); |
||||
}); |
||||
} |
||||
|
||||
getCount(): Promise<number> { |
||||
if (!this.initialized) { |
||||
throw 'Please initialize first!'; |
||||
} |
||||
return new Promise((resolve, reject) => { |
||||
this.db.get(`SELECT COUNT(*) as count FROM mapping`, (err, rs) => { |
||||
if (err) { |
||||
console.log(err); |
||||
reject(err); |
||||
} |
||||
resolve(rs.count); |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
getStream(res = []): DBStream { |
||||
if (!this.initialized) { |
||||
throw 'Please initialize first!'; |
||||
} |
||||
res = res.map((r) => processKey(r)); |
||||
return new DBStream( |
||||
this.db, |
||||
`SELECT ${res.length ? res.join(', ') : '*'} FROM mapping`, |
||||
); |
||||
} |
||||
|
||||
getLimit(limit, offset, res = []): Promise<Record<string, any>[]> { |
||||
if (!this.initialized) { |
||||
throw 'Please initialize first!'; |
||||
} |
||||
return new Promise((resolve, reject) => { |
||||
res = res.map((r) => processKey(r)); |
||||
this.db.all( |
||||
`SELECT ${ |
||||
res.length ? res.join(', ') : '*' |
||||
} FROM mapping LIMIT ${limit} OFFSET ${offset}`,
|
||||
(err, rs) => { |
||||
if (err) { |
||||
console.log(err); |
||||
reject(err); |
||||
} |
||||
for (let row of rs) { |
||||
row = processResponseRow(row); |
||||
} |
||||
resolve(rs); |
||||
}, |
||||
); |
||||
}); |
||||
} |
||||
} |
||||
|
||||
class DBStream extends Readable { |
||||
db: any; |
||||
stmt: any; |
||||
sql: any; |
||||
|
||||
constructor(db, sql) { |
||||
super({ objectMode: true }); |
||||
this.db = db; |
||||
this.sql = sql; |
||||
this.stmt = this.db.prepare(this.sql); |
||||
this.on('end', () => this.stmt.finalize()); |
||||
} |
||||
|
||||
_read() { |
||||
const stream = this; |
||||
this.stmt.get(function (err, result) { |
||||
if (err) { |
||||
stream.emit('error', err); |
||||
} else { |
||||
if (result) { |
||||
result = processResponseRow(result); |
||||
} |
||||
stream.push(result || null); |
||||
} |
||||
}); |
||||
} |
||||
} |
||||
|
||||
function processResponseRow(res: any) { |
||||
for (const key of Object.keys(res)) { |
||||
if (res[key] && res[key].startsWith('JSON::')) { |
||||
try { |
||||
res[key] = JSON.parse(res[key].replace('JSON::', '')); |
||||
} catch (e) { |
||||
console.log(e); |
||||
} |
||||
} |
||||
if (revertKey(key) !== key) { |
||||
res[revertKey(key)] = res[key]; |
||||
delete res[key]; |
||||
} |
||||
} |
||||
return res; |
||||
} |
||||
|
||||
function processKey(key) { |
||||
return key.replace(/'/g, "''").replace(/[A-Z]/g, (match) => `_${match}`); |
||||
} |
||||
|
||||
function revertKey(key) { |
||||
return key.replace(/''/g, "'").replace(/_[A-Z]/g, (match) => match[1]); |
||||
} |
||||
|
||||
export default EntityMap; |
@ -0,0 +1,242 @@
|
||||
import axios from 'axios'; |
||||
|
||||
// Module-level session state for the shared-base scraper: set-cookie values,
// request headers, the data-fetch URL and base metadata are filled in by
// initialize() and consumed by read()/readView()/readTemplate().
const info: any = {
  initialized: false,
};
||||
|
||||
/**
 * Load the public Airtable share page for `shareId` and scrape out the
 * session cookie, request headers, data-fetch URL and base metadata needed
 * by read()/readView(). Populates the module-level `info` object and sets
 * `info.initialized` accordingly.
 *
 * @param shareId Airtable shared-base id (the path segment after airtable.com/)
 * @throws `{ message }` object when the page is unreachable or unparsable
 */
async function initialize(shareId) {
  info.cookie = '';
  const url = `https://airtable.com/${shareId}`;

  try {
    const hreq = await axios
      .get(url, {
        // Browser-like headers; the share page is served for browsers only.
        headers: {
          accept:
            'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
          'accept-language': 'en-US,en;q=0.9',
          'sec-ch-ua':
            '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
          'sec-ch-ua-mobile': '?0',
          'sec-ch-ua-platform': '"Linux"',
          'sec-fetch-dest': 'document',
          'sec-fetch-mode': 'navigate',
          'sec-fetch-site': 'none',
          'sec-fetch-user': '?1',
          'upgrade-insecure-requests': '1',
          'User-Agent':
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.88 Safari/537.36',
        },
        // @ts-ignore
        // NOTE(review): referrerPolicy/body look like fetch() options, not
        // axios config — presumably ignored by axios; confirm before relying
        // on them.
        referrerPolicy: 'strict-origin-when-cross-origin',
        body: null,
        method: 'GET',
      })
      .then((response) => {
        // Accumulate all session cookies for the follow-up data requests.
        for (const ck of response.headers['set-cookie']) {
          info.cookie += ck.split(';')[0] + '; ';
        }
        return response.data;
      })
      .catch(() => {
        throw {
          message:
            'Invalid Shared Base ID :: Ensure www.airtable.com/<SharedBaseID> is accessible. Refer https://bit.ly/3x0OdXI for details',
        };
      });

    // Scrape `var headers = {...};` and the fetch("...") URL out of the
    // page's inline script. If the markup changes, .match(...)[0] throws
    // and is converted to the generic error in the catch below.
    info.headers = JSON.parse(
      hreq.match(/(?<=var headers =)(.*)(?=;)/g)[0].trim(),
    );
    info.link = unicodeToChar(hreq.match(/(?<=fetch\(")(.*)(?=")/g)[0].trim());
    // The fetch URL embeds base metadata as URL-encoded `k=v` pairs; fold
    // them into a single object (JSON values parsed, plain values kept).
    info.baseInfo = decodeURIComponent(info.link)
      .match(/{(.*)}/g)[0]
      .split('&')
      .reduce((result, el) => {
        try {
          return Object.assign(
            result,
            JSON.parse(el.includes('=') ? el.split('=')[1] : el),
          );
        } catch (e) {
          if (el.includes('=')) {
            return Object.assign(result, {
              [el.split('=')[0]]: el.split('=')[1],
            });
          }
        }
      }, {});
    info.baseId = info.baseInfo.applicationId;
    info.initialized = true;
  } catch (e) {
    console.log(e);
    info.initialized = false;
    // Re-throw scraper-specific errors as-is; wrap anything else in a
    // generic message.
    if (e.message) {
      throw e;
    } else {
      throw {
        message:
          'Error processing Shared Base :: Ensure www.airtable.com/<SharedBaseID> is accessible. Refer https://bit.ly/3x0OdXI for details',
      };
    }
  }
}
||||
|
||||
/**
 * Fetch the shared base's schema using the URL/headers/cookie captured by
 * initialize().
 *
 * @returns `{ schema, baseId, baseInfo }`
 * @throws `{ message }` object when not initialized or the request fails
 */
async function read() {
  if (info.initialized) {
    const resreq = await axios('https://airtable.com' + info.link, {
      headers: {
        accept: '*/*',
        'accept-language': 'en-US,en;q=0.9',
        'sec-ch-ua':
          '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Linux"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'User-Agent':
          'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.88 Safari/537.36',
        'x-time-zone': 'Europe/Berlin',
        cookie: info.cookie,
        // Headers scraped from the share page take precedence.
        ...info.headers,
      },
      // @ts-ignore
      // NOTE(review): referrerPolicy/body are fetch() options — presumably
      // ignored by axios; confirm.
      referrerPolicy: 'no-referrer',
      body: null,
      method: 'GET',
    })
      .then((response) => {
        return response.data;
      })
      .catch(() => {
        throw {
          message:
            'Error Reading :: Ensure www.airtable.com/<SharedBaseID> is accessible. Refer https://bit.ly/3x0OdXI for details',
        };
      });

    // resreq is already response.data; `.data` here selects the nested
    // payload within Airtable's envelope.
    return {
      schema: resreq.data,
      baseId: info.baseId,
      baseInfo: info.baseInfo,
    };
  } else {
    throw {
      message: 'Error Initializing :: please try again !!',
    };
  }
}
||||
|
||||
/**
 * Fetch the data of a single view from the shared base, signing the request
 * with the access-policy fields captured by initialize().
 *
 * @param viewId Airtable view id
 * @returns `{ view }` — the nested payload of the readData response
 * @throws `{ message }` object when not initialized or the request fails
 */
async function readView(viewId) {
  if (info.initialized) {
    const resreq = await axios(
      `https://airtable.com/v0.3/view/${viewId}/readData?` +
        `stringifiedObjectParams=${encodeURIComponent('{}')}&requestId=${
          info.baseInfo.requestId
        }&accessPolicy=${encodeURIComponent(
          // The signed access policy must be replayed exactly as scraped.
          JSON.stringify({
            allowedActions: info.baseInfo.allowedActions,
            shareId: info.baseInfo.shareId,
            applicationId: info.baseInfo.applicationId,
            generationNumber: info.baseInfo.generationNumber,
            expires: info.baseInfo.expires,
            signature: info.baseInfo.signature,
          }),
        )}`,
      {
        headers: {
          accept: '*/*',
          'accept-language': 'en-US,en;q=0.9',
          'sec-ch-ua':
            '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
          'sec-ch-ua-mobile': '?0',
          'sec-ch-ua-platform': '"Linux"',
          'sec-fetch-dest': 'empty',
          'sec-fetch-mode': 'cors',
          'sec-fetch-site': 'same-origin',
          'User-Agent':
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.88 Safari/537.36',
          'x-time-zone': 'Europe/Berlin',
          cookie: info.cookie,
          ...info.headers,
        },
        // @ts-ignore
        // NOTE(review): fetch()-style options — presumably ignored by axios.
        referrerPolicy: 'no-referrer',
        body: null,
        method: 'GET',
      },
    )
      .then((response) => {
        return response.data;
      })
      .catch(() => {
        throw {
          message:
            'Error Reading View :: Ensure www.airtable.com/<SharedBaseID> is accessible. Refer https://bit.ly/3x0OdXI for details',
        };
      });
    // resreq is already response.data; `.data` selects the nested payload.
    return { view: resreq.data };
  } else {
    throw {
      message: 'Error Initializing :: please try again !!',
    };
  }
}
||||
|
||||
/**
 * Fetch a public Airtable template's metadata. Bootstraps the session via
 * initialize() with a known public share id when not yet initialized.
 *
 * @param templateId Airtable template id
 * @returns `{ template }` — the raw exploreApplications response body
 * @throws `{ message }` object when the request fails
 */
async function readTemplate(templateId) {
  if (!info.initialized) {
    // Any accessible public share works to establish cookies/headers.
    await initialize('shrO8aYf3ybwSdDKn');
  }
  const resreq = await axios(
    `https://www.airtable.com/v0.3/exploreApplications/${templateId}`,
    {
      headers: {
        accept: '*/*',
        'accept-language': 'en-US,en;q=0.9',
        'sec-ch-ua':
          '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Linux"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'User-Agent':
          'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.88 Safari/537.36',
        'x-time-zone': 'Europe/Berlin',
        cookie: info.cookie,
        ...info.headers,
      },
      // @ts-ignore
      // NOTE(review): referrer/referrerPolicy/body/mode/credentials are
      // fetch() options — presumably ignored by axios; confirm.
      referrer: 'https://www.airtable.com/',
      referrerPolicy: 'same-origin',
      body: null,
      method: 'GET',
      mode: 'cors',
      credentials: 'include',
    },
  )
    .then((response) => {
      return response.data;
    })
    .catch(() => {
      throw {
        message:
          'Error Fetching :: Ensure www.airtable.com/templates/featured/<TemplateID> is accessible.',
      };
    });
  return { template: resreq };
}
||||
|
||||
function unicodeToChar(text) { |
||||
return text.replace(/\\u[\dA-F]{4}/gi, function (match) { |
||||
return String.fromCharCode(parseInt(match.replace(/\\u/g, ''), 16)); |
||||
}); |
||||
} |
||||
|
||||
export default { |
||||
initialize, |
||||
read, |
||||
readView, |
||||
readTemplate, |
||||
}; |
@ -0,0 +1,362 @@
|
||||
/* eslint-disable no-async-promise-executor */ |
||||
import { RelationTypes, UITypes } from 'nocodb-sdk'; |
||||
import EntityMap from './EntityMap'; |
||||
import type { BulkDataAliasService } from '../../../../services/bulk-data-alias.service'; |
||||
import type { TablesService } from '../../../../services/tables.service'; |
||||
// @ts-ignore
|
||||
import type { AirtableBase } from 'airtable/lib/airtable_base'; |
||||
import type { TableType } from 'nocodb-sdk'; |
||||
|
||||
// Records inserted per bulk request when importing table data.
const BULK_DATA_BATCH_SIZE = 500;
// Link rows inserted per bulk request when importing m2m associative data.
const ASSOC_BULK_DATA_BATCH_SIZE = 1000;
// Max records processed concurrently before the read stream is paused.
const BULK_PARALLEL_PROCESS = 5;

// Services the import helpers need from the caller.
interface AirtableImportContext {
  bulkDataService: BulkDataAliasService;
  tableService: TablesService;
}
||||
|
||||
/**
 * Page through all records of an Airtable table and buffer them into an
 * EntityMap (SQLite-backed staging store).
 *
 * @param table Airtable table; `title` is used as the select target
 * @param fields optional field projection passed to Airtable's select()
 * @param base Airtable base client
 * @param logBasic progress logger
 * @returns EntityMap containing every record as `{ id, ...fields }`
 */
async function readAllData({
  table,
  fields,
  base,
  logBasic = (_str) => {},
  services,
}: {
  table: { title?: string };
  fields?;
  base: AirtableBase;
  logBasic?: (string) => void;
  logDetailed?: (string) => void;
  services: AirtableImportContext;
  // NOTE(review): `services` is destructured but unused in this function.
}): Promise<EntityMap> {
  return new Promise((resolve, reject) => {
    let data = null;

    const selectParams: any = {
      pageSize: 100,
    };

    if (fields) selectParams.fields = fields;

    base(table.title)
      .select(selectParams)
      .eachPage(
        async function page(records, fetchNextPage) {
          // Lazily create the staging store on the first page.
          if (!data) {
            data = new EntityMap();
            await data.init();
          }

          for await (const record of records) {
            await data.addRow({ id: record.id, ...record.fields });
          }

          const tmpLength = await data.getCount();

          logBasic(
            `:: Reading '${table.title}' data :: ${Math.max(
              1,
              tmpLength - records.length,
            )} - ${tmpLength}`,
          );

          // To fetch the next page of records, call `fetchNextPage`.
          // If there are more records, `page` will get called again.
          // If there are no more records, `done` will get called.
          fetchNextPage();
        },
        async function done(err) {
          if (err) {
            console.error(err);
            return reject(err);
          }
          resolve(data);
        },
      );
  });
}
||||
|
||||
/**
 * Import all records of one Airtable table into the NocoDB project: read
 * everything into an EntityMap, then stream it back out in batches of
 * BULK_DATA_BATCH_SIZE through the bulk-data service. At most
 * BULK_PARALLEL_PROCESS records are transformed concurrently (the stream is
 * paused/resumed to apply backpressure).
 *
 * @returns the populated EntityMap (reused by importLTARData), or null on error
 */
export async function importData({
  projectName,
  table,
  base,
  nocoBaseDataProcessing_v2,
  sDB,
  logDetailed = (_str) => {},
  logBasic = (_str) => {},
  services,
}: {
  projectName: string;
  table: { title?: string; id?: string };
  fields?;
  base: AirtableBase;
  logBasic: (string) => void;
  logDetailed: (string) => void;
  nocoBaseDataProcessing_v2;
  sDB;
  services: AirtableImportContext;
}): Promise<EntityMap> {
  try {
    // @ts-ignore
    // NOTE(review): `services` is not forwarded here; readAllData currently
    // ignores it, so this is harmless today — confirm if that changes.
    const records = await readAllData({
      table,
      base,
      logDetailed,
      logBasic,
    });

    await new Promise(async (resolve) => {
      const readable = records.getStream();
      const allRecordsCount = await records.getCount();
      const promises = [];
      let tempData = [];
      let importedCount = 0;
      let activeProcess = 0;
      readable.on('data', async (record) => {
        promises.push(
          new Promise(async (resolve) => {
            activeProcess++;
            // Backpressure: stop reading while enough work is in flight.
            if (activeProcess >= BULK_PARALLEL_PROCESS) readable.pause();
            const { id: rid, ...fields } = record;
            const r = await nocoBaseDataProcessing_v2(sDB, table, {
              id: rid,
              fields,
            });
            tempData.push(r);

            if (tempData.length >= BULK_DATA_BATCH_SIZE) {
              // splice empties tempData so concurrent handlers don't
              // re-insert the same batch.
              let insertArray = tempData.splice(0, tempData.length);

              await services.bulkDataService.bulkDataInsert({
                projectName,
                tableName: table.title,
                body: insertArray,
                cookie: {},
              });

              logBasic(
                `:: Importing '${
                  table.title
                }' data :: ${importedCount} - ${Math.min(
                  importedCount + BULK_DATA_BATCH_SIZE,
                  allRecordsCount,
                )}`,
              );
              importedCount += insertArray.length;
              insertArray = [];
            }
            activeProcess--;
            if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume();
            resolve(true);
          }),
        );
      });
      readable.on('end', async () => {
        // Wait for in-flight record handlers, then flush the tail batch.
        await Promise.all(promises);
        if (tempData.length > 0) {
          await services.bulkDataService.bulkDataInsert({
            projectName,
            tableName: table.title,
            body: tempData,
            cookie: {},
          });

          logBasic(
            `:: Importing '${
              table.title
            }' data :: ${importedCount} - ${Math.min(
              importedCount + BULK_DATA_BATCH_SIZE,
              allRecordsCount,
            )}`,
          );
          importedCount += tempData.length;
          tempData = [];
        }
        resolve(true);
      });
    });

    return records;
  } catch (e) {
    console.log(e);
    return null;
  }
}
||||
|
||||
/**
 * Import many-to-many link rows for one table: find its m2m LTAR columns,
 * resolve their associative-table metadata, then stream the buffered records
 * and bulk-insert (child, parent) id pairs in ASSOC_BULK_DATA_BATCH_SIZE
 * batches.
 *
 * @param insertedAssocRef shared registry of associative tables already
 *        imported; mutated here so each m2m table is inserted only once
 * @param records optional pre-read EntityMap (from importData); re-read from
 *        Airtable when absent
 * @returns number of link rows inserted for this table
 */
export async function importLTARData({
  table,
  fields,
  base,
  projectName,
  insertedAssocRef = {},
  logDetailed = (_str) => {},
  logBasic = (_str) => {},
  records,
  atNcAliasRef,
  ncLinkMappingTable,
  syncDB,
  services,
}: {
  projectName: string;
  table: { title?: string; id?: string };
  fields;
  base: AirtableBase;
  logDetailed: (string) => void;
  logBasic: (string) => void;
  insertedAssocRef: { [assocTableId: string]: boolean };
  records?: EntityMap;
  atNcAliasRef: {
    [ncTableId: string]: {
      [ncTitle: string]: string;
    };
  };
  ncLinkMappingTable: Record<string, Record<string, any>>[];
  syncDB;
  services: AirtableImportContext;
}) {
  const assocTableMetas: Array<{
    modelMeta: { id?: string; title?: string };
    colMeta: { title?: string };
    curCol: { title?: string };
    refCol: { title?: string };
  }> = [];
  const allData =
    records ||
    (await readAllData({
      table,
      fields,
      base,
      logDetailed,
      logBasic,
      services,
    }));

  const modelMeta: any =
    await services.tableService.getTableWithAccessibleViews({
      tableId: table.id,
      user: syncDB.user,
    });

  for (const colMeta of modelMeta.columns) {
    // skip columns which are not LTAR and Many to many
    if (
      colMeta.uidt !== UITypes.LinkToAnotherRecord ||
      colMeta.colOptions.type !== RelationTypes.MANY_TO_MANY
    ) {
      continue;
    }

    // skip if already inserted
    if (colMeta.colOptions.fk_mm_model_id in insertedAssocRef) continue;

    // self links: skip if the column under consideration is the add-on column NocoDB creates
    if (ncLinkMappingTable.every((a) => a.nc.title !== colMeta.title)) continue;

    // mark as inserted
    insertedAssocRef[colMeta.colOptions.fk_mm_model_id] = true;

    const assocModelMeta: TableType =
      (await services.tableService.getTableWithAccessibleViews({
        tableId: colMeta.colOptions.fk_mm_model_id,
        user: syncDB.user,
      })) as any;

    // extract associative table and columns meta
    assocTableMetas.push({
      modelMeta: assocModelMeta,
      colMeta,
      curCol: assocModelMeta.columns.find(
        (c) => c.id === colMeta.colOptions.fk_mm_child_column_id,
      ),
      refCol: assocModelMeta.columns.find(
        (c) => c.id === colMeta.colOptions.fk_mm_parent_column_id,
      ),
    });
  }

  let nestedLinkCnt = 0;
  // Iterate over all related M2M associative table
  for await (const assocMeta of assocTableMetas) {
    let assocTableData = [];
    let importedCount = 0;

    // extract insert data from records
    await new Promise((resolve) => {
      const promises = [];
      const readable = allData.getStream();
      let activeProcess = 0;
      readable.on('data', async (record) => {
        promises.push(
          new Promise(async (resolve) => {
            activeProcess++;
            // Backpressure, same scheme as importData.
            if (activeProcess >= BULK_PARALLEL_PROCESS) readable.pause();
            const { id: _atId, ...rec } = record;

            // todo: use actual alias instead of sanitized
            // Each linked id becomes one (child, parent) association row.
            assocTableData.push(
              ...(
                rec?.[atNcAliasRef[table.id][assocMeta.colMeta.title]] || []
              ).map((id) => ({
                [assocMeta.curCol.title]: record.id,
                [assocMeta.refCol.title]: id,
              })),
            );

            if (assocTableData.length >= ASSOC_BULK_DATA_BATCH_SIZE) {
              let insertArray = assocTableData.splice(0, assocTableData.length);
              logBasic(
                `:: Importing '${
                  table.title
                }' LTAR data :: ${importedCount} - ${Math.min(
                  importedCount + ASSOC_BULK_DATA_BATCH_SIZE,
                  insertArray.length,
                )}`,
              );

              await services.bulkDataService.bulkDataInsert({
                projectName,
                tableName: assocMeta.modelMeta.title,
                body: insertArray,
                cookie: {},
              });

              importedCount += insertArray.length;
              insertArray = [];
            }
            activeProcess--;
            if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume();
            resolve(true);
          }),
        );
      });
      readable.on('end', async () => {
        await Promise.all(promises);
        // NOTE(review): `>= 0` is always true (importData's flush uses
        // `> 0`), so an empty tail still triggers a bulkDataInsert with an
        // empty body — likely intended to be `> 0`; confirm before changing.
        if (assocTableData.length >= 0) {
          logBasic(
            `:: Importing '${
              table.title
            }' LTAR data :: ${importedCount} - ${Math.min(
              importedCount + ASSOC_BULK_DATA_BATCH_SIZE,
              assocTableData.length,
            )}`,
          );

          await services.bulkDataService.bulkDataInsert({
            projectName,
            tableName: assocMeta.modelMeta.title,
            body: assocTableData,
            cookie: {},
          });

          importedCount += assocTableData.length;
          assocTableData = [];
        }
        resolve(true);
      });
    });

    nestedLinkCnt += importedCount;
  }
  return nestedLinkCnt;
}
@ -0,0 +1,31 @@
|
||||
export const mapTbl = {}; |
||||
|
||||
// static mapping records between aTblId && ncId
|
||||
export const addToMappingTbl = function addToMappingTbl( |
||||
aTblId, |
||||
ncId, |
||||
ncName, |
||||
parent?, |
||||
) { |
||||
mapTbl[aTblId] = { |
||||
ncId: ncId, |
||||
ncParent: parent, |
||||
// name added to assist in quick debug
|
||||
ncName: ncName, |
||||
}; |
||||
}; |
||||
|
||||
// get NcID from airtable ID
|
||||
export const getNcIdFromAtId = function getNcIdFromAtId(aId) { |
||||
return mapTbl[aId]?.ncId; |
||||
}; |
||||
|
||||
// get nc Parent from airtable ID
|
||||
export const getNcParentFromAtId = function getNcParentFromAtId(aId) { |
||||
return mapTbl[aId]?.ncParent; |
||||
}; |
||||
|
||||
// get nc-title from airtable ID
|
||||
export const getNcNameFromAtId = function getNcNameFromAtId(aId) { |
||||
return mapTbl[aId]?.ncName; |
||||
}; |
@ -0,0 +1,126 @@
|
||||
import { InjectQueue } from '@nestjs/bull'; |
||||
import { |
||||
Body, |
||||
Controller, |
||||
HttpCode, |
||||
Param, |
||||
Post, |
||||
Request, |
||||
UseGuards, |
||||
} from '@nestjs/common'; |
||||
import { Queue } from 'bull'; |
||||
import { GlobalGuard } from '../../../guards/global/global.guard'; |
||||
import { |
||||
Acl, |
||||
ExtractProjectIdMiddleware, |
||||
} from '../../../middlewares/extract-project-id/extract-project-id.middleware'; |
||||
import { ProjectsService } from '../../../services/projects.service'; |
||||
import { Base, Model, Project } from '../../../models'; |
||||
import { generateUniqueName } from '../../../helpers/exportImportHelpers'; |
||||
import { QueueService } from '../fallback-queue.service'; |
||||
import { JOBS_QUEUE, JobTypes } from '../../../interface/Jobs'; |
||||
|
||||
/**
 * REST endpoints that enqueue duplicate-base / duplicate-model jobs.
 * Jobs go to the Bull queue when Redis is configured, otherwise to the
 * in-process fallback queue.
 */
@Controller()
@UseGuards(ExtractProjectIdMiddleware, GlobalGuard)
export class DuplicateController {
  // Either jobsQueue (Redis-backed) or fallbackQueueService, chosen once
  // at construction from NC_REDIS_URL.
  activeQueue;
  constructor(
    @InjectQueue(JOBS_QUEUE) private readonly jobsQueue: Queue,
    private readonly fallbackQueueService: QueueService,
    private readonly projectsService: ProjectsService,
  ) {
    this.activeQueue = process.env.NC_REDIS_URL
      ? this.jobsQueue
      : this.fallbackQueueService;
  }

  /**
   * Duplicate a whole base into a brand-new project (created up-front with
   * status 'job' so the UI can show progress) and enqueue the copy job.
   *
   * @param baseId optional; defaults to the project's first base
   * @returns `{ id, name }` of the enqueued job
   */
  @Post('/api/v1/db/meta/duplicate/:projectId/:baseId?')
  @HttpCode(200)
  @Acl('duplicateBase')
  async duplicateBase(
    @Request() req,
    @Param('projectId') projectId: string,
    @Param('baseId') baseId?: string,
  ) {
    const project = await Project.get(projectId);

    if (!project) {
      throw new Error(`Project not found for id '${projectId}'`);
    }

    const base = baseId
      ? await Base.get(baseId)
      : (await project.getBases())[0];

    if (!base) {
      throw new Error(`Base not found!`);
    }

    const projects = await Project.list({});

    // Derive a non-clashing "<title> copy" name across all projects.
    const uniqueTitle = generateUniqueName(
      `${project.title} copy`,
      projects.map((p) => p.title),
    );

    const dupProject = await this.projectsService.projectCreate({
      project: { title: uniqueTitle, status: 'job' },
      user: { id: req.user.id },
    });

    const job = await this.activeQueue.add(JobTypes.DuplicateBase, {
      projectId: project.id,
      baseId: base.id,
      dupProjectId: dupProject.id,
      req: {
        user: req.user,
        clientIp: req.clientIp,
      },
    });

    return { id: job.id, name: job.name };
  }

  /**
   * Duplicate a single model (table) within its own base and enqueue the
   * copy job with a pre-computed unique title.
   *
   * @returns `{ id, name }` of the enqueued job
   */
  @Post('/api/v1/db/meta/duplicate/:projectId/model/:modelId')
  @HttpCode(200)
  @Acl('duplicateModel')
  async duplicateModel(
    @Request() req,
    @Param('projectId') projectId: string,
    @Param('modelId') modelId?: string,
  ) {
    const project = await Project.get(projectId);

    if (!project) {
      throw new Error(`Project not found for id '${projectId}'`);
    }

    const model = await Model.get(modelId);

    if (!model) {
      throw new Error(`Model not found!`);
    }

    const base = await Base.get(model.base_id);

    const models = await base.getModels();

    // Derive a non-clashing "<title> copy" name within the base.
    const uniqueTitle = generateUniqueName(
      `${model.title} copy`,
      models.map((p) => p.title),
    );

    const job = await this.activeQueue.add(JobTypes.DuplicateModel, {
      projectId: project.id,
      baseId: base.id,
      modelId: model.id,
      req: {
        user: req.user,
        clientIp: req.clientIp,
      },
      title: uniqueTitle,
    });

    return { id: job.id, name: job.name };
  }
}
@ -0,0 +1,562 @@
|
||||
import { Readable } from 'stream'; |
||||
import { Process, Processor } from '@nestjs/bull'; |
||||
import { Job } from 'bull'; |
||||
import papaparse from 'papaparse'; |
||||
import { UITypes } from 'nocodb-sdk'; |
||||
import { Base, Column, Model, Project } from '../../../models'; |
||||
import { ProjectsService } from '../../../services/projects.service'; |
||||
import { findWithIdentifier } from '../../../helpers/exportImportHelpers'; |
||||
import { BulkDataAliasService } from '../../../services/bulk-data-alias.service'; |
||||
import { JOBS_QUEUE, JobTypes } from '../../../interface/Jobs'; |
||||
import { ExportService } from './export.service'; |
||||
import { ImportService } from './import.service'; |
||||
import type { LinkToAnotherRecordColumn } from '../../../models'; |
||||
|
||||
const DEBUG = false; |
||||
|
||||
const debugLog = function (...args: any[]) { |
||||
if (DEBUG) { |
||||
console.log(...args); |
||||
} |
||||
}; |
||||
|
||||
const initTime = function () { |
||||
return { |
||||
hrTime: process.hrtime(), |
||||
}; |
||||
}; |
||||
|
||||
const elapsedTime = function ( |
||||
time: { hrTime: [number, number] }, |
||||
label?: string, |
||||
) { |
||||
const elapsedS = process.hrtime(time.hrTime)[0].toFixed(3); |
||||
const elapsedMs = process.hrtime(time.hrTime)[1] / 1000000; |
||||
if (label) debugLog(`${label}: ${elapsedS}s ${elapsedMs}ms`); |
||||
time.hrTime = process.hrtime(); |
||||
}; |
||||
|
||||
@Processor(JOBS_QUEUE) |
||||
export class DuplicateProcessor { |
||||
// Injected services: export/import handle schema serialization, projects
// manage status/cleanup, bulk-data performs the raw row inserts.
constructor(
  private readonly exportService: ExportService,
  private readonly importService: ImportService,
  private readonly projectsService: ProjectsService,
  private readonly bulkDataService: BulkDataAliasService,
) {}
||||
|
||||
/**
 * Worker for DuplicateBase jobs: serialize every table of the source base,
 * import the schema into the pre-created destination project, copy the row
 * data, then clear the destination's 'job' status. On any failure the
 * destination project is soft-deleted before rethrowing.
 *
 * @param job carries projectId, baseId, dupProjectId and the originating req
 */
@Process(JobTypes.DuplicateBase)
async duplicateBase(job: Job) {
  const hrTime = initTime();

  const { projectId, baseId, dupProjectId, req } = job.data;

  const project = await Project.get(projectId);
  const dupProject = await Project.get(dupProjectId);
  const base = await Base.get(baseId);

  try {
    if (!project || !dupProject || !base) {
      throw new Error(`Project or base not found!`);
    }

    const user = (req as any).user;

    const models = (await base.getModels()).filter(
      // TODO revert this when issue with cache is fixed
      (m) => m.base_id === base.id && !m.mm && m.type === 'table',
    );

    const exportedModels = await this.exportService.serializeModels({
      modelIds: models.map((m) => m.id),
    });

    elapsedTime(hrTime, 'serializeModels');

    if (!exportedModels) {
      throw new Error(`Export failed for base '${base.id}'`);
    }

    // Hydrate the bases list on the destination project.
    await dupProject.getBases();

    const dupBase = dupProject.bases[0];

    elapsedTime(hrTime, 'projectCreate');

    // idMap translates source ids -> destination ids for the data copy.
    const idMap = await this.importService.importModels({
      user,
      projectId: dupProject.id,
      baseId: dupBase.id,
      data: exportedModels,
      req: req,
    });

    elapsedTime(hrTime, 'importModels');

    if (!idMap) {
      throw new Error(`Import failed for base '${base.id}'`);
    }

    await this.importModelsData({
      idMap,
      sourceProject: project,
      sourceModels: models,
      destProject: dupProject,
      destBase: dupBase,
      hrTime,
    });

    // Clear the 'job' placeholder status so the project becomes usable.
    await this.projectsService.projectUpdate({
      projectId: dupProject.id,
      project: {
        status: null,
      },
    });
  } catch (e) {
    // Roll back the half-built duplicate before propagating the failure.
    if (dupProject?.id) {
      await this.projectsService.projectSoftDelete({
        projectId: dupProject.id,
      });
    }
    throw e;
  }
}
||||
|
||||
/**
 * Worker for DuplicateModel jobs: serialize one table, re-import it into the
 * same base under a new title, then copy its rows. For related models only
 * the belongs-to link columns (plus primary key) are re-imported so existing
 * rows get linked to the duplicated table.
 *
 * @param job carries projectId, baseId, modelId, the new title and the req
 */
@Process(JobTypes.DuplicateModel)
async duplicateModel(job: Job) {
  const hrTime = initTime();

  const { projectId, baseId, modelId, title, req } = job.data;

  const project = await Project.get(projectId);
  const base = await Base.get(baseId);

  const user = (req as any).user;

  const models = (await base.getModels()).filter(
    (m) => !m.mm && m.type === 'table',
  );

  const sourceModel = models.find((m) => m.id === modelId);

  await sourceModel.getColumns();

  // Models the source links to; their schemas are passed as external
  // context so LTAR columns can be rewired.
  const relatedModelIds = sourceModel.columns
    .filter((col) => col.uidt === UITypes.LinkToAnotherRecord)
    .map((col) => col.colOptions.fk_related_model_id)
    .filter((id) => id);

  const relatedModels = models.filter((m) => relatedModelIds.includes(m.id));

  const exportedModel = (
    await this.exportService.serializeModels({
      modelIds: [modelId],
    })
  )[0];

  elapsedTime(hrTime, 'serializeModel');

  if (!exportedModel) {
    throw new Error(`Export failed for base '${base.id}'`);
  }

  exportedModel.model.title = title;
  exportedModel.model.table_name = title.toLowerCase().replace(/ /g, '_');

  const idMap = await this.importService.importModels({
    projectId,
    baseId,
    data: [exportedModel],
    user,
    req,
    externalModels: relatedModels,
  });

  elapsedTime(hrTime, 'reimportModelSchema');

  if (!idMap) {
    throw new Error(`Import failed for model '${modelId}'`);
  }

  // For each related model, restrict the data copy to its primary key and
  // the belongs-to columns pointing back at the duplicated model.
  const fields: Record<string, string[]> = {};

  for (const md of relatedModels) {
    const bts = md.columns
      .filter(
        (c) =>
          c.uidt === UITypes.LinkToAnotherRecord &&
          c.colOptions.type === 'bt' &&
          c.colOptions.fk_related_model_id === modelId,
      )
      .map((c) => c.id);

    if (bts.length > 0) {
      fields[md.id] = [md.primaryKey.id];
      fields[md.id].push(...bts);
    }
  }

  await this.importModelsData({
    idMap,
    sourceProject: project,
    sourceModels: [sourceModel],
    destProject: project,
    destBase: base,
    hrTime,
    modelFieldIds: fields,
    externalModels: relatedModels,
  });

  elapsedTime(hrTime, 'reimportModelData');

  // console.log('exportedModel', exportedModel);
}
||||
|
||||
  /**
   * Imports table row data and many-to-many link rows for the given source
   * models into the destination project/base, then back-fills belongs-to
   * references held by models outside the duplicated set.
   *
   * Rows arrive as CSV via `exportService.streamModelData` and are parsed
   * incrementally (papaparse) so arbitrarily large tables never need to fit
   * in memory; parsed rows are buffered and bulk-inserted in chunks with
   * foreign-key checks disabled (links may point at rows inserted later).
   *
   * @param param.idMap          maps source ids to destination ids
   * @param param.sourceProject  project the data is exported from
   * @param param.sourceModels   models whose rows/links are copied
   * @param param.destProject    project the data is imported into
   * @param param.destBase       destination base (used for column lookups)
   * @param param.hrTime         running timer handle consumed by elapsedTime()
   * @param param.modelFieldIds  per external-model subset of field ids to copy
   * @param param.externalModels models outside the set holding bt links into it
   */
  async importModelsData(param: {
    idMap: Map<string, string>;
    sourceProject: Project;
    sourceModels: Model[];
    destProject: Project;
    destBase: Base;
    hrTime: { hrTime: [number, number] };
    modelFieldIds?: Record<string, string[]>;
    externalModels?: Model[];
  }) {
    const {
      idMap,
      sourceProject,
      sourceModels,
      destProject,
      destBase,
      hrTime,
      modelFieldIds,
      externalModels,
    } = param;

    // junction (mm) tables already copied — shared between models, so each
    // link table must only be imported once
    const handledLinks = [];
    const lChunks: Record<string, any[]> = {}; // fk_mm_model_id: { rowId, childId }[]

    // Flushes every buffered link chunk into its junction table.
    // Errors are logged and swallowed so one bad chunk does not abort the
    // whole import (best-effort, matching the data pass below).
    const insertChunks = async () => {
      for (const [k, v] of Object.entries(lChunks)) {
        try {
          if (v.length === 0) continue;
          await this.bulkDataService.bulkDataInsert({
            projectName: destProject.id,
            tableName: k,
            body: v,
            cookie: null,
            chunkSize: 1000,
            foreign_key_checks: false,
            raw: true,
          });
          lChunks[k] = [];
        } catch (e) {
          console.log(e);
        }
      }
    };

    for (const sourceModel of sourceModels) {
      const dataStream = new Readable({
        read() {},
      });

      const linkStream = new Readable({
        read() {},
      });

      // Intentionally not awaited: the exporter pushes CSV into the two
      // streams while papaparse consumes them below.
      this.exportService.streamModelData({
        dataStream,
        linkStream,
        projectId: sourceProject.id,
        modelId: sourceModel.id,
        handledMmList: handledLinks,
      });

      // destination column names, in CSV column order
      const headers: string[] = [];
      let chunk = [];

      const model = await Model.get(findWithIdentifier(idMap, sourceModel.id));

      // ---- data pass: insert rows into the duplicated model ----
      await new Promise((resolve) => {
        papaparse.parse(dataStream, {
          newline: '\r\n',
          step: async (results, parser) => {
            if (!headers.length) {
              // First CSV row: resolve exported column ids to destination
              // column names (pause parsing while async lookups run).
              parser.pause();
              for (const header of results.data) {
                const id = idMap.get(header);
                if (id) {
                  const col = await Column.get({
                    base_id: destBase.id,
                    colId: id,
                  });
                  if (col.colOptions?.type === 'bt') {
                    // bt columns are written through their underlying FK column
                    const childCol = await Column.get({
                      base_id: destBase.id,
                      colId: col.colOptions.fk_child_column_id,
                    });
                    headers.push(childCol.column_name);
                  } else {
                    headers.push(col.column_name);
                  }
                } else {
                  debugLog('header not found', header);
                }
              }
              parser.resume();
            } else {
              if (results.errors.length === 0) {
                const row = {};
                for (let i = 0; i < headers.length; i++) {
                  // skip empty strings so column defaults / NULLs apply
                  if (results.data[i] !== '') {
                    row[headers[i]] = results.data[i];
                  }
                }
                chunk.push(row);
                if (chunk.length > 1000) {
                  // flush a full chunk; hold parsing during the insert
                  parser.pause();
                  try {
                    await this.bulkDataService.bulkDataInsert({
                      projectName: destProject.id,
                      tableName: model.id,
                      body: chunk,
                      cookie: null,
                      // chunkSize > body length => single insert statement
                      chunkSize: chunk.length + 1,
                      foreign_key_checks: false,
                      raw: true,
                    });
                  } catch (e) {
                    console.log(e);
                  }
                  chunk = [];
                  parser.resume();
                }
              }
            }
          },
          complete: async () => {
            // flush the final partial chunk
            if (chunk.length > 0) {
              try {
                await this.bulkDataService.bulkDataInsert({
                  projectName: destProject.id,
                  tableName: model.id,
                  body: chunk,
                  cookie: null,
                  chunkSize: chunk.length + 1,
                  foreign_key_checks: false,
                  raw: true,
                });
              } catch (e) {
                console.log(e);
              }
              chunk = [];
            }
            resolve(null);
          },
        });
      });

      // ---- link pass: insert mm junction rows ----
      let headersFound = false;

      // CSV column positions of the child / parent / column fields
      let childIndex = -1;
      let parentIndex = -1;
      let columnIndex = -1;

      // caches keyed by exported column id / junction model id
      const mmColumns: Record<string, Column> = {};
      const mmParentChild: any = {};

      await new Promise((resolve) => {
        papaparse.parse(linkStream, {
          newline: '\r\n',
          step: async (results, parser) => {
            if (!headersFound) {
              // locate the child/parent/column fields in the header row
              for (const [i, header] of Object.entries(results.data)) {
                if (header === 'child') {
                  childIndex = parseInt(i);
                } else if (header === 'parent') {
                  parentIndex = parseInt(i);
                } else if (header === 'column') {
                  columnIndex = parseInt(i);
                }
              }
              headersFound = true;
            } else {
              if (results.errors.length === 0) {
                const child = results.data[childIndex];
                const parent = results.data[parentIndex];
                const columnId = results.data[columnIndex];
                if (child && parent && columnId) {
                  if (mmColumns[columnId]) {
                    // column already resolved — just buffer the link row
                    const mmModelId =
                      mmColumns[columnId].colOptions.fk_mm_model_id;
                    const mm = mmParentChild[mmModelId];
                    lChunks[mmModelId].push({
                      [mm.parent]: parent,
                      [mm.child]: child,
                    });
                  } else {
                    // first occurrence of this column: resolve its junction
                    // table metadata (pause during the async lookups)
                    parser.pause();

                    // flush buffered link rows before the lookups
                    await insertChunks();

                    const col = await Column.get({
                      base_id: destBase.id,
                      colId: findWithIdentifier(idMap, columnId),
                    });

                    const colOptions =
                      await col.getColOptions<LinkToAnotherRecordColumn>();

                    const vChildCol = await colOptions.getMMChildColumn();
                    const vParentCol = await colOptions.getMMParentColumn();

                    mmParentChild[col.colOptions.fk_mm_model_id] = {
                      parent: vParentCol.column_name,
                      child: vChildCol.column_name,
                    };

                    mmColumns[columnId] = col;

                    // mark the junction table handled so later models skip it
                    handledLinks.push(col.colOptions.fk_mm_model_id);

                    const mmModelId = col.colOptions.fk_mm_model_id;

                    // create chunk
                    lChunks[mmModelId] = [];

                    // push to chunk
                    const mm = mmParentChild[mmModelId];
                    lChunks[mmModelId].push({
                      [mm.parent]: parent,
                      [mm.child]: child,
                    });

                    parser.resume();
                  }
                }
              }
            }
          },
          complete: async () => {
            // flush whatever link rows remain buffered
            await insertChunks();
            resolve(null);
          },
        });
      });

      elapsedTime(hrTime, model.title);
    }

    // ---- second pass: update external models (they hold bt links into the
    // duplicated set; only the listed fields are streamed and bulk-UPDATEd) ----
    if (externalModels) {
      for (const sourceModel of externalModels) {
        const fields = modelFieldIds?.[sourceModel.id];

        if (!fields) continue;

        const dataStream = new Readable({
          read() {},
        });

        const linkStream = new Readable({
          read() {},
        });

        this.exportService.streamModelData({
          dataStream,
          linkStream,
          projectId: sourceProject.id,
          modelId: sourceModel.id,
          handledMmList: handledLinks,
          _fieldIds: fields,
        });

        const headers: string[] = [];
        let chunk = [];

        // external model keeps its own (source) id — rows are updated in place
        const model = await Model.get(sourceModel.id);

        await new Promise((resolve) => {
          papaparse.parse(dataStream, {
            newline: '\r\n',
            step: async (results, parser) => {
              if (!headers.length) {
                // resolve header ids to destination column names, as above
                parser.pause();
                for (const header of results.data) {
                  const id = idMap.get(header);
                  if (id) {
                    const col = await Column.get({
                      base_id: destBase.id,
                      colId: id,
                    });
                    if (col.colOptions?.type === 'bt') {
                      const childCol = await Column.get({
                        base_id: destBase.id,
                        colId: col.colOptions.fk_child_column_id,
                      });
                      headers.push(childCol.column_name);
                    } else {
                      headers.push(col.column_name);
                    }
                  } else {
                    debugLog('header not found', header);
                  }
                }
                parser.resume();
              } else {
                if (results.errors.length === 0) {
                  const row = {};
                  for (let i = 0; i < headers.length; i++) {
                    if (results.data[i] !== '') {
                      row[headers[i]] = results.data[i];
                    }
                  }
                  chunk.push(row);
                  if (chunk.length > 1000) {
                    parser.pause();
                    try {
                      // bulk UPDATE (not insert): these rows already exist
                      await this.bulkDataService.bulkDataUpdate({
                        projectName: destProject.id,
                        tableName: model.id,
                        body: chunk,
                        cookie: null,
                        raw: true,
                      });
                    } catch (e) {
                      console.log(e);
                    }
                    chunk = [];
                    parser.resume();
                  }
                }
              }
            },
            complete: async () => {
              if (chunk.length > 0) {
                // NOTE(review): leftover debug logging — consider removing
                console.log('chunk', chunk);
                try {
                  await this.bulkDataService.bulkDataUpdate({
                    projectName: destProject.id,
                    tableName: model.id,
                    body: chunk,
                    cookie: null,
                    raw: true,
                  });
                } catch (e) {
                  console.log(e);
                }
                chunk = [];
              }
              resolve(null);
            },
          });
        });

        elapsedTime(hrTime, `external bt ${model.title}`);
      }
    }
  }
||||
} |
@ -0,0 +1,645 @@
|
||||
import { Readable } from 'stream'; |
||||
import { UITypes, ViewTypes } from 'nocodb-sdk'; |
||||
import { unparse } from 'papaparse'; |
||||
import { Injectable } from '@nestjs/common'; |
||||
import NcConnectionMgrv2 from '../../../utils/common/NcConnectionMgrv2'; |
||||
import { getViewAndModelByAliasOrId } from '../../../modules/datas/helpers'; |
||||
import { |
||||
clearPrefix, |
||||
generateBaseIdMap, |
||||
} from '../../../helpers/exportImportHelpers'; |
||||
import NcPluginMgrv2 from '../../../helpers/NcPluginMgrv2'; |
||||
import { NcError } from '../../../helpers/catchError'; |
||||
import { Base, Model, Project } from '../../../models'; |
||||
import { DatasService } from '../../../services/datas.service'; |
||||
import type { BaseModelSqlv2 } from '../../../db/BaseModelSqlv2'; |
||||
import type { LinkToAnotherRecordColumn, View } from '../../../models'; |
||||
|
||||
@Injectable()
export class ExportService {
  constructor(private datasService: DatasService) {}

  /**
   * Serializes schema metadata (columns, views, filters, sorts) for the given
   * model ids into a portable structure. Db-level ids are replaced with
   * structured identifiers via `idMap` so the export can be re-imported into
   * a different project/base.
   *
   * @returns array of { model, views } objects, one per requested model id
   */
  async serializeModels(param: { modelIds: string[] }) {
    const { modelIds } = param;

    const serializedModels = [];

    // db id to structured id
    const idMap = new Map<string, string>();

    // caches to avoid re-fetching the same project/base per model
    const projects: Project[] = [];
    const bases: Base[] = [];
    const modelsMap = new Map<string, Model[]>();

    for (const modelId of modelIds) {
      const model = await Model.get(modelId);

      if (!model)
        return NcError.badRequest(`Model not found for id '${modelId}'`);

      const fndProject = projects.find((p) => p.id === model.project_id);
      const project = fndProject || (await Project.get(model.project_id));

      const fndBase = bases.find((b) => b.id === model.base_id);
      const base = fndBase || (await Base.get(model.base_id));

      if (!fndProject) projects.push(project);
      if (!fndBase) bases.push(base);

      // populate idMap for every model of this base (once per base)
      if (!modelsMap.has(base.id)) {
        modelsMap.set(base.id, await generateBaseIdMap(base, idMap));
      }

      await model.getColumns();
      await model.getViews();

      // Rewrite column options in place: foreign keys get mapped ids,
      // db-only fields are stripped, formula references are remapped.
      for (const column of model.columns) {
        await column.getColOptions();
        if (column.colOptions) {
          for (const [k, v] of Object.entries(column.colOptions)) {
            switch (k) {
              case 'fk_mm_child_column_id':
              case 'fk_mm_parent_column_id':
              case 'fk_mm_model_id':
              case 'fk_parent_column_id':
              case 'fk_child_column_id':
              case 'fk_related_model_id':
              case 'fk_relation_column_id':
              case 'fk_lookup_column_id':
              case 'fk_rollup_column_id':
                // replace db ids with structured ids
                column.colOptions[k] = idMap.get(v as string);
                break;
              case 'options':
                // select options: drop per-row db identifiers
                for (const o of column.colOptions['options']) {
                  delete o.id;
                  delete o.fk_column_id;
                }
                break;
              case 'formula':
                // remap every {{columnId}} reference inside the formula text
                column.colOptions[k] = column.colOptions[k].replace(
                  /(?<=\{\{).*?(?=\}\})/gm,
                  (match) => idMap.get(match),
                );
                break;
              case 'id':
              case 'created_at':
              case 'updated_at':
              case 'fk_column_id':
                // db-local bookkeeping — not portable
                delete column.colOptions[k];
                break;
            }
          }
        }
      }

      for (const view of model.views) {
        // view ids are namespaced under their model's structured id
        idMap.set(view.id, `${idMap.get(model.id)}::${view.id}`);
        await view.getColumns();
        await view.getFilters();
        await view.getSorts();
        if (view.filter) {
          const export_filters = [];
          for (const fl of view.filter.children) {
            const tempFl = {
              id: `${idMap.get(view.id)}::${fl.id}`,
              fk_column_id: idMap.get(fl.fk_column_id),
              fk_parent_id: fl.fk_parent_id,
              is_group: fl.is_group,
              logical_op: fl.logical_op,
              comparison_op: fl.comparison_op,
              comparison_sub_op: fl.comparison_sub_op,
              value: fl.value,
            };
            // group filters carry no comparison of their own
            if (tempFl.is_group) {
              delete tempFl.comparison_op;
              delete tempFl.comparison_sub_op;
              delete tempFl.value;
            }
            export_filters.push(tempFl);
          }
          view.filter.children = export_filters;
        }

        if (view.sorts) {
          const export_sorts = [];
          for (const sr of view.sorts) {
            const tempSr = {
              fk_column_id: idMap.get(sr.fk_column_id),
              direction: sr.direction,
            };
            export_sorts.push(tempSr);
          }
          view.sorts = export_sorts;
        }

        if (view.view) {
          for (const [k, v] of Object.entries(view.view)) {
            switch (k) {
              case 'fk_column_id':
              case 'fk_cover_image_col_id':
              case 'fk_grp_col_id':
                view.view[k] = idMap.get(v as string);
                break;
              case 'meta':
                // kanban meta keys are column ids — remap keys and the
                // fk_column_id of every option inside
                if (view.type === ViewTypes.KANBAN) {
                  const meta = JSON.parse(view.view.meta as string) as Record<
                    string,
                    any
                  >;
                  for (const [k, v] of Object.entries(meta)) {
                    const colId = idMap.get(k as string);
                    for (const op of v) {
                      op.fk_column_id = idMap.get(op.fk_column_id);
                      delete op.id;
                    }
                    meta[colId] = v;
                    delete meta[k];
                  }
                  view.view.meta = meta;
                }
                break;
              case 'created_at':
              case 'updated_at':
              case 'fk_view_id':
              case 'project_id':
              case 'base_id':
              case 'uuid':
                // db-local bookkeeping — not portable
                delete view.view[k];
                break;
            }
          }
        }
      }

      serializedModels.push({
        model: {
          id: idMap.get(model.id),
          prefix: project.prefix,
          title: model.title,
          table_name: clearPrefix(model.table_name, project.prefix),
          meta: model.meta,
          columns: model.columns.map((column) => ({
            id: idMap.get(column.id),
            ai: column.ai,
            column_name: column.column_name,
            cc: column.cc,
            cdf: column.cdf,
            meta: column.meta,
            pk: column.pk,
            pv: column.pv,
            order: column.order,
            rqd: column.rqd,
            system: column.system,
            uidt: column.uidt,
            title: column.title,
            un: column.un,
            unique: column.unique,
            colOptions: column.colOptions,
          })),
        },
        views: model.views.map((view) => ({
          id: idMap.get(view.id),
          is_default: view.is_default,
          type: view.type,
          meta: view.meta,
          order: view.order,
          title: view.title,
          show: view.show,
          show_system_fields: view.show_system_fields,
          filter: view.filter,
          sorts: view.sorts,
          lock_type: view.lock_type,
          columns: view.columns.map((column) => {
            // strip db-local fields, remap the column reference
            const {
              id,
              fk_view_id,
              fk_column_id,
              project_id,
              base_id,
              created_at,
              updated_at,
              uuid,
              ...rest
            } = column as any;
            return {
              fk_column_id: idMap.get(fk_column_id),
              ...rest,
            };
          }),
          view: view.view,
        })),
      });
    }

    return serializedModels;
  }

  /**
   * Streams a model's rows as CSV into `dataStream` and its many-to-many
   * link rows into `linkStream`.
   *
   * Column headers are emitted as structured ids (project::base::model::col)
   * rather than titles; bt relations are exported through their underlying
   * FK column. Junction tables listed in `handledMmList` are skipped and
   * newly exported ones are appended to it (the list is shared by callers
   * looping over several models).
   *
   * @param param._fieldIds optional subset of column ids to export
   */
  async streamModelData(param: {
    dataStream: Readable;
    linkStream: Readable;
    projectId: string;
    modelId: string;
    viewId?: string;
    handledMmList?: string[];
    _fieldIds?: string[];
  }) {
    const { dataStream, linkStream, handledMmList } = param;

    const { model, view } = await getViewAndModelByAliasOrId({
      projectName: param.projectId,
      tableName: param.modelId,
      viewName: param.viewId,
    });

    const base = await Base.get(model.base_id);

    await model.getColumns();

    // fk column id -> structured id of the bt column it backs
    const btMap = new Map<string, string>();

    for (const column of model.columns.filter(
      (col) =>
        col.uidt === UITypes.LinkToAnotherRecord &&
        col.colOptions?.type === 'bt',
    )) {
      await column.getColOptions();
      const fkCol = model.columns.find(
        (c) => c.id === column.colOptions?.fk_child_column_id,
      );
      if (fkCol) {
        // replace bt column with fk column if it is in _fieldIds
        if (param._fieldIds && param._fieldIds.includes(column.id)) {
          param._fieldIds.push(fkCol.id);
          const btIndex = param._fieldIds.indexOf(column.id);
          param._fieldIds.splice(btIndex, 1);
        }

        btMap.set(
          fkCol.id,
          `${column.project_id}::${column.base_id}::${column.fk_model_id}::${column.id}`,
        );
      }
    }

    // comma-joined column titles to request; LTAR columns are excluded when
    // no explicit field subset is given (links are exported separately)
    const fields = param._fieldIds
      ? model.columns
          .filter((c) => param._fieldIds?.includes(c.id))
          .map((c) => c.title)
          .join(',')
      : model.columns
          .filter((c) => c.uidt !== UITypes.LinkToAnotherRecord)
          .map((c) => c.title)
          .join(',');

    const mmColumns = model.columns.filter(
      (col) =>
        col.uidt === UITypes.LinkToAnotherRecord &&
        col.colOptions?.type === 'mm',
    );

    const hasLink = mmColumns.length > 0;

    dataStream.setEncoding('utf8');

    // Rewrites each row in place: keys become structured column ids,
    // bt FK values are re-keyed, attachments are JSON-stringified and
    // derived columns (formula/lookup/rollup/barcode/qr) are dropped.
    const formatData = (data: any) => {
      for (const row of data) {
        for (const [k, v] of Object.entries(row)) {
          const col = model.columns.find((c) => c.title === k);
          if (col) {
            const colId = `${col.project_id}::${col.base_id}::${col.fk_model_id}::${col.id}`;
            switch (col.uidt) {
              case UITypes.ForeignKey:
                {
                  if (btMap.has(col.id)) {
                    row[btMap.get(col.id)] = v;
                    delete row[k];
                  }
                }
                break;
              case UITypes.Attachment:
                try {
                  row[colId] = JSON.stringify(v);
                } catch (e) {
                  row[colId] = v;
                }
                break;
              case UITypes.Formula:
              case UITypes.Lookup:
              case UITypes.Rollup:
              case UITypes.Barcode:
              case UITypes.QrCode:
                // skip these types
                break;
              default:
                row[colId] = v;
                break;
            }
            delete row[k];
          }
        }
      }
      return { data };
    };

    const baseModel = await Model.getBaseModelSQL({
      id: model.id,
      viewId: view?.id,
      dbDriver: await NcConnectionMgrv2.get(base),
    });

    const limit = 200;
    const offset = 0;

    try {
      await this.recursiveRead(
        formatData,
        baseModel,
        dataStream,
        model,
        view,
        offset,
        limit,
        fields,
        true,
      );
    } catch (e) {
      console.error(e);
      throw e;
    }

    if (hasLink) {
      linkStream.setEncoding('utf8');

      for (const mm of mmColumns) {
        // skip junction tables already exported by a previous model
        if (handledMmList.includes(mm.colOptions?.fk_mm_model_id)) continue;

        const mmModel = await Model.get(mm.colOptions?.fk_mm_model_id);

        await mmModel.getColumns();

        const childColumn = mmModel.columns.find(
          (col) => col.id === mm.colOptions?.fk_mm_child_column_id,
        );

        const parentColumn = mmModel.columns.find(
          (col) => col.id === mm.colOptions?.fk_mm_parent_column_id,
        );

        const childColumnTitle = childColumn.title;
        const parentColumnTitle = parentColumn.title;

        const mmFields = mmModel.columns
          .filter((c) => c.uidt === UITypes.ForeignKey)
          .map((c) => c.title)
          .join(',');

        // normalize junction rows to the { column, child, parent } shape
        // consumed by the import side
        const mmFormatData = (data: any) => {
          data.map((d) => {
            d.column = mm.id;
            d.child = d[childColumnTitle];
            d.parent = d[parentColumnTitle];
            delete d[childColumnTitle];
            delete d[parentColumnTitle];
            return d;
          });
          return { data };
        };

        const mmLimit = 200;
        const mmOffset = 0;

        // the junction table may live in a different base
        const mmBase =
          mmModel.base_id === base.id ? base : await Base.get(mmModel.base_id);

        const mmBaseModel = await Model.getBaseModelSQL({
          id: mmModel.id,
          dbDriver: await NcConnectionMgrv2.get(mmBase),
        });

        try {
          await this.recursiveLinkRead(
            mmFormatData,
            mmBaseModel,
            linkStream,
            mmModel,
            undefined,
            mmOffset,
            mmLimit,
            mmFields,
            true,
          );
        } catch (e) {
          console.error(e);
          throw e;
        }

        handledMmList.push(mm.colOptions?.fk_mm_model_id);
      }

      linkStream.push(null);
    } else {
      // no links — still terminate the stream so consumers complete
      linkStream.push(null);
    }
  }

  /**
   * Recursively pages through a model's rows (`limit` per page) and pushes
   * them to `stream` as CSV. `header` is true only for the first page; later
   * pages are prefixed with '\r\n' and emitted without a header row. The
   * stream is closed (push(null)) on the last page.
   *
   * NOTE(review): a rejection from getDataList is not handled here (no
   * .catch), so the returned promise would never settle on a data-layer
   * error — confirm whether that can occur in practice.
   */
  async recursiveRead(
    formatter: (data: any) => { data: any },
    baseModel: BaseModelSqlv2,
    stream: Readable,
    model: Model,
    view: View,
    offset: number,
    limit: number,
    fields: string,
    header = false,
  ): Promise<void> {
    return new Promise((resolve, reject) => {
      this.datasService
        .getDataList({
          model,
          view,
          query: { limit, offset, fields },
          baseModel,
        })
        .then((result) => {
          try {
            if (!header) {
              // row separator between pages
              stream.push('\r\n');
            }
            const { data } = formatter(result.list);
            stream.push(unparse(data, { header }));
            if (result.pageInfo.isLastPage) {
              stream.push(null);
              resolve();
            } else {
              this.recursiveRead(
                formatter,
                baseModel,
                stream,
                model,
                view,
                offset + limit,
                limit,
                fields,
              ).then(resolve);
            }
          } catch (e) {
            reject(e);
          }
        });
    });
  }

  /**
   * Link-table variant of recursiveRead: pages through junction rows and
   * pushes CSV to `linkStream`. Unlike recursiveRead it does NOT close the
   * stream on the last page — the caller streams several junction tables
   * into the same stream and closes it afterwards.
   */
  async recursiveLinkRead(
    formatter: (data: any) => { data: any },
    baseModel: BaseModelSqlv2,
    linkStream: Readable,
    model: Model,
    view: View,
    offset: number,
    limit: number,
    fields: string,
    header = false,
  ): Promise<void> {
    return new Promise((resolve, reject) => {
      this.datasService
        .getDataList({
          model,
          view,
          query: { limit, offset, fields },
          baseModel,
        })
        .then((result) => {
          try {
            if (!header) {
              linkStream.push('\r\n');
            }
            const { data } = formatter(result.list);
            if (data) linkStream.push(unparse(data, { header }));
            if (result.pageInfo.isLastPage) {
              resolve();
            } else {
              this.recursiveLinkRead(
                formatter,
                baseModel,
                linkStream,
                model,
                view,
                offset + limit,
                limit,
                fields,
              ).then(resolve);
            }
          } catch (e) {
            reject(e);
          }
        });
    });
  }

  /**
   * Exports a whole base to the configured storage adapter under
   * `export/<projectId>/<baseId>/<path>/`:
   *  - schema.json            serialized model metadata
   *  - data/<modelId>.csv     row data, one file per table
   *  - data/links.csv         all mm link rows, concatenated
   *
   * @returns the destination path the export was written to
   */
  async exportBase(param: { path: string; baseId: string }) {
    const base = await Base.get(param.baseId);

    if (!base)
      throw NcError.badRequest(`Base not found for id '${param.baseId}'`);

    const project = await Project.get(base.project_id);

    const models = (await base.getModels()).filter(
      // TODO revert this when issue with cache is fixed
      (m) => m.base_id === base.id && !m.mm && m.type === 'table',
    );

    const exportedModels = await this.serializeModels({
      modelIds: models.map((m) => m.id),
    });

    const exportData = {
      id: `${project.id}::${base.id}`,
      models: exportedModels,
    };

    const storageAdapter = await NcPluginMgrv2.storageAdapter();

    const destPath = `export/${project.id}/${base.id}/${param.path}`;

    try {
      // schema.json — the serialized metadata as a single stream
      const readableStream = new Readable({
        read() {},
      });

      readableStream.setEncoding('utf8');

      readableStream.push(JSON.stringify(exportData));

      readableStream.push(null);

      await storageAdapter.fileCreateByStream(
        `${destPath}/schema.json`,
        readableStream,
      );

      const handledMmList: string[] = [];

      // all per-model link streams are funneled into one links.csv
      const combinedLinkStream = new Readable({
        read() {},
      });

      const uploadLinkPromise = storageAdapter.fileCreateByStream(
        `${destPath}/data/links.csv`,
        combinedLinkStream,
      );

      for (const model of models) {
        const dataStream = new Readable({
          read() {},
        });

        const linkStream = new Readable({
          read() {},
        });

        // pipe this model's link CSV into the combined stream; a blank
        // line separates the per-model sections
        const linkPromise = new Promise((resolve) => {
          linkStream.on('data', (chunk) => {
            combinedLinkStream.push(chunk);
          });

          linkStream.on('end', () => {
            combinedLinkStream.push('\r\n');
            resolve(null);
          });

          linkStream.on('error', (e) => {
            // best-effort: log and let the export continue
            console.error(e);
            resolve(null);
          });
        });

        const uploadPromise = storageAdapter.fileCreateByStream(
          `${destPath}/data/${model.id}.csv`,
          dataStream,
        );

        // intentionally not awaited: fills the streams consumed above
        this.streamModelData({
          dataStream,
          linkStream,
          projectId: project.id,
          modelId: model.id,
          handledMmList,
        });

        await Promise.all([uploadPromise, linkPromise]);
      }

      // all models done — terminate the combined link stream and wait for
      // its upload to finish
      combinedLinkStream.push(null);

      await uploadLinkPromise;
    } catch (e) {
      throw NcError.badRequest(e);
    }

    return {
      path: destPath,
    };
  }
}
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,118 @@
|
||||
import { Injectable } from '@nestjs/common'; |
||||
import PQueue from 'p-queue'; |
||||
import Emittery from 'emittery'; |
||||
import { JobTypes } from '../../interface/Jobs'; |
||||
import { DuplicateProcessor } from './export-import/duplicate.processor'; |
||||
import { JobsEventService } from './jobs-event.service'; |
||||
import { AtImportProcessor } from './at-import/at-import.processor'; |
||||
|
||||
// In-memory representation of a queued job used by the fallback queue
// (when a Redis-backed Bull queue is not available).
interface Job {
  id: string; // stringified monotonically increasing queue index
  name: string; // job type (see JobTypes)
  status: string; // 'waiting' | 'active' | 'completed' | 'failed'
  data: any; // payload handed to the processor
}
||||
|
||||
@Injectable() |
||||
export class QueueService { |
||||
static queue = new PQueue({ concurrency: 1 }); |
||||
static queueIndex = 1; |
||||
static processed = 0; |
||||
static queueMemory: Job[] = []; |
||||
static emitter = new Emittery(); |
||||
|
||||
constructor( |
||||
private readonly jobsEventService: JobsEventService, |
||||
private readonly duplicateProcessor: DuplicateProcessor, |
||||
private readonly atImportProcessor: AtImportProcessor, |
||||
) { |
||||
this.emitter.on('active', (data: any) => { |
||||
const job = this.queueMemory.find( |
||||
(job) => job.id === data.id && job.name === data.name, |
||||
); |
||||
job.status = 'active'; |
||||
this.jobsEventService.onActive.apply(this.jobsEventService, [job as any]); |
||||
}); |
||||
this.emitter.on('completed', (data: any) => { |
||||
const job = this.queueMemory.find( |
||||
(job) => job.id === data.id && job.name === data.name, |
||||
); |
||||
job.status = 'completed'; |
||||
this.jobsEventService.onCompleted.apply(this.jobsEventService, [ |
||||
data as any, |
||||
]); |
||||
}); |
||||
this.emitter.on('failed', (data: { job: Job; error: Error }) => { |
||||
const job = this.queueMemory.find( |
||||
(job) => job.id === data.job.id && job.name === data.job.name, |
||||
); |
||||
job.status = 'failed'; |
||||
this.jobsEventService.onFailed.apply(this.jobsEventService, [ |
||||
data.job as any, |
||||
data.error, |
||||
]); |
||||
}); |
||||
} |
||||
|
||||
jobMap = { |
||||
[JobTypes.DuplicateBase]: { |
||||
this: this.duplicateProcessor, |
||||
fn: this.duplicateProcessor.duplicateBase, |
||||
}, |
||||
[JobTypes.DuplicateModel]: { |
||||
this: this.duplicateProcessor, |
||||
fn: this.duplicateProcessor.duplicateModel, |
||||
}, |
||||
[JobTypes.AtImport]: { |
||||
this: this.atImportProcessor, |
||||
fn: this.atImportProcessor.job, |
||||
}, |
||||
}; |
||||
|
||||
async jobWrapper(job: Job) { |
||||
this.emitter.emit('active', job); |
||||
try { |
||||
await this.jobMap[job.name].fn.apply(this.jobMap[job.name].this, [job]); |
||||
this.emitter.emit('completed', job); |
||||
} catch (error) { |
||||
this.emitter.emit('failed', { job, error }); |
||||
} |
||||
} |
||||
|
||||
get emitter() { |
||||
return QueueService.emitter; |
||||
} |
||||
|
||||
get queue() { |
||||
return QueueService.queue; |
||||
} |
||||
|
||||
get queueMemory() { |
||||
return QueueService.queueMemory; |
||||
} |
||||
|
||||
get queueIndex() { |
||||
return QueueService.queueIndex; |
||||
} |
||||
|
||||
set queueIndex(index: number) { |
||||
QueueService.queueIndex = index; |
||||
} |
||||
|
||||
async add(name: string, data: any) { |
||||
const id = `${this.queueIndex++}`; |
||||
const job = { id: `${id}`, name, status: 'waiting', data }; |
||||
this.queueMemory.push(job); |
||||
this.queue.add(() => this.jobWrapper(job)); |
||||
return { id, name }; |
||||
} |
||||
|
||||
async getJobs(types: string[] | string) { |
||||
types = Array.isArray(types) ? types : [types]; |
||||
return this.queueMemory.filter((q) => types.includes(q.status)); |
||||
} |
||||
|
||||
async getJob(id: string) { |
||||
return this.queueMemory.find((q) => q.id === id); |
||||
} |
||||
} |
@ -0,0 +1,62 @@
|
||||
import { |
||||
OnQueueActive, |
||||
OnQueueCompleted, |
||||
OnQueueFailed, |
||||
Processor, |
||||
} from '@nestjs/bull'; |
||||
import { Job } from 'bull'; |
||||
import boxen from 'boxen'; |
||||
import { EventEmitter2 } from '@nestjs/event-emitter'; |
||||
import { JOBS_QUEUE } from '../../interface/Jobs'; |
||||
|
||||
@Processor(JOBS_QUEUE) |
||||
export class JobsEventService { |
||||
constructor(private eventEmitter: EventEmitter2) {} |
||||
|
||||
@OnQueueActive() |
||||
onActive(job: Job) { |
||||
this.eventEmitter.emit('job.status', { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
status: 'active', |
||||
}); |
||||
} |
||||
|
||||
@OnQueueFailed() |
||||
onFailed(job: Job, error: Error) { |
||||
console.error( |
||||
boxen( |
||||
`---- !! JOB FAILED !! ----\nname: ${job.name}\nid:${job.id}\nerror:${error.name} (${error.message})\n\nstack: ${error.stack}`, |
||||
{ |
||||
padding: 1, |
||||
borderStyle: 'double', |
||||
borderColor: 'yellow', |
||||
}, |
||||
), |
||||
); |
||||
|
||||
this.eventEmitter.emit('job.status', { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
status: 'failed', |
||||
error: error?.message, |
||||
}); |
||||
} |
||||
|
||||
@OnQueueCompleted() |
||||
onCompleted(job: Job) { |
||||
this.eventEmitter.emit('job.status', { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
status: 'completed', |
||||
}); |
||||
} |
||||
|
||||
sendLog(job: Job, data: { message: string }) { |
||||
this.eventEmitter.emit('job.log', { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
data, |
||||
}); |
||||
} |
||||
} |
@ -0,0 +1,126 @@
|
||||
import { |
||||
ConnectedSocket, |
||||
MessageBody, |
||||
SubscribeMessage, |
||||
WebSocketGateway, |
||||
WebSocketServer, |
||||
} from '@nestjs/websockets'; |
||||
import { Server, Socket } from 'socket.io'; |
||||
import { ExecutionContextHost } from '@nestjs/core/helpers/execution-context-host'; |
||||
import { AuthGuard } from '@nestjs/passport'; |
||||
import { OnEvent } from '@nestjs/event-emitter'; |
||||
import { JobsService } from './jobs.service'; |
||||
import type { OnModuleInit } from '@nestjs/common'; |
||||
|
||||
@WebSocketGateway({
  cors: {
    origin: '*',
    allowedHeaders: ['xc-auth'],
    credentials: true,
  },
  namespace: 'jobs',
})
export class JobsGateway implements OnModuleInit {
  constructor(private readonly jobsService: JobsService) {}

  @WebSocketServer()
  server: Server;

  // Socket.io middleware: run the JWT auth guard against the handshake so
  // the authenticated user is attached to it.
  // NOTE(review): guard failures are swallowed and next() is always called,
  // so unauthenticated sockets are still admitted to the namespace — confirm
  // this is intentional (e.g. authorization enforced per-message elsewhere).
  async onModuleInit() {
    this.server.use(async (socket, next) => {
      try {
        const context = new ExecutionContextHost([socket.handshake as any]);
        const guard = new (AuthGuard('jwt'))(context);
        await guard.canActivate(context);
      } catch {}

      next();
    });
  }

  /**
   * Subscribes the client socket to a job's room (`<name>-<id>`), acking
   * with a 'subscribed' event echoing the caller's `_id`.
   *
   * Two lookup modes:
   *  - payload has EXACTLY {name, id}: verify the job exists via jobList;
   *  - anything else: treat the payload as job data and resolve it through
   *    getJobWithData.
   * No ack is emitted when the job cannot be found.
   */
  @SubscribeMessage('subscribe')
  async subscribe(
    @MessageBody()
    body: { _id: number; data: { id: string; name: string } | any },
    @ConnectedSocket() client: Socket,
  ): Promise<void> {
    const { _id, data } = body;
    if (
      // true only when the payload carries no keys besides name/id
      Object.keys(data).every((k) => ['name', 'id'].includes(k)) &&
      data?.name &&
      data?.id
    ) {
      const rooms = (await this.jobsService.jobList(data.name)).map(
        (j) => `${j.name}-${j.id}`,
      );
      const room = rooms.find((r) => r === `${data.name}-${data.id}`);
      if (room) {
        client.join(`${data.name}-${data.id}`);
        client.emit('subscribed', {
          _id,
          name: data.name,
          id: data.id,
        });
      }
    } else {
      const job = await this.jobsService.getJobWithData(data);
      if (job) {
        client.join(`${job.name}-${job.id}`);
        client.emit('subscribed', {
          _id,
          name: job.name,
          id: job.id,
        });
      }
    }
  }

  /**
   * On-demand status query: replies directly to the requesting socket with
   * the job's current status (echoing `_id` for client-side correlation).
   */
  @SubscribeMessage('status')
  async status(
    @MessageBody() body: { _id: number; data: { id: string; name: string } },
    @ConnectedSocket() client: Socket,
  ): Promise<void> {
    const { _id, data } = body;
    client.emit('status', {
      _id,
      id: data.id,
      name: data.name,
      status: await this.jobsService.jobStatus(data.id),
    });
  }

  /**
   * Internal-event relay: broadcasts a job status change to every socket
   * subscribed to that job's room.
   */
  @OnEvent('job.status')
  async sendJobStatus(data: {
    name: string;
    id: string;
    status:
      | 'completed'
      | 'waiting'
      | 'active'
      | 'delayed'
      | 'failed'
      | 'paused'
      | 'refresh';
    error?: any;
  }): Promise<void> {
    this.server.to(`${data.name}-${data.id}`).emit('status', {
      id: data.id,
      name: data.name,
      status: data.status,
      error: data.error,
    });
  }

  /**
   * Internal-event relay: broadcasts a job log line to every socket
   * subscribed to that job's room.
   */
  @OnEvent('job.log')
  async sendJobLog(data: {
    name: string;
    id: string;
    data: { message: string };
  }): Promise<void> {
    this.server.to(`${data.name}-${data.id}`).emit('log', {
      id: data.id,
      name: data.name,
      data: data.data,
    });
  }
}
@ -0,0 +1,39 @@
|
||||
import { Module } from '@nestjs/common'; |
||||
import { BullModule } from '@nestjs/bull'; |
||||
import { GlobalModule } from '../global/global.module'; |
||||
import { DatasModule } from '../datas/datas.module'; |
||||
import { MetasModule } from '../metas/metas.module'; |
||||
import { JOBS_QUEUE } from '../../interface/Jobs'; |
||||
import { JobsService } from './jobs.service'; |
||||
import { ExportService } from './export-import/export.service'; |
||||
import { ImportService } from './export-import/import.service'; |
||||
import { DuplicateController } from './export-import/duplicate.controller'; |
||||
import { DuplicateProcessor } from './export-import/duplicate.processor'; |
||||
import { JobsGateway } from './jobs.gateway'; |
||||
import { QueueService } from './fallback-queue.service'; |
||||
import { JobsEventService } from './jobs-event.service'; |
||||
import { AtImportController } from './at-import/at-import.controller'; |
||||
import { AtImportProcessor } from './at-import/at-import.processor'; |
||||
|
||||
// Wires together the background-job subsystem: the shared Bull queue, the
// websocket gateway that streams job status/logs to clients, and the
// processors/controllers for project duplication and Airtable import.
@Module({
  imports: [
    GlobalModule,
    DatasModule,
    MetasModule,
    // Registers the shared jobs queue. The Redis-backed Bull queue is used
    // when Redis is configured; QueueService below is the in-process fallback.
    BullModule.registerQueue({
      name: JOBS_QUEUE,
    }),
  ],
  controllers: [DuplicateController, AtImportController],
  providers: [
    QueueService,
    JobsGateway,
    JobsService,
    JobsEventService,
    DuplicateProcessor,
    ExportService,
    ImportService,
    AtImportProcessor,
  ],
})
export class JobsModule {}
@ -0,0 +1,52 @@
|
||||
import { InjectQueue } from '@nestjs/bull'; |
||||
import { Injectable } from '@nestjs/common'; |
||||
import { Queue } from 'bull'; |
||||
import { JOBS_QUEUE } from '../../interface/Jobs'; |
||||
import { QueueService } from './fallback-queue.service'; |
||||
|
||||
@Injectable() |
||||
export class JobsService { |
||||
activeQueue; |
||||
constructor( |
||||
@InjectQueue(JOBS_QUEUE) private readonly jobsQueue: Queue, |
||||
private readonly fallbackQueueService: QueueService, |
||||
) { |
||||
this.activeQueue = process.env.NC_REDIS_URL |
||||
? this.jobsQueue |
||||
: this.fallbackQueueService; |
||||
} |
||||
|
||||
async jobStatus(jobId: string) { |
||||
return await (await this.activeQueue.getJob(jobId)).getState(); |
||||
} |
||||
|
||||
async jobList(jobType: string) { |
||||
return ( |
||||
await this.activeQueue.getJobs(['active', 'waiting', 'delayed', 'paused']) |
||||
).filter((j) => j.name === jobType); |
||||
} |
||||
|
||||
async getJobWithData(data: any) { |
||||
const jobs = await this.activeQueue.getJobs([ |
||||
// 'completed',
|
||||
'waiting', |
||||
'active', |
||||
'delayed', |
||||
// 'failed',
|
||||
'paused', |
||||
]); |
||||
|
||||
const job = jobs.find((j) => { |
||||
for (const key in data) { |
||||
if (j.data[key]) { |
||||
if (j.data[key] !== data[key]) return false; |
||||
} else { |
||||
return false; |
||||
} |
||||
} |
||||
return true; |
||||
}); |
||||
|
||||
return job; |
||||
} |
||||
} |
@ -0,0 +1,19 @@
|
||||
import { Test } from '@nestjs/testing'; |
||||
import { SocketGateway } from './socket.gateway'; |
||||
import type { TestingModule } from '@nestjs/testing'; |
||||
|
||||
// Smoke test: the gateway can be instantiated through Nest's DI container.
describe('SocketGateway', () => {
  let gateway: SocketGateway;

  beforeEach(async () => {
    // Build a minimal testing module providing only the gateway itself.
    const module: TestingModule = await Test.createTestingModule({
      providers: [SocketGateway],
    }).compile();

    gateway = module.get<SocketGateway>(SocketGateway);
  });

  it('should be defined', () => {
    expect(gateway).toBeDefined();
  });
});
@ -0,0 +1,67 @@
|
||||
import crypto from 'crypto'; |
||||
import { WebSocketGateway, WebSocketServer } from '@nestjs/websockets'; |
||||
import { Inject, Injectable } from '@nestjs/common'; |
||||
import { HttpAdapterHost } from '@nestjs/core'; |
||||
import { T } from 'nc-help'; |
||||
import { Server } from 'socket.io'; |
||||
import { AuthGuard } from '@nestjs/passport'; |
||||
import { ExecutionContextHost } from '@nestjs/core/helpers/execution-context-host'; |
||||
import { JwtStrategy } from '../strategies/jwt.strategy'; |
||||
import type { OnModuleInit } from '@nestjs/common'; |
||||
import type { Socket } from 'socket.io'; |
||||
|
||||
function getHash(str) { |
||||
return crypto.createHash('md5').update(str).digest('hex'); |
||||
} |
||||
|
||||
@WebSocketGateway({ |
||||
cors: { |
||||
origin: '*', |
||||
allowedHeaders: ['xc-auth'], |
||||
credentials: true, |
||||
}, |
||||
}) |
||||
@Injectable() |
||||
export class SocketGateway implements OnModuleInit { |
||||
// private server: HttpServer;
|
||||
private clients: { [id: string]: Socket } = {}; |
||||
|
||||
constructor( |
||||
private jwtStrategy: JwtStrategy, |
||||
@Inject(HttpAdapterHost) private httpAdapterHost: HttpAdapterHost, |
||||
) {} |
||||
|
||||
@WebSocketServer() |
||||
server: Server; |
||||
|
||||
async onModuleInit() { |
||||
this.server |
||||
.use(async (socket, next) => { |
||||
try { |
||||
const context = new ExecutionContextHost([socket.handshake as any]); |
||||
const guard = new (AuthGuard('jwt'))(context); |
||||
await guard.canActivate(context); |
||||
} catch {} |
||||
|
||||
next(); |
||||
}) |
||||
.on('connection', (socket) => { |
||||
this.clients[socket.id] = socket; |
||||
const id = getHash( |
||||
(process.env.NC_SERVER_UUID || T.id) + |
||||
(socket?.handshake as any)?.user?.id, |
||||
); |
||||
|
||||
socket.on('page', (args) => { |
||||
T.page({ ...args, id }); |
||||
}); |
||||
socket.on('event', (args) => { |
||||
T.event({ ...args, id }); |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
public get io() { |
||||
return this.server; |
||||
} |
||||
} |
@ -1,19 +1,19 @@
|
||||
import { Test } from '@nestjs/testing'; |
||||
import { SocketService } from './socket.service'; |
||||
import { SocketGateway } from './socket.gateway'; |
||||
import type { TestingModule } from '@nestjs/testing'; |
||||
|
||||
describe('ClientService', () => { |
||||
let service: SocketService; |
||||
describe('SocketGateway', () => { |
||||
let gateway: SocketGateway; |
||||
|
||||
beforeEach(async () => { |
||||
const module: TestingModule = await Test.createTestingModule({ |
||||
providers: [SocketService], |
||||
providers: [SocketGateway], |
||||
}).compile(); |
||||
|
||||
service = module.get<SocketService>(SocketService); |
||||
gateway = module.get<SocketGateway>(SocketGateway); |
||||
}); |
||||
|
||||
it('should be defined', () => { |
||||
expect(service).toBeDefined(); |
||||
expect(gateway).toBeDefined(); |
||||
}); |
||||
}); |
||||
|
@ -0,0 +1,68 @@
|
||||
/** |
||||
* Compare obj1 and obj2 conditionally based on ignoredFields set |
||||
* Ignore the field names which are passed in the ignoredFields. |
||||
* optionally keyId will be use to prefix the keys mismatched |
||||
* |
||||
* |
||||
* use utility boolean param breakAtFirstMismatch to print diff for |
||||
* all the fields instead of breaking at first mismatch |
||||
* |
||||
* @param obj1 |
||||
* @param obj2 |
||||
* @param ignoredFields : filed names ex: title |
||||
* @param ignoredKeys : json path for the filed ex: ".project.is_meta.title" |
||||
* @param keyId : starts with "" |
||||
* @param breakAtFirstMismatch : default true. returns false on first field mismatch |
||||
* @returns |
||||
*/ |
||||
export function deepCompare( |
||||
obj1: any, |
||||
obj2: any, |
||||
ignoredFields?: Set<string>, |
||||
ignoredKeys?: Set<string>, |
||||
keyId = '', |
||||
breakAtFirstMismatch = true |
||||
): boolean { |
||||
if (ignoredKeys !== undefined && ignoredKeys.has(keyId)) { |
||||
return true; |
||||
} |
||||
// If the objects are the same instance, they are equal
|
||||
if (obj1 === obj2) { |
||||
return true; |
||||
} |
||||
|
||||
// If one of the objects is null or not an object, they are not equal
|
||||
if (!obj1 || !obj2 || typeof obj1 !== 'object' || typeof obj2 !== 'object') { |
||||
console.log(`Mismatch key: ${keyId} value1: "${obj1}" value2: "${obj2}"`); |
||||
return !breakAtFirstMismatch; |
||||
// return false;
|
||||
} |
||||
|
||||
// If the objects have different numbers of properties, they are not equal
|
||||
const keys1 = Object.keys(obj1); |
||||
const keys2 = Object.keys(obj2); |
||||
if (keys1.length !== keys2.length) { |
||||
console.log(`Mismatch length key: ${keyId} value1: "${obj1}" value2: "${obj2}"`); |
||||
return !breakAtFirstMismatch; |
||||
// return false;
|
||||
} |
||||
|
||||
// Recursively compare each property of the objects
|
||||
for (const key of keys1) { |
||||
if ( |
||||
(ignoredFields !== undefined && ignoredFields.has(key)) || |
||||
key.endsWith(' List') /* temp hack to avoid fields like */ |
||||
) { |
||||
// console.log(`${keyId} ignored in comparison`)
|
||||
} else { |
||||
keyId = keyId + '.' + key; |
||||
if (!deepCompare(obj1[key], obj2[key], ignoredFields, ignoredKeys, keyId, breakAtFirstMismatch)) { |
||||
return !breakAtFirstMismatch; |
||||
// return false;
|
||||
} |
||||
} |
||||
} |
||||
|
||||
// If all properties match, the objects are equal
|
||||
return true; |
||||
} |
@ -0,0 +1,167 @@
|
||||
import { |
||||
Api, |
||||
BaseListType, |
||||
BaseType, |
||||
FilterListType, |
||||
FilterType, |
||||
HookListType, |
||||
HookType, |
||||
PaginatedType, |
||||
ProjectType, |
||||
SharedViewListType, |
||||
SharedViewType, |
||||
SignInReqType, |
||||
SortListType, |
||||
SortType, |
||||
TableListType, |
||||
TableType, |
||||
UserType, |
||||
ViewListType, |
||||
ViewType, |
||||
} from 'nocodb-sdk'; |
||||
|
||||
// Snapshot of a single view: the view definition plus the filters and sorts
// applied to it, and (optionally) the first page of rows served through it.
export class ViewInfo {
  view: ViewType;
  filters: FilterType[];
  sorts: SortType[];
  // First page of row data as returned by the view-row list API.
  firstPageData?: {
    /** List of data objects */
    list: any[];
    /** Paginated Info */
    pageInfo: PaginatedType;
  };
}
||||
|
||||
// Snapshot of a single table: its metadata, per-view snapshots, shared-view
// links and webhooks, and (optionally) the first page of table rows.
export class TableInfo {
  table: TableType;
  views: ViewInfo[];
  shares: SharedViewType[];
  webhooks: HookType[];
  // First page of row data as returned by the table-row list API.
  firstPageData?: {
    /** List of data objects */
    list: any[];
    /** Paginated Info */
    pageInfo: PaginatedType;
  };
}
||||
|
||||
// Aggregated snapshot of a whole project: the project record, its bases,
// collaborators and per-table snapshots.
export class ProjectInfo {
  project: ProjectType;
  bases: BaseType[];
  users: UserType[];
  tables: TableInfo[];
}
||||
|
||||
export class ProjectInfoOperator { |
||||
api: Api<any>; |
||||
|
||||
constructor(token: string) { |
||||
this.api = new Api({ |
||||
baseURL: `http://localhost:8080/`, |
||||
headers: { |
||||
'xc-auth': token, |
||||
}, |
||||
}); |
||||
} |
||||
/** |
||||
* extracts the projectInfo using sdk via apis |
||||
* |
||||
* @param projectId |
||||
* @returns |
||||
*/ |
||||
async extractProjectData(projectId: string): Promise<ProjectInfo> { |
||||
// TODO: capture apiTokens, projectSettings, ACLVisibilityRules, UI ACL (discuss before adding)
|
||||
const project: ProjectType = await this.api.project.read(projectId); |
||||
// bases
|
||||
const bases: BaseListType = await this.api.base.list(projectId); |
||||
// users
|
||||
const usersWrapper: any = await this.api.auth.projectUserList(projectId); |
||||
|
||||
// SET project, users and bases
|
||||
const projectInfo: ProjectInfo = { project: project, tables: [], bases: [], users: [] }; |
||||
projectInfo.bases = bases.list; |
||||
if (usersWrapper.users) { |
||||
projectInfo.users = usersWrapper.users.list as UserType[]; |
||||
} |
||||
|
||||
const tables: TableListType = await this.api.dbTable.list(projectId); |
||||
for (const table of tables.list) { |
||||
const tableInfo: TableInfo = { table: table, shares: [], views: [], webhooks: [] }; |
||||
const views: ViewListType = await this.api.dbView.list(table.id); |
||||
for (const v of views.list) { |
||||
const filters: FilterListType = await this.api.dbTableFilter.read(v.id); |
||||
const sorts: SortListType = await this.api.dbTableSort.list(v.id); |
||||
|
||||
// create ViewData and push to array
|
||||
const viewInfo: ViewInfo = { view: v, filters: [], sorts: [] }; |
||||
viewInfo.firstPageData = await this.api.dbViewRow.list('noco', projectId, table.id, v.id); |
||||
viewInfo.filters = filters.list; |
||||
viewInfo.sorts = sorts.list; |
||||
tableInfo.views.push(viewInfo); |
||||
} |
||||
const shares: SharedViewListType = await this.api.dbViewShare.list(table.id); |
||||
const webhooks: HookListType = await this.api.dbTableWebhook.list(table.id); |
||||
tableInfo.shares = shares.list; |
||||
tableInfo.webhooks = webhooks.list; |
||||
projectInfo.tables.push(tableInfo); |
||||
tableInfo.firstPageData = await this.api.dbTableRow.list('noco', projectId, table.id); |
||||
} |
||||
return projectInfo; |
||||
} |
||||
|
||||
/** |
||||
* helper function to print projectInfo |
||||
* do not use this function to assert anything. |
||||
* this is only helper function to debug and should |
||||
* be allowed to modify without any test failures. |
||||
* |
||||
* @param projectData |
||||
*/ |
||||
async printProjectData(projectData: ProjectInfo) { |
||||
console.log('project.title : ' + projectData.project.title); |
||||
// bases
|
||||
console.log('Bases:'); |
||||
for (const base of projectData.bases) { |
||||
console.log(base.id); |
||||
} |
||||
// users
|
||||
console.log('Users:'); |
||||
if (projectData.users) { |
||||
for (const user of projectData.users) { |
||||
console.log(user.email); |
||||
} |
||||
} |
||||
console.log('Tables: '); |
||||
|
||||
if (projectData.tables) { |
||||
for (const tableData of projectData.tables) { |
||||
console.log('Table: ' + tableData.table.title); |
||||
console.log('Views: '); |
||||
|
||||
console.log('Filters: '); |
||||
for (const viewData of tableData.views) { |
||||
const v: ViewType = viewData.view; |
||||
console.log(`${v.title} ${v.id}`); |
||||
if (viewData.filters.length > 0) { |
||||
console.log('======= Filters ======='); |
||||
console.log(viewData.filters); |
||||
} |
||||
if (viewData.sorts.length > 0) { |
||||
console.log('======= Sorts ======='); |
||||
console.log(viewData.sorts); |
||||
} |
||||
} |
||||
|
||||
if (tableData.shares.length > 0) { |
||||
console.log('======= Shares ======='); |
||||
console.log(tableData.shares.forEach(s => console.log(s.uuid))); |
||||
} |
||||
|
||||
if (tableData.webhooks.length > 0) { |
||||
console.log('======= Webhooks ======='); |
||||
console.log(tableData.webhooks.forEach(w => console.log(w.id))); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
Loading…
Reference in new issue