mirror of https://github.com/nocodb/nocodb
Committed by github-actions[bot] via GitHub, 2 years ago. 135 changed files with 18115 additions and 8790 deletions.
@@ -0,0 +1,84 @@
<script setup lang="ts">
import type { ProjectType } from 'nocodb-sdk'
import { useVModel } from '#imports'

const props = defineProps<{
  modelValue: boolean
  project: ProjectType
  onOk: (jobData: { name: string; id: string }) => Promise<void>
}>()

const emit = defineEmits(['update:modelValue'])

const { api } = useApi()

const dialogShow = useVModel(props, 'modelValue', emit)

const options = ref({
  includeData: true,
  includeViews: true,
  includeHooks: true,
})

const optionsToExclude = computed(() => {
  const { includeData, includeViews, includeHooks } = options.value
  return {
    excludeData: !includeData,
    excludeViews: !includeViews,
    excludeHooks: !includeHooks,
  }
})

const isLoading = ref(false)

const _duplicate = async () => {
  isLoading.value = true
  try {
    const jobData = await api.project.duplicate(props.project.id as string, optionsToExclude.value)
    props.onOk(jobData as any)
  } catch (e: any) {
    message.error(await extractSdkResponseErrorMsg(e))
  }
  isLoading.value = false
  dialogShow.value = false
}

const isEaster = ref(false)
</script>

<template>
  <a-modal
    v-model:visible="dialogShow"
    :class="{ active: dialogShow }"
    width="max(30vw, 600px)"
    centered
    wrap-class-name="nc-modal-project-duplicate"
    @keydown.esc="dialogShow = false"
  >
    <template #footer>
      <a-button key="back" size="large" @click="dialogShow = false">{{ $t('general.cancel') }}</a-button>

      <a-button key="submit" size="large" type="primary" :loading="isLoading" @click="_duplicate"
        >{{ $t('general.confirm') }}
      </a-button>
    </template>

    <div class="pl-10 pr-10 pt-5">
      <div class="prose-xl font-bold self-center my-4" @dblclick="isEaster = !isEaster">{{ $t('general.duplicate') }}</div>

      <div class="mb-2">Are you sure you want to duplicate the `{{ project.title }}` project?</div>

      <div class="prose-md self-center text-gray-500 mt-4">{{ $t('title.advancedSettings') }}</div>

      <a-divider class="!m-0 !p-0 !my-2" />

      <div class="text-xs p-2">
        <a-checkbox v-model:checked="options.includeData">Include data</a-checkbox>
        <a-checkbox v-model:checked="options.includeViews">Include views</a-checkbox>
        <a-checkbox v-show="isEaster" v-model:checked="options.includeHooks">Include webhooks</a-checkbox>
      </div>
    </div>
  </a-modal>
</template>

<style scoped lang="scss"></style>
@@ -0,0 +1,84 @@
<script setup lang="ts">
import type { TableType } from 'nocodb-sdk'
import { useVModel } from '#imports'

const props = defineProps<{
  modelValue: boolean
  table: TableType
  onOk: (jobData: { name: string; id: string }) => Promise<void>
}>()

const emit = defineEmits(['update:modelValue'])

const { api } = useApi()

const dialogShow = useVModel(props, 'modelValue', emit)

const options = ref({
  includeData: true,
  includeViews: true,
  includeHooks: true,
})

const optionsToExclude = computed(() => {
  const { includeData, includeViews, includeHooks } = options.value
  return {
    excludeData: !includeData,
    excludeViews: !includeViews,
    excludeHooks: !includeHooks,
  }
})

const isLoading = ref(false)

const _duplicate = async () => {
  isLoading.value = true
  try {
    const jobData = await api.dbTable.duplicate(props.table.project_id!, props.table.id!, optionsToExclude.value)
    props.onOk(jobData as any)
  } catch (e: any) {
    message.error(await extractSdkResponseErrorMsg(e))
  }
  isLoading.value = false
  dialogShow.value = false
}

const isEaster = ref(false)
</script>

<template>
  <a-modal
    v-model:visible="dialogShow"
    :class="{ active: dialogShow }"
    width="max(30vw, 600px)"
    centered
    wrap-class-name="nc-modal-table-duplicate"
    @keydown.esc="dialogShow = false"
  >
    <template #footer>
      <a-button key="back" size="large" @click="dialogShow = false">{{ $t('general.cancel') }}</a-button>

      <a-button key="submit" size="large" type="primary" :loading="isLoading" @click="_duplicate"
        >{{ $t('general.confirm') }}
      </a-button>
    </template>

    <div class="pl-10 pr-10 pt-5">
      <div class="prose-xl font-bold self-center my-4" @dblclick="isEaster = !isEaster">{{ $t('general.duplicate') }}</div>

      <div class="mb-2">Are you sure you want to duplicate the `{{ table.title }}` table?</div>

      <div class="prose-md self-center text-gray-500 mt-4">{{ $t('title.advancedSettings') }}</div>

      <a-divider class="!m-0 !p-0 !my-2" />

      <div class="text-xs p-2">
        <a-checkbox v-model:checked="options.includeData">Include data</a-checkbox>
        <a-checkbox v-model:checked="options.includeViews">Include views</a-checkbox>
        <a-checkbox v-show="isEaster" v-model:checked="options.includeHooks">Include hooks</a-checkbox>
      </div>
    </div>
  </a-modal>
</template>

<style scoped lang="scss"></style>
@@ -0,0 +1,100 @@
import type { Socket } from 'socket.io-client'
import io from 'socket.io-client'
import { JobStatus, defineNuxtPlugin, useGlobal, watch } from '#imports'

export default defineNuxtPlugin(async (nuxtApp) => {
  const { appInfo } = $(useGlobal())

  let socket: Socket | null = null
  let messageIndex = 0

  const init = async (token: string) => {
    try {
      if (socket) socket.disconnect()

      const url = new URL(appInfo.ncSiteUrl, window.location.href.split(/[?#]/)[0])

      socket = io(`${url.href}jobs`, {
        extraHeaders: { 'xc-auth': token },
      })

      socket.on('connect_error', (e) => {
        console.error(e)
        socket?.disconnect()
      })
    } catch {}
  }

  if (nuxtApp.$state.signedIn.value) {
    await init(nuxtApp.$state.token.value)
  }

  const send = (name: string, data: any) => {
    if (socket) {
      const _id = messageIndex++
      socket.emit(name, { _id, data })
      return _id
    }
  }

  const jobs = {
    subscribe(
      job: { id: string; name: string } | any,
      subscribedCb?: () => void,
      statusCb?: (status: JobStatus, data?: any) => void,
      logCb?: (data: { message: string }) => void,
    ) {
      const logFn = (data: { id: string; name: string; data: { message: string } }) => {
        if (data.id === job.id) {
          if (logCb) logCb(data.data)
        }
      }
      const statusFn = (data: any) => {
        if (data.id === job.id) {
          if (statusCb) statusCb(data.status, data.data)
          if (data.status === JobStatus.COMPLETED || data.status === JobStatus.FAILED) {
            socket?.off('status', statusFn)
            socket?.off('log', logFn)
          }
        }
      }

      const _id = send('subscribe', job)

      const subscribeFn = (data: { _id: number; name: string; id: string }) => {
        if (data._id === _id) {
          if (data.id !== job.id || data.name !== job.name) {
            job.id = data.id
            job.name = data.name
          }
          if (subscribedCb) subscribedCb()
          socket?.on('log', logFn)
          socket?.on('status', statusFn)
          socket?.off('subscribed', subscribeFn)
        }
      }
      socket?.on('subscribed', subscribeFn)
    },
    getStatus(name: string, id: string): Promise<string> {
      return new Promise((resolve) => {
        if (socket) {
          const _id = send('status', { name, id })
          const tempFn = (data: any) => {
            if (data._id === _id) {
              resolve(data.status)
              socket?.off('status', tempFn)
            }
          }
          socket.on('status', tempFn)
        }
      })
    },
  }

  watch((nuxtApp.$state as ReturnType<typeof useGlobal>).token, (newToken, oldToken) => {
    if (newToken && newToken !== oldToken) init(newToken)
    else if (!newToken) socket?.disconnect()
  })

  nuxtApp.provide('jobs', jobs)
})
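As a side note, here is a minimal sketch of how a component might consume the `$jobs` helper provided above; the job descriptor values and the `useNuxtApp` access pattern are illustrative assumptions, not part of this commit.

// Hypothetical usage sketch: subscribe to a duplication job and watch it finish.
const { $jobs } = useNuxtApp() as any

$jobs.subscribe(
  { id: 'job-id-from-duplicate-api', name: 'duplicate-base' }, // placeholder descriptor
  () => console.log('subscribed'),
  (status, data) => {
    // fires on every status event until COMPLETED or FAILED, after which the handlers are detached
    console.log('status:', status, data)
  },
  (log) => console.log(log.message),
)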
File diff suppressed because it is too large
@@ -0,0 +1,19 @@
import { Router } from 'express';
import ncMetaAclMw from '../../meta/helpers/ncMetaAclMw';
import { exportService } from '../../services';
import type { Request, Response } from 'express';

export async function exportBase(req: Request, res: Response) {
  res.json(
    await exportService.exportBase({ baseId: req.params.baseId, path: req.body.path })
  );
}

const router = Router({ mergeParams: true });

router.post(
  '/api/v1/db/meta/export/:projectId/:baseId',
  ncMetaAclMw(exportBase, 'exportBase')
);

export default router;
@@ -0,0 +1,39 @@
import { Router } from 'express';
import ncMetaAclMw from '../../meta/helpers/ncMetaAclMw';
import { importService } from '../../services';
import type { Request, Response } from 'express';

export async function importModels(req: Request, res: Response) {
  const { body, ...rest } = req;
  res.json(
    await importService.importModels({
      user: (req as any).user,
      projectId: req.params.projectId,
      baseId: req.params.baseId,
      data: Array.isArray(body) ? body : body.models,
      req: rest,
    })
  );
}

export async function importBase(req: Request, res: Response) {
  const { body, ...rest } = req;
  res.json(
    await importService.importBase({
      user: (req as any).user,
      projectId: req.params.projectId,
      baseId: req.params.baseId,
      src: body.src,
      req: rest,
    })
  );
}

const router = Router({ mergeParams: true });

router.post(
  '/api/v1/db/meta/import/:projectId/:baseId',
  ncMetaAclMw(importBase, 'importBase')
);

export default router;
@@ -0,0 +1,7 @@
import exportController from './export.ctl';
import importController from './import.ctl';

export default {
  exportController,
  importController,
};
@@ -0,0 +1,493 @@
import { NcError } from './../../meta/helpers/catchError';
import { UITypes, ViewTypes } from 'nocodb-sdk';
import { Project, Base, Model, View, LinkToAnotherRecordColumn } from '../../models';
import { dataService } from '..';
import { getViewAndModelByAliasOrId } from '../dbData/helpers';
import { Readable } from 'stream';
import NcPluginMgrv2 from '../../meta/helpers/NcPluginMgrv2';
import { unparse } from 'papaparse';
import { IStorageAdapterV2 } from 'nc-plugin';

/*
{
  "entity": "project",
  "bases": [
    ### current scope
    {
      "entity": "base",
      "models": [
        {
          "entity": "model",
          "model": {},
          "views": []
        }
      ]
    }
    ### end current scope
  ]
}
*/

async function generateBaseIdMap(base: Base, idMap: Map<string, string>) {
  idMap.set(base.project_id, base.project_id);
  idMap.set(base.id, `${base.project_id}::${base.id}`);
  const models = await base.getModels();

  for (const md of models) {
    idMap.set(md.id, `${base.project_id}::${base.id}::${md.id}`);
    await md.getColumns();
    for (const column of md.columns) {
      idMap.set(column.id, `${idMap.get(md.id)}::${column.id}`);
    }
  }

  return models;
}

async function serializeModels(param: { modelId: string[] }) {
  const serializedModels = [];

  // db id to structured id
  const idMap = new Map<string, string>();

  const projects: Project[] = [];
  const bases: Base[] = [];
  const modelsMap = new Map<string, Model[]>();

  for (const modelId of param.modelId) {
    const model = await Model.get(modelId);

    if (!model) return NcError.badRequest(`Model not found for id '${modelId}'`);

    const fndProject = projects.find((p) => p.id === model.project_id);
    const project = fndProject || (await Project.get(model.project_id));

    const fndBase = bases.find((b) => b.id === model.base_id);
    const base = fndBase || (await Base.get(model.base_id));

    if (!fndProject) projects.push(project);
    if (!fndBase) bases.push(base);

    if (!modelsMap.has(base.id)) {
      modelsMap.set(base.id, await generateBaseIdMap(base, idMap));
    }

    await model.getColumns();
    await model.getViews();

    for (const column of model.columns) {
      await column.getColOptions();
      if (column.colOptions) {
        for (const [k, v] of Object.entries(column.colOptions)) {
          switch (k) {
            case 'fk_mm_child_column_id':
            case 'fk_mm_parent_column_id':
            case 'fk_mm_model_id':
            case 'fk_parent_column_id':
            case 'fk_child_column_id':
            case 'fk_related_model_id':
            case 'fk_relation_column_id':
            case 'fk_lookup_column_id':
            case 'fk_rollup_column_id':
              column.colOptions[k] = idMap.get(v as string);
              break;
            case 'options':
              for (const o of column.colOptions['options']) {
                delete o.id;
                delete o.fk_column_id;
              }
              break;
            case 'formula':
              column.colOptions[k] = column.colOptions[k].replace(/(?<=\{\{).*?(?=\}\})/gm, (match) => idMap.get(match));
              break;
            case 'id':
            case 'created_at':
            case 'updated_at':
            case 'fk_column_id':
              delete column.colOptions[k];
              break;
          }
        }
      }
    }

    for (const view of model.views) {
      idMap.set(view.id, `${idMap.get(model.id)}::${view.id}`);
      await view.getColumns();
      await view.getFilters();
      await view.getSorts();
      if (view.filter) {
        const export_filters = [];
        for (const fl of view.filter.children) {
          const tempFl = {
            id: `${idMap.get(view.id)}::${fl.id}`,
            fk_column_id: idMap.get(fl.fk_column_id),
            fk_parent_id: fl.fk_parent_id,
            is_group: fl.is_group,
            logical_op: fl.logical_op,
            comparison_op: fl.comparison_op,
            comparison_sub_op: fl.comparison_sub_op,
            value: fl.value,
          };
          if (tempFl.is_group) {
            delete tempFl.comparison_op;
            delete tempFl.comparison_sub_op;
            delete tempFl.value;
          }
          export_filters.push(tempFl);
        }
        view.filter.children = export_filters;
      }

      if (view.sorts) {
        const export_sorts = [];
        for (const sr of view.sorts) {
          const tempSr = {
            fk_column_id: idMap.get(sr.fk_column_id),
            direction: sr.direction,
          };
          export_sorts.push(tempSr);
        }
        view.sorts = export_sorts;
      }

      if (view.view) {
        for (const [k, v] of Object.entries(view.view)) {
          switch (k) {
            case 'fk_column_id':
            case 'fk_cover_image_col_id':
            case 'fk_grp_col_id':
              view.view[k] = idMap.get(v as string);
              break;
            case 'meta':
              if (view.type === ViewTypes.KANBAN) {
                const meta = JSON.parse(view.view.meta as string) as Record<string, any>;
                for (const [k, v] of Object.entries(meta)) {
                  const colId = idMap.get(k as string);
                  for (const op of v) {
                    op.fk_column_id = idMap.get(op.fk_column_id);
                    delete op.id;
                  }
                  meta[colId] = v;
                  delete meta[k];
                }
                view.view.meta = meta;
              }
              break;
            case 'created_at':
            case 'updated_at':
            case 'fk_view_id':
            case 'project_id':
            case 'base_id':
            case 'uuid':
              delete view.view[k];
              break;
          }
        }
      }
    }

    serializedModels.push({
      entity: 'model',
      model: {
        id: idMap.get(model.id),
        prefix: project.prefix,
        title: model.title,
        table_name: clearPrefix(model.table_name, project.prefix),
        meta: model.meta,
        columns: model.columns.map((column) => ({
          id: idMap.get(column.id),
          ai: column.ai,
          column_name: column.column_name,
          cc: column.cc,
          cdf: column.cdf,
          meta: column.meta,
          pk: column.pk,
          order: column.order,
          rqd: column.rqd,
          system: column.system,
          uidt: column.uidt,
          title: column.title,
          un: column.un,
          unique: column.unique,
          colOptions: column.colOptions,
        })),
      },
      views: model.views.map((view) => ({
        id: idMap.get(view.id),
        is_default: view.is_default,
        type: view.type,
        meta: view.meta,
        order: view.order,
        title: view.title,
        show: view.show,
        show_system_fields: view.show_system_fields,
        filter: view.filter,
        sorts: view.sorts,
        lock_type: view.lock_type,
        columns: view.columns.map((column) => {
          const {
            id,
            fk_view_id,
            fk_column_id,
            project_id,
            base_id,
            created_at,
            updated_at,
            uuid,
            ...rest
          } = column as any;
          return {
            fk_column_id: idMap.get(fk_column_id),
            ...rest,
          };
        }),
        view: view.view,
      })),
    });
  }

  return serializedModels;
}

async function exportModelData(param: {
  storageAdapter: IStorageAdapterV2;
  path: string;
  projectId: string;
  modelId: string;
  viewId?: string;
}) {
  const { model, view } = await getViewAndModelByAliasOrId({
    projectName: param.projectId,
    tableName: param.modelId,
    viewName: param.viewId,
  });

  await model.getColumns();

  const hasLink = model.columns.some((c) => c.uidt === UITypes.LinkToAnotherRecord && c.colOptions?.type === 'mm');

  const pkMap = new Map<string, string>();

  for (const column of model.columns.filter((c) => c.uidt === UITypes.LinkToAnotherRecord && c.colOptions?.type !== 'hm')) {
    const relatedTable = await (
      (await column.getColOptions()) as LinkToAnotherRecordColumn
    ).getRelatedTable();

    await relatedTable.getColumns();

    pkMap.set(column.id, relatedTable.primaryKey.title);
  }

  const readableStream = new Readable({
    read() {},
  });

  const readableLinkStream = new Readable({
    read() {},
  });

  readableStream.setEncoding('utf8');

  readableLinkStream.setEncoding('utf8');

  const storageAdapter = param.storageAdapter;

  const uploadPromise = storageAdapter.fileCreateByStream(
    `${param.path}/${model.id}.csv`,
    readableStream
  );

  const uploadLinkPromise = hasLink
    ? storageAdapter.fileCreateByStream(
        `${param.path}/${model.id}_links.csv`,
        readableLinkStream
      )
    : Promise.resolve();

  const limit = 100;
  let offset = 0;

  const primaryKey = model.columns.find((c) => c.pk);

  const formatData = (data: any) => {
    const linkData = [];
    for (const row of data) {
      const pkValue = primaryKey ? row[primaryKey.title] : undefined;
      const linkRow = {};
      for (const [k, v] of Object.entries(row)) {
        const col = model.columns.find((c) => c.title === k);
        if (col) {
          if (col.pk) linkRow['pk'] = pkValue;
          const colId = `${col.project_id}::${col.base_id}::${col.fk_model_id}::${col.id}`;
          switch (col.uidt) {
            case UITypes.LinkToAnotherRecord:
              if (col.system || col.colOptions.type === 'hm') break;
              const pkList = [];

              const links = Array.isArray(v) ? v : [v];

              for (const link of links) {
                if (link) {
                  for (const [k, val] of Object.entries(link)) {
                    if (k === pkMap.get(col.id)) {
                      pkList.push(val);
                    }
                  }
                }
              }

              if (col.colOptions.type === 'mm') {
                linkRow[colId] = pkList.join(',');
              } else {
                row[colId] = pkList[0];
              }
              break;
            case UITypes.Attachment:
              try {
                row[colId] = JSON.stringify(v);
              } catch (e) {
                row[colId] = v;
              }
              break;
            case UITypes.ForeignKey:
            case UITypes.Formula:
            case UITypes.Lookup:
            case UITypes.Rollup:
            case UITypes.Rating:
            case UITypes.Barcode:
              // skip these types
              break;
            default:
              row[colId] = v;
              break;
          }
          delete row[k];
        }
      }
      linkData.push(linkRow);
    }
    return { data, linkData };
  };

  try {
    await recursiveRead(formatData, readableStream, readableLinkStream, model, view, offset, limit, true);
    await uploadPromise;
    await uploadLinkPromise;
  } catch (e) {
    await storageAdapter.fileDelete(`${param.path}/${model.id}.csv`);
    await storageAdapter.fileDelete(`${param.path}/${model.id}_links.csv`);
    console.error(e);
    throw e;
  }

  return true;
}

async function recursiveRead(
  formatter: Function,
  stream: Readable,
  linkStream: Readable,
  model: Model,
  view: View,
  offset: number,
  limit: number,
  header = false
): Promise<void> {
  return new Promise((resolve, reject) => {
    dataService
      .getDataList({ model, view, query: { limit, offset } })
      .then((result) => {
        try {
          if (!header) {
            stream.push('\r\n');
            linkStream.push('\r\n');
          }
          const { data, linkData } = formatter(result.list);
          stream.push(unparse(data, { header }));
          linkStream.push(unparse(linkData, { header }));
          if (result.pageInfo.isLastPage) {
            stream.push(null);
            linkStream.push(null);
            resolve();
          } else {
            recursiveRead(formatter, stream, linkStream, model, view, offset + limit, limit).then(resolve);
          }
        } catch (e) {
          reject(e);
        }
      });
  });
}

function clearPrefix(text: string, prefix?: string) {
  if (!prefix || prefix.length === 0) return text;
  return text.replace(new RegExp(`^${prefix}_?`), '');
}

export async function exportBaseSchema(param: { baseId: string }) {
  const base = await Base.get(param.baseId);

  if (!base) return NcError.badRequest(`Base not found for id '${param.baseId}'`);

  const project = await Project.get(base.project_id);

  const models = (await base.getModels()).filter((m) => !m.mm && m.type === 'table');

  const exportedModels = await serializeModels({ modelId: models.map((m) => m.id) });

  const exportData = { id: `${project.id}::${base.id}`, entity: 'base', models: exportedModels };

  return exportData;
}

export async function exportBase(param: { path: string; baseId: string }) {
  const base = await Base.get(param.baseId);

  if (!base) return NcError.badRequest(`Base not found for id '${param.baseId}'`);

  const project = await Project.get(base.project_id);

  const models = (await base.getModels()).filter((m) => !m.mm && m.type === 'table');

  const exportedModels = await serializeModels({ modelId: models.map((m) => m.id) });

  const exportData = { id: `${project.id}::${base.id}`, entity: 'base', models: exportedModels };

  const storageAdapter = await NcPluginMgrv2.storageAdapter();

  const destPath = `export/${project.id}/${base.id}/${param.path}/schema.json`;

  try {
    const readableStream = new Readable({
      read() {},
    });

    readableStream.setEncoding('utf8');

    readableStream.push(JSON.stringify(exportData));

    readableStream.push(null);

    await storageAdapter.fileCreateByStream(destPath, readableStream);

    for (const model of models) {
      await exportModelData({
        storageAdapter,
        path: `export/${project.id}/${base.id}/${param.path}/data`,
        projectId: project.id,
        modelId: model.id,
      });
    }
  } catch (e) {
    console.error(e);
    return NcError.internalServerError('Error while exporting base');
  }

  return true;
}
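In short, a successful exportBase run writes the following layout through the storage adapter (derived from the code above; angle brackets mark placeholders):

// export/<projectId>/<baseId>/<path>/schema.json              -> serialized base, models and views
// export/<projectId>/<baseId>/<path>/data/<modelId>.csv       -> row data for each exported table
// export/<projectId>/<baseId>/<path>/data/<modelId>_links.csv -> many-to-many link rows, written only when the table has mm links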
@@ -0,0 +1,844 @@
import type { ViewCreateReqType } from 'nocodb-sdk';
import { UITypes, ViewTypes } from 'nocodb-sdk';
import {
  tableService,
  gridViewService,
  filterService,
  viewColumnService,
  gridViewColumnService,
  sortService,
  formViewService,
  galleryViewService,
  kanbanViewService,
  formViewColumnService,
  columnService,
  bulkDataService,
} from '..';
import { NcError } from '../../meta/helpers/catchError';
import { Project, Base, User, View, Model, Column, LinkToAnotherRecordColumn } from '../../models';
import NcPluginMgrv2 from '../../meta/helpers/NcPluginMgrv2';
import papaparse from 'papaparse';

export async function importModels(param: {
  user: User;
  projectId: string;
  baseId: string;
  data: { models: { model: any; views: any[] }[] } | { model: any; views: any[] }[];
  req: any;
}) {
  // structured id to db id
  const idMap = new Map<string, string>();

  const project = await Project.get(param.projectId);

  if (!project) return NcError.badRequest(`Project not found for id '${param.projectId}'`);

  const base = await Base.get(param.baseId);

  if (!base) return NcError.badRequest(`Base not found for id '${param.baseId}'`);

  const tableReferences = new Map<string, Model>();
  const linkMap = new Map<string, string>();

  param.data = Array.isArray(param.data) ? param.data : param.data.models;

  // create tables with static columns
  for (const data of param.data) {
    const modelData = data.model;

    const reducedColumnSet = modelData.columns.filter(
      (a) =>
        a.uidt !== UITypes.LinkToAnotherRecord &&
        a.uidt !== UITypes.Lookup &&
        a.uidt !== UITypes.Rollup &&
        a.uidt !== UITypes.Formula &&
        a.uidt !== UITypes.ForeignKey
    );

    // create table with static columns
    const table = await tableService.tableCreate({
      projectId: project.id,
      baseId: base.id,
      user: param.user,
      table: withoutId({
        ...modelData,
        columns: reducedColumnSet.map((a) => withoutId(a)),
      }),
    });

    idMap.set(modelData.id, table.id);

    // map exported column ids to the newly created column ids
    for (const col of table.columns) {
      const colRef = modelData.columns.find(
        (a) => a.column_name === col.column_name
      );
      idMap.set(colRef.id, col.id);
    }

    tableReferences.set(modelData.id, table);
  }

  const referencedColumnSet = [];

  // create columns with reference to other columns
  for (const data of param.data) {
    const modelData = data.model;
    const table = tableReferences.get(modelData.id);

    const linkedColumnSet = modelData.columns.filter(
      (a) => a.uidt === UITypes.LinkToAnotherRecord
    );

    // create columns with reference to other columns
    for (const col of linkedColumnSet) {
      if (col.colOptions) {
        const colOptions = col.colOptions;
        if (col.uidt === UITypes.LinkToAnotherRecord && idMap.has(colOptions.fk_related_model_id)) {
          if (colOptions.type === 'mm') {
            if (!linkMap.has(colOptions.fk_mm_model_id)) {
              // delete col.column_name as it is not required and will cause an ajv error (null for LTAR)
              delete col.column_name;

              const freshModelData = await columnService.columnAdd({
                tableId: table.id,
                column: withoutId({
                  ...col,
                  ...{
                    parentId: idMap.get(getParentIdentifier(colOptions.fk_child_column_id)),
                    childId: idMap.get(getParentIdentifier(colOptions.fk_parent_column_id)),
                    type: colOptions.type,
                    virtual: colOptions.virtual,
                    ur: colOptions.ur,
                    dr: colOptions.dr,
                  },
                }),
                req: param.req,
              });

              for (const nColumn of freshModelData.columns) {
                if (nColumn.title === col.title) {
                  idMap.set(col.id, nColumn.id);
                  linkMap.set(colOptions.fk_mm_model_id, nColumn.colOptions.fk_mm_model_id);
                  break;
                }
              }

              const childModel =
                getParentIdentifier(colOptions.fk_parent_column_id) === modelData.id
                  ? freshModelData
                  : await Model.get(idMap.get(getParentIdentifier(colOptions.fk_parent_column_id)));

              if (getParentIdentifier(colOptions.fk_parent_column_id) !== modelData.id) await childModel.getColumns();

              const childColumn = param.data
                .find((a) => a.model.id === getParentIdentifier(colOptions.fk_parent_column_id))
                .model.columns.find(
                  (a) => a.colOptions?.fk_mm_model_id === colOptions.fk_mm_model_id && a.id !== col.id
                );

              for (const nColumn of childModel.columns) {
                if (
                  nColumn?.colOptions?.fk_mm_model_id === linkMap.get(colOptions.fk_mm_model_id) &&
                  nColumn.id !== idMap.get(col.id)
                ) {
                  idMap.set(childColumn.id, nColumn.id);

                  if (nColumn.title !== childColumn.title) {
                    await columnService.columnUpdate({
                      columnId: nColumn.id,
                      column: {
                        ...nColumn,
                        column_name: childColumn.title,
                        title: childColumn.title,
                      },
                    });
                  }
                  break;
                }
              }
            }
          } else if (colOptions.type === 'hm') {
            // delete col.column_name as it is not required and will cause an ajv error (null for LTAR)
            delete col.column_name;

            const freshModelData = await columnService.columnAdd({
              tableId: table.id,
              column: withoutId({
                ...col,
                ...{
                  parentId: idMap.get(getParentIdentifier(colOptions.fk_parent_column_id)),
                  childId: idMap.get(getParentIdentifier(colOptions.fk_child_column_id)),
                  type: colOptions.type,
                  virtual: colOptions.virtual,
                  ur: colOptions.ur,
                  dr: colOptions.dr,
                },
              }),
              req: param.req,
            });

            for (const nColumn of freshModelData.columns) {
              if (nColumn.title === col.title) {
                idMap.set(col.id, nColumn.id);
                idMap.set(colOptions.fk_parent_column_id, nColumn.colOptions.fk_parent_column_id);
                idMap.set(colOptions.fk_child_column_id, nColumn.colOptions.fk_child_column_id);
                break;
              }
            }

            const childModel =
              colOptions.fk_related_model_id === modelData.id
                ? freshModelData
                : await Model.get(idMap.get(colOptions.fk_related_model_id));

            if (colOptions.fk_related_model_id !== modelData.id) await childModel.getColumns();

            const childColumn = param.data
              .find((a) => a.model.id === colOptions.fk_related_model_id)
              .model.columns.find(
                (a) =>
                  a.colOptions?.fk_parent_column_id === colOptions.fk_parent_column_id &&
                  a.colOptions?.fk_child_column_id === colOptions.fk_child_column_id &&
                  a.id !== col.id
              );

            for (const nColumn of childModel.columns) {
              if (
                nColumn.id !== idMap.get(col.id) &&
                nColumn.colOptions?.fk_parent_column_id === idMap.get(colOptions.fk_parent_column_id) &&
                nColumn.colOptions?.fk_child_column_id === idMap.get(colOptions.fk_child_column_id)
              ) {
                idMap.set(childColumn.id, nColumn.id);

                if (nColumn.title !== childColumn.title) {
                  await columnService.columnUpdate({
                    columnId: nColumn.id,
                    column: {
                      ...nColumn,
                      column_name: childColumn.title,
                      title: childColumn.title,
                    },
                  });
                }
                break;
              }
            }
          }
        }
      }
    }

    referencedColumnSet.push(
      ...modelData.columns.filter(
        (a) =>
          a.uidt === UITypes.Lookup ||
          a.uidt === UITypes.Rollup ||
          a.uidt === UITypes.Formula
      )
    );
  }

  const sortedReferencedColumnSet = [];

  // sort referenced columns to avoid referencing before creation
  for (const col of referencedColumnSet) {
    const relatedColIds = [];
    if (col.colOptions?.fk_lookup_column_id) {
      relatedColIds.push(col.colOptions.fk_lookup_column_id);
    }
    if (col.colOptions?.fk_rollup_column_id) {
      relatedColIds.push(col.colOptions.fk_rollup_column_id);
    }
    if (col.colOptions?.formula) {
      relatedColIds.push(...col.colOptions.formula.match(/(?<=\{\{).*?(?=\}\})/gm));
    }

    // find the last related column in the sorted array
    let fnd = undefined;
    for (let i = sortedReferencedColumnSet.length - 1; i >= 0; i--) {
      if (relatedColIds.includes(sortedReferencedColumnSet[i].id)) {
        fnd = sortedReferencedColumnSet[i];
        break;
      }
    }

    if (!fnd) {
      sortedReferencedColumnSet.unshift(col);
    } else {
      sortedReferencedColumnSet.splice(sortedReferencedColumnSet.indexOf(fnd) + 1, 0, col);
    }
  }

  // create referenced columns
  for (const col of sortedReferencedColumnSet) {
    const { colOptions, ...flatCol } = col;
    if (col.uidt === UITypes.Lookup) {
      if (!idMap.get(colOptions.fk_relation_column_id)) continue;
      const freshModelData = await columnService.columnAdd({
        tableId: idMap.get(getParentIdentifier(col.id)),
        column: withoutId({
          ...flatCol,
          ...{
            fk_lookup_column_id: idMap.get(colOptions.fk_lookup_column_id),
            fk_relation_column_id: idMap.get(colOptions.fk_relation_column_id),
          },
        }),
        req: param.req,
      });

      for (const nColumn of freshModelData.columns) {
        if (nColumn.title === col.title) {
          idMap.set(col.id, nColumn.id);
          break;
        }
      }
    } else if (col.uidt === UITypes.Rollup) {
      if (!idMap.get(colOptions.fk_relation_column_id)) continue;
      const freshModelData = await columnService.columnAdd({
        tableId: idMap.get(getParentIdentifier(col.id)),
        column: withoutId({
          ...flatCol,
          ...{
            fk_rollup_column_id: idMap.get(colOptions.fk_rollup_column_id),
            fk_relation_column_id: idMap.get(colOptions.fk_relation_column_id),
            rollup_function: colOptions.rollup_function,
          },
        }),
        req: param.req,
      });

      for (const nColumn of freshModelData.columns) {
        if (nColumn.title === col.title) {
          idMap.set(col.id, nColumn.id);
          break;
        }
      }
    } else if (col.uidt === UITypes.Formula) {
      const freshModelData = await columnService.columnAdd({
        tableId: idMap.get(getParentIdentifier(col.id)),
        column: withoutId({
          ...flatCol,
          ...{
            formula_raw: colOptions.formula_raw,
          },
        }),
        req: param.req,
      });

      for (const nColumn of freshModelData.columns) {
        if (nColumn.title === col.title) {
          idMap.set(col.id, nColumn.id);
          break;
        }
      }
    }
  }

  // create views
  for (const data of param.data) {
    const modelData = data.model;
    const viewsData = data.views;

    const table = tableReferences.get(modelData.id);

    // get default view
    await table.getViews();

    for (const view of viewsData) {
      const viewData = withoutId({
        ...view,
      });

      const vw = await createView(idMap, table, viewData, table.views);

      if (!vw) continue;

      idMap.set(view.id, vw.id);

      // create filters
      const filters = view.filter.children;

      for (const fl of filters) {
        const fg = await filterService.filterCreate({
          viewId: vw.id,
          filter: withoutId({
            ...fl,
            fk_column_id: idMap.get(fl.fk_column_id),
            fk_parent_id: idMap.get(fl.fk_parent_id),
          }),
        });

        idMap.set(fl.id, fg.id);
      }

      // create sorts
      for (const sr of view.sorts) {
        await sortService.sortCreate({
          viewId: vw.id,
          sort: withoutId({
            ...sr,
            fk_column_id: idMap.get(sr.fk_column_id),
          }),
        });
      }

      // update view columns
      const vwColumns = await viewColumnService.columnList({ viewId: vw.id });

      for (const cl of vwColumns) {
        const fcl = view.columns.find((a) => a.fk_column_id === reverseGet(idMap, cl.fk_column_id));
        if (!fcl) continue;
        await viewColumnService.columnUpdate({
          viewId: vw.id,
          columnId: cl.id,
          column: {
            show: fcl.show,
            order: fcl.order,
          },
        });
      }

      switch (vw.type) {
        case ViewTypes.GRID:
          for (const cl of vwColumns) {
            const fcl = view.columns.find((a) => a.fk_column_id === reverseGet(idMap, cl.fk_column_id));
            if (!fcl) continue;
            const { fk_column_id, ...rest } = fcl;
            await gridViewColumnService.gridColumnUpdate({
              gridViewColumnId: cl.id,
              grid: {
                ...withoutNull(rest),
              },
            });
          }
          break;
        case ViewTypes.FORM:
          for (const cl of vwColumns) {
            const fcl = view.columns.find((a) => a.fk_column_id === reverseGet(idMap, cl.fk_column_id));
            if (!fcl) continue;
            const { fk_column_id, ...rest } = fcl;
            await formViewColumnService.columnUpdate({
              formViewColumnId: cl.id,
              formViewColumn: {
                ...withoutNull(rest),
              },
            });
          }
          break;
        case ViewTypes.GALLERY:
        case ViewTypes.KANBAN:
          break;
      }
    }
  }

  return idMap;
}

async function createView(idMap: Map<string, string>, md: Model, vw: Partial<View>, views: View[]): Promise<View> {
  if (vw.is_default) {
    const view = views.find((a) => a.is_default);
    if (view) {
      const gridData = withoutNull(vw.view);
      if (gridData) {
        await gridViewService.gridViewUpdate({
          viewId: view.id,
          grid: gridData,
        });
      }
    }
    return view;
  }

  switch (vw.type) {
    case ViewTypes.GRID:
      const gview = await gridViewService.gridViewCreate({
        tableId: md.id,
        grid: vw as ViewCreateReqType,
      });
      const gridData = withoutNull(vw.view);
      if (gridData) {
        await gridViewService.gridViewUpdate({
          viewId: gview.id,
          grid: gridData,
        });
      }
      return gview;
    case ViewTypes.FORM:
      const fview = await formViewService.formViewCreate({
        tableId: md.id,
        body: vw as ViewCreateReqType,
      });
      const formData = withoutNull(vw.view);
      if (formData) {
        await formViewService.formViewUpdate({
          formViewId: fview.id,
          form: formData,
        });
      }
      return fview;
    case ViewTypes.GALLERY:
      const glview = await galleryViewService.galleryViewCreate({
        tableId: md.id,
        gallery: vw as ViewCreateReqType,
      });
      const galleryData = withoutNull(vw.view);
      if (galleryData) {
        for (const [k, v] of Object.entries(galleryData)) {
          switch (k) {
            case 'fk_cover_image_col_id':
              galleryData[k] = idMap.get(v as string);
              break;
          }
        }
        await galleryViewService.galleryViewUpdate({
          galleryViewId: glview.id,
          gallery: galleryData,
        });
      }
      return glview;
    case ViewTypes.KANBAN:
      const kview = await kanbanViewService.kanbanViewCreate({
        tableId: md.id,
        kanban: vw as ViewCreateReqType,
      });
      const kanbanData = withoutNull(vw.view);
      if (kanbanData) {
        for (const [k, v] of Object.entries(kanbanData)) {
          switch (k) {
            case 'fk_grp_col_id':
            case 'fk_cover_image_col_id':
              kanbanData[k] = idMap.get(v as string);
              break;
            case 'meta':
              const meta = {};
              for (const [mk, mv] of Object.entries(v as any)) {
                const tempVal = [];
                for (const vl of mv as any) {
                  if (vl.fk_column_id) {
                    tempVal.push({
                      ...vl,
                      fk_column_id: idMap.get(vl.fk_column_id),
                    });
                  } else {
                    delete vl.fk_column_id;
                    tempVal.push({
                      ...vl,
                      id: 'uncategorized',
                    });
                  }
                }
                meta[idMap.get(mk)] = tempVal;
              }
              kanbanData[k] = meta;
              break;
          }
        }
        await kanbanViewService.kanbanViewUpdate({
          kanbanViewId: kview.id,
          kanban: kanbanData,
        });
      }
      return kview;
  }

  return null;
}

function withoutNull(obj: any) {
  const newObj = {};
  let found = false;
  for (const [key, value] of Object.entries(obj)) {
    if (value !== null) {
      newObj[key] = value;
      found = true;
    }
  }
  if (!found) return null;
  return newObj;
}

function reverseGet(map: Map<string, string>, vl: string) {
  for (const [key, value] of map.entries()) {
    if (vl === value) {
      return key;
    }
  }
  return undefined;
}

function withoutId(obj: any) {
  const { id, ...rest } = obj;
  return rest;
}

function getParentIdentifier(id: string) {
  const arr = id.split('::');
  arr.pop();
  return arr.join('::');
}

function getEntityIdentifier(id: string) {
  const arr = id.split('::');
  return arr.pop();
}

function findWithIdentifier(map: Map<string, any>, id: string) {
  for (const key of map.keys()) {
    if (getEntityIdentifier(key) === id) {
      return map.get(key);
    }
  }
  return undefined;
}

export async function importBase(param: {
  user: User;
  projectId: string;
  baseId: string;
  src: { type: 'local' | 'url' | 'file'; path?: string; url?: string; file?: any };
  req: any;
}) {
  const { user, projectId, baseId, src, req } = param;

  const debug = req.params.debug === 'true';

  const debugLog = (...args: any[]) => {
    if (!debug) return;
    console.log(...args);
  };

  let start = process.hrtime();

  const elapsedTime = function (label?: string) {
    const elapsedS = process.hrtime(start)[0].toFixed(3);
    const elapsedMs = process.hrtime(start)[1] / 1000000;
    if (label) debugLog(`${label}: ${elapsedS}s ${elapsedMs}ms`);
    start = process.hrtime();
  };

  switch (src.type) {
    case 'local':
      const path = src.path.replace(/\/$/, '');

      const storageAdapter = await NcPluginMgrv2.storageAdapter();

      try {
        const schema = JSON.parse(await storageAdapter.fileRead(`${path}/schema.json`));

        elapsedTime('read schema');

        // store fk_mm_model_id (mm) so each link table is populated only once
        const handledLinks = [];

        const idMap = await importModels({
          user,
          projectId,
          baseId,
          data: schema,
          req,
        });

        elapsedTime('import models');

        if (idMap) {
          const files = await storageAdapter.getDirectoryList(`${path}/data`);
          const dataFiles = files.filter((file) => !file.match(/_links\.csv$/));
          const linkFiles = files.filter((file) => file.match(/_links\.csv$/));

          for (const file of dataFiles) {
            const readStream = await storageAdapter.fileReadByStream(
              `${path}/data/${file}`
            );

            const headers: string[] = [];
            let chunk = [];

            const modelId = findWithIdentifier(
              idMap,
              file.replace(/\.csv$/, '')
            );

            const model = await Model.get(modelId);

            debugLog(`Importing ${model.title}...`);

            await new Promise(async (resolve) => {
              papaparse.parse(readStream, {
                newline: '\r\n',
                step: async function (results, parser) {
                  if (!headers.length) {
                    parser.pause();
                    for (const header of results.data) {
                      const id = idMap.get(header);
                      if (id) {
                        const col = await Column.get({
                          base_id: baseId,
                          colId: id,
                        });
                        if (col.colOptions?.type === 'bt') {
                          const childCol = await Column.get({
                            base_id: baseId,
                            colId: col.colOptions.fk_child_column_id,
                          });
                          headers.push(childCol.column_name);
                        } else {
                          headers.push(col.column_name);
                        }
                      } else {
                        debugLog(header);
                      }
                    }
                    parser.resume();
                  } else {
                    if (results.errors.length === 0) {
                      const row = {};
                      for (let i = 0; i < headers.length; i++) {
                        if (results.data[i] !== '') {
                          row[headers[i]] = results.data[i];
                        }
                      }
                      chunk.push(row);
                      if (chunk.length > 100) {
                        parser.pause();
                        elapsedTime('before import chunk');
                        try {
                          await bulkDataService.bulkDataInsert({
                            projectName: projectId,
                            tableName: modelId,
                            body: chunk,
                            cookie: null,
                            chunkSize: chunk.length + 1,
                            foreign_key_checks: false,
                            raw: true,
                          });
                        } catch (e) {
                          debugLog(`${model.title} import threw an error!`);
                          console.log(e);
                        }
                        chunk = [];
                        elapsedTime('after import chunk');
                        parser.resume();
                      }
                    }
                  }
                },
                complete: async function () {
                  if (chunk.length > 0) {
                    elapsedTime('before import chunk');
                    try {
                      await bulkDataService.bulkDataInsert({
                        projectName: projectId,
                        tableName: modelId,
                        body: chunk,
                        cookie: null,
                        chunkSize: chunk.length + 1,
                        foreign_key_checks: false,
                        raw: true,
                      });
                    } catch (e) {
                      debugLog(chunk);
                      console.log(e);
                    }
                    chunk = [];
                    elapsedTime('after import chunk');
                  }
                  resolve(null);
                },
              });
            });
          }

          // reset timer
          elapsedTime();

          for (const file of linkFiles) {
            const readStream = await storageAdapter.fileReadByStream(
              `${path}/data/${file}`
            );

            const headers: string[] = [];
            const mmParentChild: any = {};
            let chunk: Record<string, any[]> = {}; // colId: { rowId, childId }[]

            const modelId = findWithIdentifier(
              idMap,
              file.replace(/_links\.csv$/, '')
            );
            const model = await Model.get(modelId);

            let pkIndex = -1;

            debugLog(`Linking ${model.title}...`);

            await new Promise(async (resolve) => {
              papaparse.parse(readStream, {
                newline: '\r\n',
                step: async function (results, parser) {
                  if (!headers.length) {
                    parser.pause();
                    for (const header of results.data) {
                      if (header === 'pk') {
                        headers.push(null);
                        pkIndex = headers.length - 1;
                        continue;
                      }
                      const id = idMap.get(header);
                      if (id) {
                        const col = await Column.get({
                          base_id: baseId,
                          colId: id,
                        });
                        if (
                          col.uidt === UITypes.LinkToAnotherRecord &&
                          col.colOptions.fk_mm_model_id &&
                          handledLinks.includes(col.colOptions.fk_mm_model_id)
                        ) {
                          headers.push(null);
                        } else {
                          if (
                            col.uidt === UITypes.LinkToAnotherRecord &&
                            col.colOptions.fk_mm_model_id &&
                            !handledLinks.includes(col.colOptions.fk_mm_model_id)
                          ) {
                            const colOptions = await col.getColOptions<LinkToAnotherRecordColumn>();

                            const vChildCol = await colOptions.getMMChildColumn();
                            const vParentCol = await colOptions.getMMParentColumn();

                            mmParentChild[col.colOptions.fk_mm_model_id] = {
                              parent: vParentCol.column_name,
                              child: vChildCol.column_name,
                            };

                            handledLinks.push(col.colOptions.fk_mm_model_id);
                          }
                          headers.push(col.colOptions.fk_mm_model_id);
                          chunk[col.colOptions.fk_mm_model_id] = [];
                        }
                      }
                    }
                    parser.resume();
                  } else {
                    if (results.errors.length === 0) {
                      for (let i = 0; i < headers.length; i++) {
                        if (!headers[i]) continue;

                        const mm = mmParentChild[headers[i]];

                        for (const rel of results.data[i].split(',')) {
                          if (rel.trim() === '') continue;
                          chunk[headers[i]].push({ [mm.parent]: rel, [mm.child]: results.data[pkIndex] });
                        }
                      }
                    }
                  }
                },
                complete: async function () {
                  for (const [k, v] of Object.entries(chunk)) {
                    try {
                      elapsedTime('prepare link chunk');
                      await bulkDataService.bulkDataInsert({
                        projectName: projectId,
                        tableName: k,
                        body: v,
                        cookie: null,
                        chunkSize: 1000,
                        foreign_key_checks: false,
                        raw: true,
                      });
                      elapsedTime('insert link chunk');
                    } catch (e) {
                      console.log(e);
                    }
                  }
                  resolve(null);
                },
              });
            });
          }
        }
      } catch (e) {
        throw new Error(e);
      }
      break;
    case 'url':
      break;
    case 'file':
      break;
  }
}
@@ -1,6 +0,0 @@
export abstract class NocoSyncSourceAdapter {
  public abstract init(): Promise<void>;
  public abstract destProjectWrite(): Promise<any>;
  public abstract destSchemaWrite(): Promise<any>;
  public abstract destDataWrite(): Promise<any>;
}
@@ -1,7 +0,0 @@
export abstract class NocoSyncSourceAdapter {
  public abstract init(): Promise<void>;
  public abstract srcSchemaGet(): Promise<any>;
  public abstract srcDataLoad(): Promise<any>;
  public abstract srcDataListen(): Promise<any>;
  public abstract srcDataPoll(): Promise<any>;
}
File diff suppressed because it is too large
@@ -1,21 +0,0 @@
import { Test } from '@nestjs/testing';
import { ImportService } from '../../services/import.service';
import { ImportController } from './import.controller';
import type { TestingModule } from '@nestjs/testing';

describe('ImportController', () => {
  let controller: ImportController;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [ImportController],
      providers: [ImportService],
    }).compile();

    controller = module.get<ImportController>(ImportController);
  });

  it('should be defined', () => {
    expect(controller).toBeDefined();
  });
});
@@ -1,148 +0,0 @@
import { Controller, HttpCode, Post, Request, UseGuards } from '@nestjs/common';
import { forwardRef, Inject } from '@nestjs/common';
import { ModuleRef } from '@nestjs/core';
import { GlobalGuard } from '../../guards/global/global.guard';
import { NcError } from '../../helpers/catchError';
import { ExtractProjectIdMiddleware } from '../../middlewares/extract-project-id/extract-project-id.middleware';
import { SyncSource } from '../../models';
import NocoJobs from '../../jobs/NocoJobs';
import { SocketService } from '../../services/socket.service';
import airtableSyncJob from './helpers/job';
import type { AirtableSyncConfig } from './helpers/job';

import type { Server } from 'socket.io';

const AIRTABLE_IMPORT_JOB = 'AIRTABLE_IMPORT_JOB';
const AIRTABLE_PROGRESS_JOB = 'AIRTABLE_PROGRESS_JOB';

enum SyncStatus {
  PROGRESS = 'PROGRESS',
  COMPLETED = 'COMPLETED',
  FAILED = 'FAILED',
}

const initJob = (sv: Server, jobs: { [p: string]: { last_message: any } }) => {
  // add importer job handler and progress notification job handler
  NocoJobs.jobsMgr.addJobWorker(AIRTABLE_IMPORT_JOB, airtableSyncJob);
  NocoJobs.jobsMgr.addJobWorker(
    AIRTABLE_PROGRESS_JOB,
    ({ payload, progress }) => {
      sv.to(payload?.id).emit('progress', {
        msg: progress?.msg,
        level: progress?.level,
        status: progress?.status,
      });

      if (payload?.id in jobs) {
        jobs[payload?.id].last_message = {
          msg: progress?.msg,
          level: progress?.level,
          status: progress?.status,
        };
      }
    },
  );

  NocoJobs.jobsMgr.addProgressCbk(AIRTABLE_IMPORT_JOB, (payload, progress) => {
    NocoJobs.jobsMgr.add(AIRTABLE_PROGRESS_JOB, {
      payload,
      progress: {
        msg: progress?.msg,
        level: progress?.level,
        status: progress?.status,
      },
    });
  });
  NocoJobs.jobsMgr.addSuccessCbk(AIRTABLE_IMPORT_JOB, (payload) => {
    NocoJobs.jobsMgr.add(AIRTABLE_PROGRESS_JOB, {
      payload,
      progress: {
        msg: 'Complete!',
        status: SyncStatus.COMPLETED,
      },
    });
    delete jobs[payload?.id];
  });
  NocoJobs.jobsMgr.addFailureCbk(AIRTABLE_IMPORT_JOB, (payload, error: any) => {
    NocoJobs.jobsMgr.add(AIRTABLE_PROGRESS_JOB, {
      payload,
      progress: {
        msg: error?.message || 'Failed due to some internal error',
        status: SyncStatus.FAILED,
      },
    });
    delete jobs[payload?.id];
  });
};

@Controller()
@UseGuards(ExtractProjectIdMiddleware, GlobalGuard)
export class ImportController {
  constructor(
    private readonly socketService: SocketService,
    @Inject(forwardRef(() => ModuleRef)) private readonly moduleRef: ModuleRef,
  ) {}

  @Post('/api/v1/db/meta/import/airtable')
  @HttpCode(200)
  importAirtable(@Request() req) {
    NocoJobs.jobsMgr.add(AIRTABLE_IMPORT_JOB, {
      id: req.query.id,
      ...req.body,
    });
    return {};
  }

  @Post('/api/v1/db/meta/syncs/:syncId/trigger')
  @HttpCode(200)
  async triggerSync(@Request() req) {
    if (req.params.syncId in this.socketService.jobs) {
      NcError.badRequest('Sync already in progress');
    }

    const syncSource = await SyncSource.get(req.params.syncId);

    const user = await syncSource.getUser();

    // treat siteUrl from the req object as the default baseUrl
    let baseURL = (req as any).ncSiteUrl;

    // if running in docker, construct the URL from `PORT` instead
    if (process.env.NC_DOCKER) {
      baseURL = `http://localhost:${process.env.PORT || 8080}`;
    }

    setTimeout(() => {
      NocoJobs.jobsMgr.add<AirtableSyncConfig>(AIRTABLE_IMPORT_JOB, {
        id: req.params.syncId,
        ...(syncSource?.details || {}),
        projectId: syncSource.project_id,
        baseId: syncSource.base_id,
        authToken: '',
        baseURL,
        user: user,
        moduleRef: this.moduleRef,
      });
    }, 1000);

    this.socketService.jobs[req.params.syncId] = {
      last_message: {
        msg: 'Sync started',
      },
    };
    return {};
  }

  @Post('/api/v1/db/meta/syncs/:syncId/abort')
  @HttpCode(200)
  async abortImport(@Request() req) {
    if (req.params.syncId in this.socketService.jobs) {
      delete this.socketService.jobs[req.params.syncId];
    }
    return {};
  }

  async onModuleInit() {
    initJob(this.socketService.io, this.socketService.jobs);
  }
}
@ -1,19 +1,19 @@
import { Test } from '@nestjs/testing';
-import { SocketService } from './socket.service';
+import { SocketGateway } from './socket.gateway';
import type { TestingModule } from '@nestjs/testing';

-describe('ClientService', () => {
-  let service: SocketService;
+describe('SocketGateway', () => {
+  let gateway: SocketGateway;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
-      providers: [SocketService],
+      providers: [SocketGateway],
    }).compile();

-    service = module.get<SocketService>(SocketService);
+    gateway = module.get<SocketGateway>(SocketGateway);
  });

  it('should be defined', () => {
-    expect(service).toBeDefined();
+    expect(gateway).toBeDefined();
  });
});
@ -0,0 +1,82 @@
import type { Base } from '../models';

export async function generateBaseIdMap(
  base: Base,
  idMap: Map<string, string>,
) {
  idMap.set(base.project_id, base.project_id);
  idMap.set(base.id, `${base.project_id}::${base.id}`);
  const models = await base.getModels();

  for (const md of models) {
    idMap.set(md.id, `${base.project_id}::${base.id}::${md.id}`);
    await md.getColumns();
    for (const column of md.columns) {
      idMap.set(column.id, `${idMap.get(md.id)}::${column.id}`);
    }
  }

  return models;
}

export function clearPrefix(text: string, prefix?: string) {
  if (!prefix || prefix.length === 0) return text;
  return text.replace(new RegExp(`^${prefix}_?`), '');
}

export function withoutNull(obj: any) {
  const newObj = {};
  let found = false;
  for (const [key, value] of Object.entries(obj)) {
    if (value !== null) {
      newObj[key] = value;
      found = true;
    }
  }
  if (!found) return null;
  return newObj;
}

export function reverseGet(map: Map<string, string>, vl: string) {
  for (const [key, value] of map.entries()) {
    if (vl === value) {
      return key;
    }
  }
  return undefined;
}

export function withoutId(obj: any) {
  const { id, ...rest } = obj;
  return rest;
}

export function getParentIdentifier(id: string) {
  const arr = id.split('::');
  arr.pop();
  return arr.join('::');
}

export function getEntityIdentifier(id: string) {
  const arr = id.split('::');
  return arr.pop();
}

export function findWithIdentifier(map: Map<string, any>, id: string) {
  for (const key of map.keys()) {
    if (getEntityIdentifier(key) === id) {
      return map.get(key);
    }
  }
  return undefined;
}

export function generateUniqueName(name: string, names: string[]) {
  let newName = name;
  let i = 1;
  while (names.includes(newName)) {
    newName = `${name}_${i}`;
    i++;
  }
  return newName;
}
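The helpers above all revolve around the `::`-scoped ids that `generateBaseIdMap` produces. A small illustrative sketch (the literal ids and the relative import path are made up) of how they compose:

```ts
import {
  findWithIdentifier,
  generateUniqueName,
  getEntityIdentifier,
  getParentIdentifier,
} from './exportImportHelpers';

// Structured ids nest with '::' (project::base::model::column).
const columnKey = 'p1::b1::md1::cl1';
getEntityIdentifier(columnKey); // 'cl1' (the trailing db-level id)
getParentIdentifier(columnKey); // 'p1::b1::md1' (the owning model)

// findWithIdentifier looks a value up by the trailing db id alone.
const idMap = new Map<string, string>([[columnKey, 'new_cl1']]);
findWithIdentifier(idMap, 'cl1'); // 'new_cl1'

// generateUniqueName appends a numeric suffix until the title is free.
generateUniqueName('Table copy', ['Table copy', 'Table copy_1']); // 'Table copy_2'
```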
@ -0,0 +1,22 @@
export const JOBS_QUEUE = 'jobs';

export enum JobTypes {
  DuplicateBase = 'duplicate-base',
  DuplicateModel = 'duplicate-model',
  AtImport = 'at-import',
}

export enum JobStatus {
  COMPLETED = 'completed',
  WAITING = 'waiting',
  ACTIVE = 'active',
  DELAYED = 'delayed',
  FAILED = 'failed',
  PAUSED = 'paused',
  REFRESH = 'refresh',
}

export enum JobEvents {
  STATUS = 'job.status',
  LOG = 'job.log',
}
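These constants are consumed by the processors later in this changeset through the Bull decorators. A minimal sketch of that wiring; `ExampleProcessor` and the relative import path are placeholder assumptions:

```ts
import { Process, Processor } from '@nestjs/bull';
import { Job } from 'bull';
import { JOBS_QUEUE, JobTypes } from '../../interface/Jobs';

// Every processor binds to the single shared queue name and picks its
// work items by JobTypes value.
@Processor(JOBS_QUEUE)
export class ExampleProcessor {
  @Process(JobTypes.DuplicateBase)
  async handle(job: Job) {
    // job.data carries whatever the controller passed to activeQueue.add()
    return { handled: job.data };
  }
}
```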
@ -1,35 +0,0 @@
|
||||
import Emittery from 'emittery'; |
||||
import JobsMgr from './JobsMgr'; |
||||
|
||||
export default class EmitteryJobsMgr extends JobsMgr { |
||||
emitter: Emittery; |
||||
|
||||
constructor() { |
||||
super(); |
||||
this.emitter = new Emittery(); |
||||
} |
||||
|
||||
add(jobName: string, payload: any): Promise<any> { |
||||
return this.emitter.emit(jobName, payload); |
||||
} |
||||
|
||||
addJobWorker( |
||||
jobName: string, |
||||
workerFn: ( |
||||
payload: any, |
||||
progressCbk?: (payload: any, msg?: string) => void, |
||||
) => void, |
||||
) { |
||||
this.emitter.on(jobName, async (payload) => { |
||||
try { |
||||
await workerFn(payload, (msg) => |
||||
this.invokeProgressCbks(jobName, payload, msg), |
||||
); |
||||
await this.invokeSuccessCbks(jobName, payload); |
||||
} catch (e) { |
||||
console.log(e); |
||||
await this.invokeFailureCbks(jobName, payload, e); |
||||
} |
||||
}); |
||||
} |
||||
} |
@ -1,67 +0,0 @@
|
||||
export default abstract class JobsMgr { |
||||
protected successCbks: Array<{ |
||||
[jobName: string]: (payload: any) => void; |
||||
}> = []; |
||||
protected failureCbks: Array<{ |
||||
[jobName: string]: (payload: any, error: Error) => void; |
||||
}> = []; |
||||
protected progressCbks: Array<{ |
||||
[jobName: string]: (payload: any, msg?: string) => void; |
||||
}> = []; |
||||
|
||||
public abstract add<T>(jobName: string, payload: T): Promise<any>; |
||||
|
||||
public abstract addJobWorker( |
||||
jobName: string, |
||||
workerFn: ( |
||||
payload: any, |
||||
progressCbk?: (payload: any, msg?: string) => void, |
||||
) => void, |
||||
options?: { |
||||
onSuccess?: (payload: any) => void; |
||||
onFailure?: (payload: any, errorData: any) => void; |
||||
onProgress?: (payload: any, progressData: any) => void; |
||||
}, |
||||
); |
||||
|
||||
addSuccessCbk(jobName: string, cbk: (payload: any) => void) { |
||||
this.successCbks[jobName] = this.successCbks[jobName] || []; |
||||
this.successCbks[jobName].push(cbk); |
||||
} |
||||
|
||||
addFailureCbk(jobName: string, cbk: (payload: any, errorData: any) => void) { |
||||
this.failureCbks[jobName] = this.failureCbks[jobName] || []; |
||||
this.failureCbks[jobName].push(cbk); |
||||
} |
||||
addProgressCbk( |
||||
jobName: string, |
||||
cbk: (payload: any, progressData: any) => void, |
||||
) { |
||||
this.progressCbks[jobName] = this.progressCbks[jobName] || []; |
||||
this.progressCbks[jobName].push(cbk); |
||||
} |
||||
|
||||
protected async invokeSuccessCbks(jobName: string, payload: any) { |
||||
await Promise.all( |
||||
this.successCbks?.[jobName]?.map((cb) => cb(payload)) || [], |
||||
); |
||||
} |
||||
protected async invokeFailureCbks( |
||||
jobName: string, |
||||
payload: any, |
||||
error?: Error, |
||||
) { |
||||
await Promise.all( |
||||
this.failureCbks?.[jobName]?.map((cb) => cb(payload, error)) || [], |
||||
); |
||||
} |
||||
protected async invokeProgressCbks( |
||||
jobName: string, |
||||
payload: any, |
||||
data?: any, |
||||
) { |
||||
await Promise.all( |
||||
this.progressCbks?.[jobName]?.map((cb) => cb(payload, data)) || [], |
||||
); |
||||
} |
||||
} |
@ -1,20 +0,0 @@
|
||||
import EmitteryJobsMgr from './EmitteryJobsMgr'; |
||||
import RedisJobsMgr from './RedisJobsMgr'; |
||||
import type JobsMgr from './JobsMgr'; |
||||
|
||||
export default class NocoJobs { |
||||
private static client: JobsMgr; |
||||
|
||||
private static init() { |
||||
if (process.env.NC_REDIS_URL) { |
||||
this.client = new RedisJobsMgr(process.env.NC_REDIS_URL); |
||||
} else { |
||||
this.client = new EmitteryJobsMgr(); |
||||
} |
||||
} |
||||
|
||||
public static get jobsMgr(): JobsMgr { |
||||
if (!this.client) this.init(); |
||||
return this.client; |
||||
} |
||||
} |
@ -1,56 +0,0 @@
|
||||
import { Queue, Worker } from 'bullmq'; |
||||
import Redis from 'ioredis'; |
||||
import JobsMgr from './JobsMgr'; |
||||
|
||||
export default class RedisJobsMgr extends JobsMgr { |
||||
queue: { [jobName: string]: Queue }; |
||||
workers: { [jobName: string]: Worker }; |
||||
connection: Redis; |
||||
|
||||
constructor(config: any) { |
||||
super(); |
||||
this.queue = {}; |
||||
this.workers = {}; |
||||
this.connection = new Redis(config, { |
||||
maxRetriesPerRequest: null, |
||||
}); |
||||
} |
||||
|
||||
async add( |
||||
jobName: string, |
||||
payload: any, |
||||
// options?: {
|
||||
// onSuccess?: (payload: any) => void;
|
||||
// onFailure?: (payload: any, msg: string) => void;
|
||||
// onProgress?: (payload: any, msgOrData: any) => void;
|
||||
// }
|
||||
): Promise<any> { |
||||
this.queue[jobName] = |
||||
this.queue[jobName] || |
||||
new Queue(jobName, { connection: this.connection }); |
||||
this.queue[jobName].add(jobName, payload); |
||||
} |
||||
|
||||
addJobWorker( |
||||
jobName: string, |
||||
workerFn: ( |
||||
payload: any, |
||||
progressCbk?: (payload: any, msg?: string) => void, |
||||
) => void, |
||||
) { |
||||
this.workers[jobName] = new Worker( |
||||
jobName, |
||||
async (payload) => { |
||||
try { |
||||
await workerFn(payload.data, (...args) => |
||||
this.invokeProgressCbks(jobName, ...args), |
||||
); |
||||
await this.invokeFailureCbks(jobName, payload.data); |
||||
} catch (e) { |
||||
await this.invokeFailureCbks(jobName, payload.data); |
||||
} |
||||
}, |
||||
{ connection: this.connection }, |
||||
); |
||||
} |
||||
} |
@ -0,0 +1,6 @@
export interface IEventEmitter {
  emit(event: string, arg: any): void;
  on(event: string, listener: (arg: any) => void): () => void;
  removeListener(event: string, listener: (arg: any) => void): void;
  removeAllListeners(event?: string): void;
}
@ -0,0 +1,16 @@
import { Global, Module } from '@nestjs/common';
import { FallbackEventEmitter } from './fallback-event-emitter';

@Global()
@Module({
  providers: [
    {
      provide: 'IEventEmitter',
      useFactory: () => {
        return new FallbackEventEmitter();
      },
    },
  ],
  exports: ['IEventEmitter'],
})
export class EventEmitterModule {}
@ -0,0 +1,27 @@
import Emittery from 'emittery';
import { IEventEmitter } from './event-emitter.interface';

export class FallbackEventEmitter implements IEventEmitter {
  private readonly emitter: Emittery;

  constructor() {
    this.emitter = new Emittery();
  }

  emit(event: string, data: any): void {
    this.emitter.emit(event, data);
  }

  on(event: string, listener: (...args: any[]) => void) {
    this.emitter.on(event, listener);
    return () => this.emitter.off(event, listener);
  }

  removeListener(event: string, listener: (...args: any[]) => void): void {
    this.emitter.off(event, listener);
  }

  removeAllListeners(event?: string): void {
    this.emitter.clearListeners(event);
  }
}
@ -0,0 +1,23 @@
import { EventEmitter2 } from '@nestjs/event-emitter';
import { IEventEmitter } from './event-emitter.interface';

export class NestjsEventEmitter implements IEventEmitter {
  constructor(private readonly eventEmitter: EventEmitter2) {}

  emit(event: string, data: any): void {
    this.eventEmitter.emit(event, data);
  }

  on(event: string, listener: (...args: any[]) => void) {
    this.eventEmitter.on(event, listener);
    return () => this.eventEmitter.removeListener(event, listener);
  }

  removeListener(event: string, listener: (...args: any[]) => void): void {
    this.eventEmitter.removeListener(event, listener);
  }

  removeAllListeners(event?: string): void {
    this.eventEmitter.removeAllListeners(event);
  }
}
@ -0,0 +1,65 @@
import { Controller, HttpCode, Post, Request, UseGuards } from '@nestjs/common';
import { GlobalGuard } from '../../../guards/global/global.guard';
import { ExtractProjectIdMiddleware } from '../../../middlewares/extract-project-id/extract-project-id.middleware';
import { SyncSource } from '../../../models';
import { NcError } from '../../../helpers/catchError';
import { JobsService } from '../jobs.service';
import { JobTypes } from '../../../interface/Jobs';

@Controller()
@UseGuards(ExtractProjectIdMiddleware, GlobalGuard)
export class AtImportController {
  constructor(private readonly jobsService: JobsService) {}

  @Post('/api/v1/db/meta/import/airtable')
  @HttpCode(200)
  async importAirtable(@Request() req) {
    const job = await this.jobsService.activeQueue.add(JobTypes.AtImport, {
      ...req.body,
    });

    return { id: job.id, name: job.name };
  }

  @Post('/api/v1/db/meta/syncs/:syncId/trigger')
  @HttpCode(200)
  async triggerSync(@Request() req) {
    const jobs = await this.jobsService.jobList(JobTypes.AtImport);
    const fnd = jobs.find((j) => j.data.syncId === req.params.syncId);

    if (fnd) {
      NcError.badRequest('Sync already in progress');
    }

    const syncSource = await SyncSource.get(req.params.syncId);

    const user = await syncSource.getUser();

    // Treat default baseUrl as siteUrl from req object
    let baseURL = (req as any).ncSiteUrl;

    // if environment value avail use it
    // or if it's docker construct using `PORT`
    if (process.env.NC_DOCKER) {
      baseURL = `http://localhost:${process.env.PORT || 8080}`;
    }

    const job = await this.jobsService.activeQueue.add(JobTypes.AtImport, {
      syncId: req.params.syncId,
      ...(syncSource?.details || {}),
      projectId: syncSource.project_id,
      baseId: syncSource.base_id,
      authToken: '',
      baseURL,
      user: user,
    });

    return { id: job.id, name: job.name };
  }

  @Post('/api/v1/db/meta/syncs/:syncId/abort')
  @HttpCode(200)
  async abortImport(@Request() req) {
    return {};
  }
}
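Both endpoints now return a `{ id, name }` job handle instead of an empty body. A hedged sketch of a hypothetical client call (relative URL, token handling, and header name are assumptions based on the gateway config later in this changeset):

```ts
// Hypothetical client-side call; the base URL is relative and the token is a placeholder.
async function triggerAirtableSync(syncId: string, authToken: string) {
  const res = await fetch(`/api/v1/db/meta/syncs/${syncId}/trigger`, {
    method: 'POST',
    headers: { 'xc-auth': authToken },
  });
  // the controller queues the import and responds immediately with a job handle
  return (await res.json()) as { id: string; name: string }; // name === 'at-import'
}
```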
File diff suppressed because it is too large
@ -1,7 +1,8 @@
/* eslint-disable no-async-promise-executor */
import { RelationTypes, UITypes } from 'nocodb-sdk';
+import EntityMap from './EntityMap';
-import type { BulkDataAliasService } from '../../../services/bulk-data-alias.service';
-import type { TablesService } from '../../../services/tables.service';
+import type { BulkDataAliasService } from '../../../../services/bulk-data-alias.service';
+import type { TablesService } from '../../../../services/tables.service';
// @ts-ignore
import type { AirtableBase } from 'airtable/lib/airtable_base';
import type { TableType } from 'nocodb-sdk';
@ -0,0 +1,136 @@
|
||||
import { |
||||
Body, |
||||
Controller, |
||||
HttpCode, |
||||
Param, |
||||
Post, |
||||
Request, |
||||
UseGuards, |
||||
} from '@nestjs/common'; |
||||
import { ProjectStatus } from 'nocodb-sdk'; |
||||
import { GlobalGuard } from '../../../guards/global/global.guard'; |
||||
import { |
||||
Acl, |
||||
ExtractProjectIdMiddleware, |
||||
} from '../../../middlewares/extract-project-id/extract-project-id.middleware'; |
||||
import { ProjectsService } from '../../../services/projects.service'; |
||||
import { Base, Model, Project } from '../../../models'; |
||||
import { generateUniqueName } from '../../../helpers/exportImportHelpers'; |
||||
import { JobsService } from '../jobs.service'; |
||||
import { JobTypes } from '../../../interface/Jobs'; |
||||
|
||||
@Controller() |
||||
@UseGuards(ExtractProjectIdMiddleware, GlobalGuard) |
||||
export class DuplicateController { |
||||
constructor( |
||||
private readonly jobsService: JobsService, |
||||
private readonly projectsService: ProjectsService, |
||||
) {} |
||||
|
||||
@Post('/api/v1/db/meta/duplicate/:projectId/:baseId?') |
||||
@HttpCode(200) |
||||
@Acl('duplicateBase') |
||||
async duplicateBase( |
||||
@Request() req, |
||||
@Param('projectId') projectId: string, |
||||
@Param('baseId') baseId?: string, |
||||
@Body() |
||||
options?: { |
||||
excludeData?: boolean; |
||||
excludeViews?: boolean; |
||||
excludeHooks?: boolean; |
||||
}, |
||||
) { |
||||
const project = await Project.get(projectId); |
||||
|
||||
if (!project) { |
||||
throw new Error(`Project not found for id '${projectId}'`); |
||||
} |
||||
|
||||
const base = baseId |
||||
? await Base.get(baseId) |
||||
: (await project.getBases())[0]; |
||||
|
||||
if (!base) { |
||||
throw new Error(`Base not found!`); |
||||
} |
||||
|
||||
const projects = await Project.list({}); |
||||
|
||||
const uniqueTitle = generateUniqueName( |
||||
`${project.title} copy`, |
||||
projects.map((p) => p.title), |
||||
); |
||||
|
||||
const dupProject = await this.projectsService.projectCreate({ |
||||
project: { title: uniqueTitle, status: ProjectStatus.JOB }, |
||||
user: { id: req.user.id }, |
||||
}); |
||||
|
||||
const job = await this.jobsService.activeQueue.add(JobTypes.DuplicateBase, { |
||||
projectId: project.id, |
||||
baseId: base.id, |
||||
dupProjectId: dupProject.id, |
||||
options, |
||||
req: { |
||||
user: req.user, |
||||
clientIp: req.clientIp, |
||||
}, |
||||
}); |
||||
|
||||
return { id: job.id, name: job.name }; |
||||
} |
||||
|
||||
@Post('/api/v1/db/meta/duplicate/:projectId/table/:modelId') |
||||
@HttpCode(200) |
||||
@Acl('duplicateModel') |
||||
async duplicateModel( |
||||
@Request() req, |
||||
@Param('projectId') projectId: string, |
||||
@Param('modelId') modelId?: string, |
||||
@Body() |
||||
options?: { |
||||
excludeData?: boolean; |
||||
excludeViews?: boolean; |
||||
excludeHooks?: boolean; |
||||
}, |
||||
) { |
||||
const project = await Project.get(projectId); |
||||
|
||||
if (!project) { |
||||
throw new Error(`Project not found for id '${projectId}'`); |
||||
} |
||||
|
||||
const model = await Model.get(modelId); |
||||
|
||||
if (!model) { |
||||
throw new Error(`Model not found!`); |
||||
} |
||||
|
||||
const base = await Base.get(model.base_id); |
||||
|
||||
const models = await base.getModels(); |
||||
|
||||
const uniqueTitle = generateUniqueName( |
||||
`${model.title} copy`, |
||||
models.map((p) => p.title), |
||||
); |
||||
|
||||
const job = await this.jobsService.activeQueue.add( |
||||
JobTypes.DuplicateModel, |
||||
{ |
||||
projectId: project.id, |
||||
baseId: base.id, |
||||
modelId: model.id, |
||||
title: uniqueTitle, |
||||
options, |
||||
req: { |
||||
user: req.user, |
||||
clientIp: req.clientIp, |
||||
}, |
||||
}, |
||||
); |
||||
|
||||
return { id: job.id, name: job.name }; |
||||
} |
||||
} |
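A similar sketch for the duplicate endpoint; the body flags are optional and the processor defaults each one to `false`. The URL and auth handling are placeholders:

```ts
// Hypothetical client-side call; URL and auth handling are placeholders.
async function duplicateProject(projectId: string, authToken: string) {
  const res = await fetch(`/api/v1/db/meta/duplicate/${projectId}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'xc-auth': authToken },
    // all three flags are optional; the processor treats a missing flag as false
    body: JSON.stringify({ excludeData: false, excludeViews: false, excludeHooks: true }),
  });
  return (await res.json()) as { id: string; name: string }; // name === 'duplicate-base'
}
```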
@ -0,0 +1,408 @@
|
||||
import { Readable } from 'stream'; |
||||
import { Process, Processor } from '@nestjs/bull'; |
||||
import { Job } from 'bull'; |
||||
import papaparse from 'papaparse'; |
||||
import { UITypes } from 'nocodb-sdk'; |
||||
import { Logger } from '@nestjs/common'; |
||||
import { Base, Column, Model, Project } from '../../../models'; |
||||
import { ProjectsService } from '../../../services/projects.service'; |
||||
import { findWithIdentifier } from '../../../helpers/exportImportHelpers'; |
||||
import { BulkDataAliasService } from '../../../services/bulk-data-alias.service'; |
||||
import { JOBS_QUEUE, JobTypes } from '../../../interface/Jobs'; |
||||
import { elapsedTime, initTime } from '../helpers'; |
||||
import { ExportService } from './export.service'; |
||||
import { ImportService } from './import.service'; |
||||
|
||||
@Processor(JOBS_QUEUE) |
||||
export class DuplicateProcessor { |
||||
private readonly logger = new Logger( |
||||
`${JOBS_QUEUE}:${DuplicateProcessor.name}`, |
||||
); |
||||
|
||||
constructor( |
||||
private readonly exportService: ExportService, |
||||
private readonly importService: ImportService, |
||||
private readonly projectsService: ProjectsService, |
||||
private readonly bulkDataService: BulkDataAliasService, |
||||
) {} |
||||
|
||||
@Process(JobTypes.DuplicateBase) |
||||
async duplicateBase(job: Job) { |
||||
const hrTime = initTime(); |
||||
|
||||
const { projectId, baseId, dupProjectId, req, options } = job.data; |
||||
|
||||
const excludeData = options?.excludeData || false; |
||||
const excludeHooks = options?.excludeHooks || false; |
||||
const excludeViews = options?.excludeViews || false; |
||||
|
||||
const project = await Project.get(projectId); |
||||
const dupProject = await Project.get(dupProjectId); |
||||
const base = await Base.get(baseId); |
||||
|
||||
try { |
||||
if (!project || !dupProject || !base) { |
||||
throw new Error(`Project or base not found!`); |
||||
} |
||||
|
||||
const user = (req as any).user; |
||||
|
||||
const models = (await base.getModels()).filter( |
||||
// TODO revert this when issue with cache is fixed
|
||||
(m) => m.base_id === base.id && !m.mm && m.type === 'table', |
||||
); |
||||
|
||||
const exportedModels = await this.exportService.serializeModels({ |
||||
modelIds: models.map((m) => m.id), |
||||
excludeViews, |
||||
excludeHooks, |
||||
}); |
||||
|
||||
elapsedTime( |
||||
hrTime, |
||||
`serialize models schema for ${base.project_id}::${base.id}`, |
||||
'duplicateBase', |
||||
); |
||||
|
||||
if (!exportedModels) { |
||||
throw new Error(`Export failed for base '${base.id}'`); |
||||
} |
||||
|
||||
await dupProject.getBases(); |
||||
|
||||
const dupBase = dupProject.bases[0]; |
||||
|
||||
const idMap = await this.importService.importModels({ |
||||
user, |
||||
projectId: dupProject.id, |
||||
baseId: dupBase.id, |
||||
data: exportedModels, |
||||
req: req, |
||||
}); |
||||
|
||||
elapsedTime(hrTime, `import models schema`, 'duplicateBase'); |
||||
|
||||
if (!idMap) { |
||||
throw new Error(`Import failed for base '${base.id}'`); |
||||
} |
||||
|
||||
if (!excludeData) { |
||||
await this.importModelsData({ |
||||
idMap, |
||||
sourceProject: project, |
||||
sourceModels: models, |
||||
destProject: dupProject, |
||||
destBase: dupBase, |
||||
hrTime, |
||||
}); |
||||
} |
||||
|
||||
await this.projectsService.projectUpdate({ |
||||
projectId: dupProject.id, |
||||
project: { |
||||
status: null, |
||||
}, |
||||
}); |
||||
} catch (e) { |
||||
if (dupProject?.id) { |
||||
await this.projectsService.projectSoftDelete({ |
||||
projectId: dupProject.id, |
||||
}); |
||||
} |
||||
throw e; |
||||
} |
||||
} |
||||
|
||||
@Process(JobTypes.DuplicateModel) |
||||
async duplicateModel(job: Job) { |
||||
const hrTime = initTime(); |
||||
|
||||
const { projectId, baseId, modelId, title, req, options } = job.data; |
||||
|
||||
const excludeData = options?.excludeData || false; |
||||
const excludeHooks = options?.excludeHooks || false; |
||||
const excludeViews = options?.excludeViews || false; |
||||
|
||||
const project = await Project.get(projectId); |
||||
const base = await Base.get(baseId); |
||||
|
||||
const user = (req as any).user; |
||||
|
||||
const models = (await base.getModels()).filter( |
||||
(m) => !m.mm && m.type === 'table', |
||||
); |
||||
|
||||
const sourceModel = models.find((m) => m.id === modelId); |
||||
|
||||
await sourceModel.getColumns(); |
||||
|
||||
const relatedModelIds = sourceModel.columns |
||||
.filter((col) => col.uidt === UITypes.LinkToAnotherRecord) |
||||
.map((col) => col.colOptions.fk_related_model_id) |
||||
.filter((id) => id); |
||||
|
||||
const relatedModels = models.filter((m) => relatedModelIds.includes(m.id)); |
||||
|
||||
const exportedModel = ( |
||||
await this.exportService.serializeModels({ |
||||
modelIds: [modelId], |
||||
excludeViews, |
||||
excludeHooks, |
||||
}) |
||||
)[0]; |
||||
|
||||
elapsedTime( |
||||
hrTime, |
||||
`serialize model schema for ${modelId}`, |
||||
'duplicateModel', |
||||
); |
||||
|
||||
if (!exportedModel) { |
||||
throw new Error(`Export failed for base '${base.id}'`); |
||||
} |
||||
|
||||
exportedModel.model.title = title; |
||||
exportedModel.model.table_name = title.toLowerCase().replace(/ /g, '_'); |
||||
|
||||
const idMap = await this.importService.importModels({ |
||||
projectId, |
||||
baseId, |
||||
data: [exportedModel], |
||||
user, |
||||
req, |
||||
externalModels: relatedModels, |
||||
}); |
||||
|
||||
elapsedTime(hrTime, 'import model schema', 'duplicateModel'); |
||||
|
||||
if (!idMap) { |
||||
throw new Error(`Import failed for model '${modelId}'`); |
||||
} |
||||
|
||||
if (!excludeData) { |
||||
const fields: Record<string, string[]> = {}; |
||||
|
||||
for (const md of relatedModels) { |
||||
const bts = md.columns |
||||
.filter( |
||||
(c) => |
||||
c.uidt === UITypes.LinkToAnotherRecord && |
||||
c.colOptions.type === 'bt' && |
||||
c.colOptions.fk_related_model_id === modelId, |
||||
) |
||||
.map((c) => c.id); |
||||
|
||||
if (bts.length > 0) { |
||||
fields[md.id] = [md.primaryKey.id]; |
||||
fields[md.id].push(...bts); |
||||
} |
||||
} |
||||
|
||||
await this.importModelsData({ |
||||
idMap, |
||||
sourceProject: project, |
||||
sourceModels: [sourceModel], |
||||
destProject: project, |
||||
destBase: base, |
||||
hrTime, |
||||
modelFieldIds: fields, |
||||
externalModels: relatedModels, |
||||
}); |
||||
|
||||
elapsedTime(hrTime, 'import model data', 'duplicateModel'); |
||||
} |
||||
|
||||
return await Model.get(findWithIdentifier(idMap, sourceModel.id)); |
||||
} |
||||
|
||||
async importModelsData(param: { |
||||
idMap: Map<string, string>; |
||||
sourceProject: Project; |
||||
sourceModels: Model[]; |
||||
destProject: Project; |
||||
destBase: Base; |
||||
hrTime: { hrTime: [number, number] }; |
||||
modelFieldIds?: Record<string, string[]>; |
||||
externalModels?: Model[]; |
||||
}) { |
||||
const { |
||||
idMap, |
||||
sourceProject, |
||||
sourceModels, |
||||
destProject, |
||||
destBase, |
||||
hrTime, |
||||
modelFieldIds, |
||||
externalModels, |
||||
} = param; |
||||
|
||||
let handledLinks = []; |
||||
|
||||
for (const sourceModel of sourceModels) { |
||||
const dataStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
const linkStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
this.exportService.streamModelDataAsCsv({ |
||||
dataStream, |
||||
linkStream, |
||||
projectId: sourceProject.id, |
||||
modelId: sourceModel.id, |
||||
handledMmList: handledLinks, |
||||
}); |
||||
|
||||
const model = await Model.get(findWithIdentifier(idMap, sourceModel.id)); |
||||
|
||||
await this.importService.importDataFromCsvStream({ |
||||
idMap, |
||||
dataStream, |
||||
destProject, |
||||
destBase, |
||||
destModel: model, |
||||
}); |
||||
|
||||
handledLinks = await this.importService.importLinkFromCsvStream({ |
||||
idMap, |
||||
linkStream, |
||||
destProject, |
||||
destBase, |
||||
handledLinks, |
||||
}); |
||||
|
||||
elapsedTime( |
||||
hrTime, |
||||
`import data and links for ${model.title}`, |
||||
'importModelsData', |
||||
); |
||||
} |
||||
|
||||
// update external models (has bt to this model)
|
||||
if (externalModels) { |
||||
for (const sourceModel of externalModels) { |
||||
const fields = modelFieldIds?.[sourceModel.id]; |
||||
|
||||
if (!fields) continue; |
||||
|
||||
const dataStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
const linkStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
this.exportService.streamModelDataAsCsv({ |
||||
dataStream, |
||||
linkStream, |
||||
projectId: sourceProject.id, |
||||
modelId: sourceModel.id, |
||||
handledMmList: handledLinks, |
||||
_fieldIds: fields, |
||||
}); |
||||
|
||||
const headers: string[] = []; |
||||
let chunk = []; |
||||
|
||||
const model = await Model.get(sourceModel.id); |
||||
|
||||
await new Promise((resolve) => { |
||||
papaparse.parse(dataStream, { |
||||
newline: '\r\n', |
||||
step: async (results, parser) => { |
||||
if (!headers.length) { |
||||
parser.pause(); |
||||
for (const header of results.data) { |
||||
const id = idMap.get(header); |
||||
if (id) { |
||||
const col = await Column.get({ |
||||
base_id: destBase.id, |
||||
colId: id, |
||||
}); |
||||
if (col) { |
||||
if (col.colOptions?.type === 'bt') { |
||||
const childCol = await Column.get({ |
||||
base_id: destBase.id, |
||||
colId: col.colOptions.fk_child_column_id, |
||||
}); |
||||
if (childCol) { |
||||
headers.push(childCol.column_name); |
||||
} else { |
||||
headers.push(null); |
||||
this.logger.error(`child column not found (${id})`); |
||||
} |
||||
} else { |
||||
headers.push(col.column_name); |
||||
} |
||||
} else { |
||||
headers.push(null); |
||||
this.logger.error(`column not found (${id})`); |
||||
} |
||||
} else { |
||||
headers.push(null); |
||||
this.logger.error(`id not found (${header})`); |
||||
} |
||||
} |
||||
parser.resume(); |
||||
} else { |
||||
if (results.errors.length === 0) { |
||||
const row = {}; |
||||
for (let i = 0; i < headers.length; i++) { |
||||
if (headers[i]) { |
||||
if (results.data[i] !== '') { |
||||
row[headers[i]] = results.data[i]; |
||||
} |
||||
} |
||||
} |
||||
chunk.push(row); |
||||
if (chunk.length > 1000) { |
||||
parser.pause(); |
||||
try { |
||||
await this.bulkDataService.bulkDataUpdate({ |
||||
projectName: destProject.id, |
||||
tableName: model.id, |
||||
body: chunk, |
||||
cookie: null, |
||||
raw: true, |
||||
}); |
||||
} catch (e) { |
||||
this.logger.error(e); |
||||
} |
||||
chunk = []; |
||||
parser.resume(); |
||||
} |
||||
} |
||||
} |
||||
}, |
||||
complete: async () => { |
||||
if (chunk.length > 0) { |
||||
try { |
||||
await this.bulkDataService.bulkDataUpdate({ |
||||
projectName: destProject.id, |
||||
tableName: model.id, |
||||
body: chunk, |
||||
cookie: null, |
||||
raw: true, |
||||
}); |
||||
} catch (e) { |
||||
this.logger.error(e); |
||||
} |
||||
chunk = []; |
||||
} |
||||
resolve(null); |
||||
}, |
||||
}); |
||||
}); |
||||
|
||||
elapsedTime( |
||||
hrTime, |
||||
`map existing links to ${model.title}`, |
||||
'importModelsData', |
||||
); |
||||
} |
||||
} |
||||
} |
||||
} |
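The parsing loop above buffers rows and flushes them to `bulkDataUpdate` in batches of 1000 so a large table is never held in memory at once. A distilled sketch of that buffering pattern, with a hypothetical `flush` callback standing in for the bulk-data call:

```ts
// Buffer parsed rows and flush in batches; `flush` is hypothetical.
async function batchRows<T>(
  rows: AsyncIterable<T>,
  flush: (chunk: T[]) => Promise<void>,
  batchSize = 1000,
) {
  let chunk: T[] = [];
  for await (const row of rows) {
    chunk.push(row);
    if (chunk.length >= batchSize) {
      await flush(chunk); // mirrors bulkDataUpdate({ body: chunk, raw: true })
      chunk = [];
    }
  }
  if (chunk.length) await flush(chunk); // final partial batch, as in `complete`
}
```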
@ -0,0 +1,721 @@
|
||||
import { Readable } from 'stream'; |
||||
import { UITypes, ViewTypes } from 'nocodb-sdk'; |
||||
import { unparse } from 'papaparse'; |
||||
import { Injectable, Logger } from '@nestjs/common'; |
||||
import NcConnectionMgrv2 from '../../../utils/common/NcConnectionMgrv2'; |
||||
import { getViewAndModelByAliasOrId } from '../../../modules/datas/helpers'; |
||||
import { |
||||
clearPrefix, |
||||
generateBaseIdMap, |
||||
} from '../../../helpers/exportImportHelpers'; |
||||
import NcPluginMgrv2 from '../../../helpers/NcPluginMgrv2'; |
||||
import { NcError } from '../../../helpers/catchError'; |
||||
import { Base, Hook, Model, Project } from '../../../models'; |
||||
import { DatasService } from '../../../services/datas.service'; |
||||
import { elapsedTime, initTime } from '../helpers'; |
||||
import type { BaseModelSqlv2 } from '../../../db/BaseModelSqlv2'; |
||||
import type { View } from '../../../models'; |
||||
|
||||
@Injectable() |
||||
export class ExportService { |
||||
private readonly logger = new Logger(ExportService.name); |
||||
|
||||
constructor(private datasService: DatasService) {} |
||||
|
||||
async serializeModels(param: { |
||||
modelIds: string[]; |
||||
excludeViews?: boolean; |
||||
excludeHooks?: boolean; |
||||
}) { |
||||
const { modelIds } = param; |
||||
|
||||
const excludeViews = param?.excludeViews || false; |
||||
const excludeHooks = param?.excludeHooks || false; |
||||
|
||||
const serializedModels = []; |
||||
|
||||
// db id to structured id
|
||||
const idMap = new Map<string, string>(); |
||||
|
||||
const projects: Project[] = []; |
||||
const bases: Base[] = []; |
||||
const modelsMap = new Map<string, Model[]>(); |
||||
|
||||
for (const modelId of modelIds) { |
||||
const model = await Model.get(modelId); |
||||
|
||||
if (!model) |
||||
return NcError.badRequest(`Model not found for id '${modelId}'`); |
||||
|
||||
const fndProject = projects.find((p) => p.id === model.project_id); |
||||
const project = fndProject || (await Project.get(model.project_id)); |
||||
|
||||
const fndBase = bases.find((b) => b.id === model.base_id); |
||||
const base = fndBase || (await Base.get(model.base_id)); |
||||
|
||||
if (!fndProject) projects.push(project); |
||||
if (!fndBase) bases.push(base); |
||||
|
||||
if (!modelsMap.has(base.id)) { |
||||
modelsMap.set(base.id, await generateBaseIdMap(base, idMap)); |
||||
} |
||||
|
||||
await model.getColumns(); |
||||
await model.getViews(); |
||||
|
||||
// if views are excluded, filter all views except default
|
||||
if (excludeViews) { |
||||
model.views = model.views.filter((v) => v.is_default); |
||||
} |
||||
|
||||
for (const column of model.columns) { |
||||
await column.getColOptions(); |
||||
if (column.colOptions) { |
||||
for (const [k, v] of Object.entries(column.colOptions)) { |
||||
switch (k) { |
||||
case 'fk_mm_child_column_id': |
||||
case 'fk_mm_parent_column_id': |
||||
case 'fk_mm_model_id': |
||||
case 'fk_parent_column_id': |
||||
case 'fk_child_column_id': |
||||
case 'fk_related_model_id': |
||||
case 'fk_relation_column_id': |
||||
case 'fk_lookup_column_id': |
||||
case 'fk_rollup_column_id': |
||||
column.colOptions[k] = idMap.get(v as string); |
||||
break; |
||||
case 'options': |
||||
for (const o of column.colOptions['options']) { |
||||
delete o.id; |
||||
delete o.fk_column_id; |
||||
} |
||||
break; |
||||
case 'formula': |
||||
column.colOptions[k] = column.colOptions[k].replace( |
||||
/(?<=\{\{).*?(?=\}\})/gm, |
||||
(match) => idMap.get(match), |
||||
); |
||||
break; |
||||
case 'id': |
||||
case 'created_at': |
||||
case 'updated_at': |
||||
case 'fk_column_id': |
||||
delete column.colOptions[k]; |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
for (const view of model.views) { |
||||
idMap.set(view.id, `${idMap.get(model.id)}::${view.id}`); |
||||
await view.getColumns(); |
||||
await view.getFilters(); |
||||
await view.getSorts(); |
||||
if (view.filter) { |
||||
const export_filters = []; |
||||
for (const fl of view.filter.children) { |
||||
const tempFl = { |
||||
id: `${idMap.get(view.id)}::${fl.id}`, |
||||
fk_column_id: idMap.get(fl.fk_column_id), |
||||
fk_parent_id: fl.fk_parent_id, |
||||
is_group: fl.is_group, |
||||
logical_op: fl.logical_op, |
||||
comparison_op: fl.comparison_op, |
||||
comparison_sub_op: fl.comparison_sub_op, |
||||
value: fl.value, |
||||
}; |
||||
if (tempFl.is_group) { |
||||
delete tempFl.comparison_op; |
||||
delete tempFl.comparison_sub_op; |
||||
delete tempFl.value; |
||||
} |
||||
export_filters.push(tempFl); |
||||
} |
||||
view.filter.children = export_filters; |
||||
} |
||||
|
||||
if (view.sorts) { |
||||
const export_sorts = []; |
||||
for (const sr of view.sorts) { |
||||
const tempSr = { |
||||
fk_column_id: idMap.get(sr.fk_column_id), |
||||
direction: sr.direction, |
||||
}; |
||||
export_sorts.push(tempSr); |
||||
} |
||||
view.sorts = export_sorts; |
||||
} |
||||
|
||||
if (view.view) { |
||||
for (const [k, v] of Object.entries(view.view)) { |
||||
switch (k) { |
||||
case 'fk_column_id': |
||||
case 'fk_cover_image_col_id': |
||||
case 'fk_grp_col_id': |
||||
view.view[k] = idMap.get(v as string); |
||||
break; |
||||
case 'meta': |
||||
if (view.type === ViewTypes.KANBAN) { |
||||
const meta = JSON.parse(view.view.meta as string) as Record< |
||||
string, |
||||
any |
||||
>; |
||||
for (const [k, v] of Object.entries(meta)) { |
||||
const colId = idMap.get(k as string); |
||||
for (const op of v) { |
||||
op.fk_column_id = idMap.get(op.fk_column_id); |
||||
delete op.id; |
||||
} |
||||
meta[colId] = v; |
||||
delete meta[k]; |
||||
} |
||||
view.view.meta = meta; |
||||
} |
||||
break; |
||||
case 'created_at': |
||||
case 'updated_at': |
||||
case 'fk_view_id': |
||||
case 'project_id': |
||||
case 'base_id': |
||||
case 'uuid': |
||||
delete view.view[k]; |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
const serializedHooks = []; |
||||
|
||||
if (!excludeHooks) { |
||||
const hooks = await Hook.list({ fk_model_id: model.id }); |
||||
|
||||
for (const hook of hooks) { |
||||
idMap.set(hook.id, `${idMap.get(hook.fk_model_id)}::${hook.id}`); |
||||
|
||||
const hookFilters = await hook.getFilters(); |
||||
const export_filters = []; |
||||
|
||||
if (hookFilters) { |
||||
for (const fl of hookFilters) { |
||||
const tempFl = { |
||||
id: `${idMap.get(hook.id)}::${fl.id}`, |
||||
fk_column_id: idMap.get(fl.fk_column_id), |
||||
fk_parent_id: fl.fk_parent_id, |
||||
is_group: fl.is_group, |
||||
logical_op: fl.logical_op, |
||||
comparison_op: fl.comparison_op, |
||||
comparison_sub_op: fl.comparison_sub_op, |
||||
value: fl.value, |
||||
}; |
||||
if (tempFl.is_group) { |
||||
delete tempFl.comparison_op; |
||||
delete tempFl.comparison_sub_op; |
||||
delete tempFl.value; |
||||
} |
||||
export_filters.push(tempFl); |
||||
} |
||||
} |
||||
|
||||
serializedHooks.push({ |
||||
id: idMap.get(hook.id), |
||||
title: hook.title, |
||||
active: hook.active, |
||||
condition: hook.condition, |
||||
event: hook.event, |
||||
operation: hook.operation, |
||||
notification: hook.notification, |
||||
version: hook.version, |
||||
filters: export_filters, |
||||
}); |
||||
} |
||||
} |
||||
|
||||
serializedModels.push({ |
||||
model: { |
||||
id: idMap.get(model.id), |
||||
prefix: project.prefix, |
||||
title: model.title, |
||||
table_name: clearPrefix(model.table_name, project.prefix), |
||||
meta: model.meta, |
||||
columns: model.columns.map((column) => ({ |
||||
id: idMap.get(column.id), |
||||
ai: column.ai, |
||||
column_name: column.column_name, |
||||
cc: column.cc, |
||||
cdf: column.cdf, |
||||
meta: column.meta, |
||||
pk: column.pk, |
||||
pv: column.pv, |
||||
order: column.order, |
||||
rqd: column.rqd, |
||||
system: column.system, |
||||
uidt: column.uidt, |
||||
title: column.title, |
||||
un: column.un, |
||||
unique: column.unique, |
||||
colOptions: column.colOptions, |
||||
})), |
||||
}, |
||||
views: model.views.map((view) => ({ |
||||
id: idMap.get(view.id), |
||||
is_default: view.is_default, |
||||
type: view.type, |
||||
meta: view.meta, |
||||
order: view.order, |
||||
title: view.title, |
||||
show: view.show, |
||||
show_system_fields: view.show_system_fields, |
||||
filter: view.filter, |
||||
sorts: view.sorts, |
||||
lock_type: view.lock_type, |
||||
columns: view.columns.map((column) => { |
||||
const { |
||||
id, |
||||
fk_view_id, |
||||
fk_column_id, |
||||
project_id, |
||||
base_id, |
||||
created_at, |
||||
updated_at, |
||||
uuid, |
||||
...rest |
||||
} = column as any; |
||||
return { |
||||
fk_column_id: idMap.get(fk_column_id), |
||||
...rest, |
||||
}; |
||||
}), |
||||
view: view.view, |
||||
})), |
||||
hooks: serializedHooks, |
||||
}); |
||||
} |
||||
|
||||
return serializedModels; |
||||
} |
||||
|
||||
async streamModelDataAsCsv(param: { |
||||
dataStream: Readable; |
||||
linkStream: Readable; |
||||
projectId: string; |
||||
modelId: string; |
||||
viewId?: string; |
||||
handledMmList?: string[]; |
||||
_fieldIds?: string[]; |
||||
}) { |
||||
const { dataStream, linkStream, handledMmList } = param; |
||||
|
||||
const { model, view } = await getViewAndModelByAliasOrId({ |
||||
projectName: param.projectId, |
||||
tableName: param.modelId, |
||||
viewName: param.viewId, |
||||
}); |
||||
|
||||
const base = await Base.get(model.base_id); |
||||
|
||||
await model.getColumns(); |
||||
|
||||
const btMap = new Map<string, string>(); |
||||
|
||||
for (const column of model.columns.filter( |
||||
(col) => |
||||
col.uidt === UITypes.LinkToAnotherRecord && |
||||
col.colOptions?.type === 'bt', |
||||
)) { |
||||
await column.getColOptions(); |
||||
const fkCol = model.columns.find( |
||||
(c) => c.id === column.colOptions?.fk_child_column_id, |
||||
); |
||||
if (fkCol) { |
||||
// replace bt column with fk column if it is in _fieldIds
|
||||
if (param._fieldIds && param._fieldIds.includes(column.id)) { |
||||
param._fieldIds.push(fkCol.id); |
||||
const btIndex = param._fieldIds.indexOf(column.id); |
||||
param._fieldIds.splice(btIndex, 1); |
||||
} |
||||
|
||||
btMap.set( |
||||
fkCol.id, |
||||
`${column.project_id}::${column.base_id}::${column.fk_model_id}::${column.id}`, |
||||
); |
||||
} |
||||
} |
||||
|
||||
const fields = param._fieldIds |
||||
? model.columns |
||||
.filter((c) => param._fieldIds?.includes(c.id)) |
||||
.map((c) => c.title) |
||||
.join(',') |
||||
: model.columns |
||||
.filter((c) => c.uidt !== UITypes.LinkToAnotherRecord) |
||||
.map((c) => c.title) |
||||
.join(','); |
||||
|
||||
const mmColumns = model.columns.filter( |
||||
(col) => |
||||
col.uidt === UITypes.LinkToAnotherRecord && |
||||
col.colOptions?.type === 'mm', |
||||
); |
||||
|
||||
const hasLink = mmColumns.length > 0; |
||||
|
||||
dataStream.setEncoding('utf8'); |
||||
|
||||
const formatData = (data: any) => { |
||||
for (const row of data) { |
||||
for (const [k, v] of Object.entries(row)) { |
||||
const col = model.columns.find((c) => c.title === k); |
||||
if (col) { |
||||
const colId = `${col.project_id}::${col.base_id}::${col.fk_model_id}::${col.id}`; |
||||
switch (col.uidt) { |
||||
case UITypes.ForeignKey: |
||||
{ |
||||
if (btMap.has(col.id)) { |
||||
row[btMap.get(col.id)] = v; |
||||
delete row[k]; |
||||
} |
||||
} |
||||
break; |
||||
case UITypes.Attachment: |
||||
try { |
||||
row[colId] = JSON.stringify(v); |
||||
} catch (e) { |
||||
row[colId] = v; |
||||
} |
||||
break; |
||||
case UITypes.Formula: |
||||
case UITypes.Lookup: |
||||
case UITypes.Rollup: |
||||
case UITypes.Barcode: |
||||
case UITypes.QrCode: |
||||
// skip these types
|
||||
break; |
||||
default: |
||||
row[colId] = v; |
||||
break; |
||||
} |
||||
delete row[k]; |
||||
} |
||||
} |
||||
} |
||||
return { data }; |
||||
}; |
||||
|
||||
const baseModel = await Model.getBaseModelSQL({ |
||||
id: model.id, |
||||
viewId: view?.id, |
||||
dbDriver: await NcConnectionMgrv2.get(base), |
||||
}); |
||||
|
||||
const limit = 200; |
||||
const offset = 0; |
||||
|
||||
try { |
||||
await this.recursiveRead( |
||||
formatData, |
||||
baseModel, |
||||
dataStream, |
||||
model, |
||||
view, |
||||
offset, |
||||
limit, |
||||
fields, |
||||
true, |
||||
); |
||||
} catch (e) { |
||||
this.logger.error(e); |
||||
throw e; |
||||
} |
||||
|
||||
if (hasLink) { |
||||
linkStream.setEncoding('utf8'); |
||||
|
||||
for (const mm of mmColumns) { |
||||
if (handledMmList.includes(mm.colOptions?.fk_mm_model_id)) continue; |
||||
|
||||
const mmModel = await Model.get(mm.colOptions?.fk_mm_model_id); |
||||
|
||||
await mmModel.getColumns(); |
||||
|
||||
const childColumn = mmModel.columns.find( |
||||
(col) => col.id === mm.colOptions?.fk_mm_child_column_id, |
||||
); |
||||
|
||||
const parentColumn = mmModel.columns.find( |
||||
(col) => col.id === mm.colOptions?.fk_mm_parent_column_id, |
||||
); |
||||
|
||||
const childColumnTitle = childColumn.title; |
||||
const parentColumnTitle = parentColumn.title; |
||||
|
||||
const mmFields = mmModel.columns |
||||
.filter((c) => c.uidt === UITypes.ForeignKey) |
||||
.map((c) => c.title) |
||||
.join(','); |
||||
|
||||
const mmFormatData = (data: any) => { |
||||
data.map((d) => { |
||||
d.column = mm.id; |
||||
d.child = d[childColumnTitle]; |
||||
d.parent = d[parentColumnTitle]; |
||||
delete d[childColumnTitle]; |
||||
delete d[parentColumnTitle]; |
||||
return d; |
||||
}); |
||||
return { data }; |
||||
}; |
||||
|
||||
const mmLimit = 200; |
||||
const mmOffset = 0; |
||||
|
||||
const mmBase = |
||||
mmModel.base_id === base.id ? base : await Base.get(mmModel.base_id); |
||||
|
||||
const mmBaseModel = await Model.getBaseModelSQL({ |
||||
id: mmModel.id, |
||||
dbDriver: await NcConnectionMgrv2.get(mmBase), |
||||
}); |
||||
|
||||
try { |
||||
await this.recursiveLinkRead( |
||||
mmFormatData, |
||||
mmBaseModel, |
||||
linkStream, |
||||
mmModel, |
||||
undefined, |
||||
mmOffset, |
||||
mmLimit, |
||||
mmFields, |
||||
true, |
||||
); |
||||
} catch (e) { |
||||
this.logger.error(e); |
||||
throw e; |
||||
} |
||||
|
||||
handledMmList.push(mm.colOptions?.fk_mm_model_id); |
||||
} |
||||
|
||||
linkStream.push(null); |
||||
} else { |
||||
linkStream.push(null); |
||||
} |
||||
} |
||||
|
||||
async recursiveRead( |
||||
formatter: (data: any) => { data: any }, |
||||
baseModel: BaseModelSqlv2, |
||||
stream: Readable, |
||||
model: Model, |
||||
view: View, |
||||
offset: number, |
||||
limit: number, |
||||
fields: string, |
||||
header = false, |
||||
): Promise<void> { |
||||
return new Promise((resolve, reject) => { |
||||
this.datasService |
||||
.getDataList({ |
||||
model, |
||||
view, |
||||
query: { limit, offset, fields }, |
||||
baseModel, |
||||
}) |
||||
.then((result) => { |
||||
try { |
||||
if (!header) { |
||||
stream.push('\r\n'); |
||||
} |
||||
const { data } = formatter(result.list); |
||||
stream.push(unparse(data, { header })); |
||||
if (result.pageInfo.isLastPage) { |
||||
stream.push(null); |
||||
resolve(); |
||||
} else { |
||||
this.recursiveRead( |
||||
formatter, |
||||
baseModel, |
||||
stream, |
||||
model, |
||||
view, |
||||
offset + limit, |
||||
limit, |
||||
fields, |
||||
).then(resolve); |
||||
} |
||||
} catch (e) { |
||||
reject(e); |
||||
} |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
async recursiveLinkRead( |
||||
formatter: (data: any) => { data: any }, |
||||
baseModel: BaseModelSqlv2, |
||||
linkStream: Readable, |
||||
model: Model, |
||||
view: View, |
||||
offset: number, |
||||
limit: number, |
||||
fields: string, |
||||
header = false, |
||||
): Promise<void> { |
||||
return new Promise((resolve, reject) => { |
||||
this.datasService |
||||
.getDataList({ |
||||
model, |
||||
view, |
||||
query: { limit, offset, fields }, |
||||
baseModel, |
||||
}) |
||||
.then((result) => { |
||||
try { |
||||
if (!header) { |
||||
linkStream.push('\r\n'); |
||||
} |
||||
const { data } = formatter(result.list); |
||||
if (data) linkStream.push(unparse(data, { header })); |
||||
if (result.pageInfo.isLastPage) { |
||||
resolve(); |
||||
} else { |
||||
this.recursiveLinkRead( |
||||
formatter, |
||||
baseModel, |
||||
linkStream, |
||||
model, |
||||
view, |
||||
offset + limit, |
||||
limit, |
||||
fields, |
||||
).then(resolve); |
||||
} |
||||
} catch (e) { |
||||
reject(e); |
||||
} |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
async exportBase(param: { path: string; baseId: string }) { |
||||
const hrTime = initTime(); |
||||
|
||||
const base = await Base.get(param.baseId); |
||||
|
||||
if (!base) |
||||
throw NcError.badRequest(`Base not found for id '${param.baseId}'`); |
||||
|
||||
const project = await Project.get(base.project_id); |
||||
|
||||
const models = (await base.getModels()).filter( |
||||
// TODO revert this when issue with cache is fixed
|
||||
(m) => m.base_id === base.id && !m.mm && m.type === 'table', |
||||
); |
||||
|
||||
const exportedModels = await this.serializeModels({ |
||||
modelIds: models.map((m) => m.id), |
||||
}); |
||||
|
||||
elapsedTime( |
||||
hrTime, |
||||
`serialize models for ${base.project_id}::${base.id}`, |
||||
'exportBase', |
||||
); |
||||
|
||||
const exportData = { |
||||
id: `${project.id}::${base.id}`, |
||||
models: exportedModels, |
||||
}; |
||||
|
||||
const storageAdapter = await NcPluginMgrv2.storageAdapter(); |
||||
|
||||
const destPath = `export/${project.id}/${base.id}/${param.path}`; |
||||
|
||||
try { |
||||
const readableStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
readableStream.setEncoding('utf8'); |
||||
|
||||
readableStream.push(JSON.stringify(exportData)); |
||||
|
||||
readableStream.push(null); |
||||
|
||||
await (storageAdapter as any).fileCreateByStream( |
||||
`${destPath}/schema.json`, |
||||
readableStream, |
||||
); |
||||
|
||||
const handledMmList: string[] = []; |
||||
|
||||
const combinedLinkStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
const uploadLinkPromise = (storageAdapter as any).fileCreateByStream( |
||||
`${destPath}/data/links.csv`, |
||||
combinedLinkStream, |
||||
); |
||||
|
||||
for (const model of models) { |
||||
const dataStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
const linkStream = new Readable({ |
||||
read() {}, |
||||
}); |
||||
|
||||
const linkPromise = new Promise((resolve) => { |
||||
linkStream.on('data', (chunk) => { |
||||
combinedLinkStream.push(chunk); |
||||
}); |
||||
|
||||
linkStream.on('end', () => { |
||||
combinedLinkStream.push('\r\n'); |
||||
resolve(null); |
||||
}); |
||||
|
||||
linkStream.on('error', (e) => { |
||||
this.logger.error(e); |
||||
resolve(null); |
||||
}); |
||||
}); |
||||
|
||||
const uploadPromise = (storageAdapter as any).fileCreateByStream( |
||||
`${destPath}/data/${model.id}.csv`, |
||||
dataStream, |
||||
); |
||||
|
||||
this.streamModelDataAsCsv({ |
||||
dataStream, |
||||
linkStream, |
||||
projectId: project.id, |
||||
modelId: model.id, |
||||
handledMmList, |
||||
}); |
||||
|
||||
await Promise.all([uploadPromise, linkPromise]); |
||||
} |
||||
|
||||
combinedLinkStream.push(null); |
||||
|
||||
await uploadLinkPromise; |
||||
|
||||
elapsedTime( |
||||
hrTime, |
||||
`export base ${base.project_id}::${base.id}`, |
||||
'exportBase', |
||||
); |
||||
} catch (e) { |
||||
throw NcError.badRequest(e); |
||||
} |
||||
|
||||
return { |
||||
path: destPath, |
||||
}; |
||||
} |
||||
} |
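`recursiveRead` and `recursiveLinkRead` both page through `getDataList` by advancing `offset` by `limit` until `pageInfo.isLastPage`. The same contract written iteratively, as a sketch with a hypothetical `fetchPage` standing in for the data service:

```ts
interface PageInfo {
  isLastPage: boolean;
}

// Same pagination contract as recursiveRead; `fetchPage` is a stand-in
// for datasService.getDataList.
async function readAllPages<T>(
  fetchPage: (offset: number, limit: number) => Promise<{ list: T[]; pageInfo: PageInfo }>,
  limit = 200,
): Promise<T[]> {
  const rows: T[] = [];
  let offset = 0;
  for (;;) {
    const { list, pageInfo } = await fetchPage(offset, limit);
    rows.push(...list);
    if (pageInfo.isLastPage) return rows;
    offset += limit;
  }
}
```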
File diff suppressed because it is too large
@ -0,0 +1,136 @@
|
||||
import { Injectable } from '@nestjs/common'; |
||||
import PQueue from 'p-queue'; |
||||
import Emittery from 'emittery'; |
||||
import { JobStatus, JobTypes } from '../../interface/Jobs'; |
||||
import { DuplicateProcessor } from './export-import/duplicate.processor'; |
||||
import { JobsEventService } from './jobs-event.service'; |
||||
import { AtImportProcessor } from './at-import/at-import.processor'; |
||||
|
||||
interface Job { |
||||
id: string; |
||||
name: string; |
||||
status: string; |
||||
data: any; |
||||
} |
||||
|
||||
@Injectable() |
||||
export class QueueService { |
||||
static queue = new PQueue({ concurrency: 1 }); |
||||
static queueIdCounter = 1; |
||||
static processed = 0; |
||||
static queueMemory: Job[] = []; |
||||
static emitter = new Emittery(); |
||||
|
||||
constructor( |
||||
private readonly jobsEventService: JobsEventService, |
||||
private readonly duplicateProcessor: DuplicateProcessor, |
||||
private readonly atImportProcessor: AtImportProcessor, |
||||
) { |
||||
this.emitter.on(JobStatus.ACTIVE, (data: { job: Job }) => { |
||||
const job = this.queueMemory.find( |
||||
(job) => job.id === data.job.id && job.name === data.job.name, |
||||
); |
||||
job.status = JobStatus.ACTIVE; |
||||
this.jobsEventService.onActive.apply(this.jobsEventService, [job as any]); |
||||
}); |
||||
this.emitter.on(JobStatus.COMPLETED, (data: { job: Job; result: any }) => { |
||||
const job = this.queueMemory.find( |
||||
(job) => job.id === data.job.id && job.name === data.job.name, |
||||
); |
||||
job.status = JobStatus.COMPLETED; |
||||
this.jobsEventService.onCompleted.apply(this.jobsEventService, [ |
||||
job, |
||||
data.result, |
||||
]); |
||||
// clear job from memory
|
||||
this.removeJob(job); |
||||
}); |
||||
this.emitter.on(JobStatus.FAILED, (data: { job: Job; error: Error }) => { |
||||
const job = this.queueMemory.find( |
||||
(job) => job.id === data.job.id && job.name === data.job.name, |
||||
); |
||||
job.status = JobStatus.FAILED; |
||||
this.jobsEventService.onFailed.apply(this.jobsEventService, [ |
||||
job, |
||||
data.error, |
||||
]); |
||||
// clear job from memory
|
||||
this.removeJob(job); |
||||
}); |
||||
} |
||||
|
||||
jobMap = { |
||||
[JobTypes.DuplicateBase]: { |
||||
this: this.duplicateProcessor, |
||||
fn: this.duplicateProcessor.duplicateBase, |
||||
}, |
||||
[JobTypes.DuplicateModel]: { |
||||
this: this.duplicateProcessor, |
||||
fn: this.duplicateProcessor.duplicateModel, |
||||
}, |
||||
[JobTypes.AtImport]: { |
||||
this: this.atImportProcessor, |
||||
fn: this.atImportProcessor.job, |
||||
}, |
||||
}; |
||||
|
||||
async jobWrapper(job: Job) { |
||||
this.emitter.emit(JobStatus.ACTIVE, { job }); |
||||
try { |
||||
const result = await this.jobMap[job.name].fn.apply( |
||||
this.jobMap[job.name].this, |
||||
[job], |
||||
); |
||||
this.emitter.emit(JobStatus.COMPLETED, { job, result }); |
||||
} catch (error) { |
||||
this.emitter.emit(JobStatus.FAILED, { job, error }); |
||||
} |
||||
} |
||||
|
||||
get emitter() { |
||||
return QueueService.emitter; |
||||
} |
||||
|
||||
get queue() { |
||||
return QueueService.queue; |
||||
} |
||||
|
||||
get queueMemory() { |
||||
return QueueService.queueMemory; |
||||
} |
||||
|
||||
get queueIndex() { |
||||
return QueueService.queueIdCounter; |
||||
} |
||||
|
||||
set queueIndex(index: number) { |
||||
QueueService.queueIdCounter = index; |
||||
} |
||||
|
||||
add(name: string, data: any) { |
||||
const id = `${this.queueIndex++}`; |
||||
const job = { id: `${id}`, name, status: JobStatus.WAITING, data }; |
||||
this.queueMemory.push(job); |
||||
this.queue.add(() => this.jobWrapper(job)); |
||||
return { id, name }; |
||||
} |
||||
|
||||
getJobs(types: string[] | string) { |
||||
types = Array.isArray(types) ? types : [types]; |
||||
return this.queueMemory.filter((q) => types.includes(q.status)); |
||||
} |
||||
|
||||
getJob(id: string) { |
||||
return this.queueMemory.find((q) => q.id === id); |
||||
} |
||||
|
||||
  // remove job from memory
  private removeJob(job: Job) {
    const fIndex = this.queueMemory.findIndex(
      (q) => q.id === job.id && q.name === job.name,
    );
    // findIndex returns -1 when nothing matches; a plain truthiness check would
    // also skip the job at index 0, so compare against -1 explicitly
    if (fIndex !== -1) {
      this.queueMemory.splice(fIndex, 1);
    }
  }
}
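A sketch of how this fallback queue is used: `add` returns the same `{ id, name }` handle as the Bull-backed path, and jobs run strictly one at a time. The structural type below is an assumption so the sketch does not guess the service's file name:

```ts
import { JobStatus, JobTypes } from '../../interface/Jobs';

// `queueService` would be injected by Nest in real code; typed structurally here.
declare const queueService: {
  add(name: string, data: any): { id: string; name: string };
  getJob(id: string): { id: string; name: string; status: string; data: any } | undefined;
};

const handle = queueService.add(JobTypes.DuplicateModel, {
  projectId: 'p_xxx',
  modelId: 'md_xxx',
});

// Jobs run one at a time (PQueue concurrency: 1); until completion the job
// stays in the in-memory list with a JobStatus value.
console.log(queueService.getJob(handle.id)?.status === JobStatus.ACTIVE);
```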
@ -0,0 +1,23 @@
import { Logger } from '@nestjs/common';
import { JOBS_QUEUE } from '../../interface/Jobs';

export const initTime = function () {
  return {
    hrTime: process.hrtime(),
  };
};

export const elapsedTime = function (
  time: { hrTime: [number, number] },
  label?: string,
  context?: string,
) {
  const elapsedS = process.hrtime(time.hrTime)[0].toFixed(3);
  const elapsedMs = process.hrtime(time.hrTime)[1] / 1000000;
  if (label)
    Logger.debug(
      `${label}: ${elapsedS}s ${elapsedMs}ms`,
      `${JOBS_QUEUE}${context ? `:${context}` : ''}`,
    );
  time.hrTime = process.hrtime();
};
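Intended call pattern for these timing helpers, as used by the processors above: `elapsedTime` logs the delta since the last call and then resets the stored `hrtime`, so each phase is measured on its own:

```ts
import { elapsedTime, initTime } from '../helpers'; // path as used by the processors above

const hrTime = initTime();
// ...phase one work...
elapsedTime(hrTime, 'serialize models schema', 'duplicateBase');
// ...phase two work...
elapsedTime(hrTime, 'import models schema', 'duplicateBase'); // measures only phase two
```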
@ -0,0 +1,69 @@
|
||||
import { |
||||
OnQueueActive, |
||||
OnQueueCompleted, |
||||
OnQueueFailed, |
||||
Processor, |
||||
} from '@nestjs/bull'; |
||||
import { Job } from 'bull'; |
||||
import boxen from 'boxen'; |
||||
import { EventEmitter2 } from '@nestjs/event-emitter'; |
||||
import { JobEvents, JOBS_QUEUE, JobStatus } from '../../interface/Jobs'; |
||||
|
||||
@Processor(JOBS_QUEUE) |
||||
export class JobsEventService { |
||||
constructor(private eventEmitter: EventEmitter2) {} |
||||
|
||||
@OnQueueActive() |
||||
onActive(job: Job) { |
||||
this.eventEmitter.emit(JobEvents.STATUS, { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
status: JobStatus.ACTIVE, |
||||
}); |
||||
} |
||||
|
||||
@OnQueueFailed() |
||||
onFailed(job: Job, error: Error) { |
||||
console.error( |
||||
boxen( |
||||
`---- !! JOB FAILED !! ----\nname: ${job.name}\nid:${job.id}\nerror:${error.name} (${error.message})\n\nstack: ${error.stack}`, |
||||
{ |
||||
padding: 1, |
||||
borderStyle: 'double', |
||||
borderColor: 'yellow', |
||||
}, |
||||
), |
||||
); |
||||
|
||||
this.eventEmitter.emit(JobEvents.STATUS, { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
status: JobStatus.FAILED, |
||||
data: { |
||||
error: { |
||||
message: error?.message, |
||||
}, |
||||
}, |
||||
}); |
||||
} |
||||
|
||||
@OnQueueCompleted() |
||||
onCompleted(job: Job, data: any) { |
||||
this.eventEmitter.emit(JobEvents.STATUS, { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
status: JobStatus.COMPLETED, |
||||
data: { |
||||
result: data, |
||||
}, |
||||
}); |
||||
} |
||||
|
||||
sendLog(job: Job, data: { message: string }) { |
||||
this.eventEmitter.emit(JobEvents.LOG, { |
||||
name: job.name, |
||||
id: job.id.toString(), |
||||
data, |
||||
}); |
||||
} |
||||
} |
@ -0,0 +1,121 @@
import {
  ConnectedSocket,
  MessageBody,
  SubscribeMessage,
  WebSocketGateway,
  WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { ExecutionContextHost } from '@nestjs/core/helpers/execution-context-host';
import { AuthGuard } from '@nestjs/passport';
import { OnEvent } from '@nestjs/event-emitter';
import { JobEvents } from '../../interface/Jobs';
import { JobsService } from './jobs.service';
import type { JobStatus } from '../../interface/Jobs';
import type { OnModuleInit } from '@nestjs/common';

@WebSocketGateway({
  cors: {
    origin: '*',
    allowedHeaders: ['xc-auth'],
    credentials: true,
  },
  namespace: 'jobs',
})
export class JobsGateway implements OnModuleInit {
  constructor(private readonly jobsService: JobsService) {}

  @WebSocketServer()
  server: Server;

  async onModuleInit() {
    this.server.use(async (socket, next) => {
      try {
        const context = new ExecutionContextHost([socket.handshake as any]);
        const guard = new (AuthGuard('jwt'))(context);
        await guard.canActivate(context);
      } catch {}

      next();
    });
  }

  @SubscribeMessage('subscribe')
  async subscribe(
    @MessageBody()
    body: { _id: number; data: { id: string; name: string } | any },
    @ConnectedSocket() client: Socket,
  ): Promise<void> {
    const { _id, data } = body;
    if (
      Object.keys(data).every((k) => ['name', 'id'].includes(k)) &&
      data?.name &&
      data?.id
    ) {
      const rooms = (await this.jobsService.jobList(data.name)).map(
        (j) => `${j.name}-${j.id}`,
      );
      const room = rooms.find((r) => r === `${data.name}-${data.id}`);
      if (room) {
        client.join(`${data.name}-${data.id}`);
        client.emit('subscribed', {
          _id,
          name: data.name,
          id: data.id,
        });
      }
    } else {
      const job = await this.jobsService.getJobWithData(data);
      if (job) {
        client.join(`${job.name}-${job.id}`);
        client.emit('subscribed', {
          _id,
          name: job.name,
          id: job.id,
        });
      }
    }
  }

  @SubscribeMessage('status')
  async status(
    @MessageBody() body: { _id: number; data: { id: string; name: string } },
    @ConnectedSocket() client: Socket,
  ): Promise<void> {
    const { _id, data } = body;
    client.emit('status', {
      _id,
      id: data.id,
      name: data.name,
      status: await this.jobsService.jobStatus(data.id),
    });
  }

  @OnEvent(JobEvents.STATUS)
  async sendJobStatus(data: {
    name: string;
    id: string;
    status: JobStatus;
    data?: any;
  }): Promise<void> {
    this.server.to(`${data.name}-${data.id}`).emit('status', {
      id: data.id,
      name: data.name,
      status: data.status,
      data: data.data,
    });
  }

  @OnEvent(JobEvents.LOG)
  async sendJobLog(data: {
    name: string;
    id: string;
    data: { message: string };
  }): Promise<void> {
    this.server.to(`${data.name}-${data.id}`).emit('log', {
      id: data.id,
      name: data.name,
      data: data.data,
    });
  }
}
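A hedged client-side sketch of talking to this gateway over the 'jobs' namespace. The server URL, port, and token value are placeholders; only the event names and payload shapes ('subscribe' request with a client-side _id correlation field, 'subscribed'/'status'/'log' responses, the 'xc-auth' header) follow the gateway above.

import { io } from 'socket.io-client';

// assumed local server address; adjust to the actual deployment
const socket = io('http://localhost:8080/jobs', {
  extraHeaders: { 'xc-auth': '<jwt-token>' },
});

// ask to join the room for one specific job by name and id
socket.emit('subscribe', { _id: 1, data: { name: 'duplicate', id: '42' } });

socket.on('subscribed', (msg) => console.log('subscribed to', msg.name, msg.id));
socket.on('status', (msg) => console.log('job status', msg.status, msg.data));
socket.on('log', (msg) => console.log('job log', msg.data.message));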
@ -0,0 +1,39 @@
import { Module } from '@nestjs/common';
import { BullModule } from '@nestjs/bull';
import { GlobalModule } from '../global/global.module';
import { DatasModule } from '../datas/datas.module';
import { MetasModule } from '../metas/metas.module';
import { JOBS_QUEUE } from '../../interface/Jobs';
import { JobsService } from './jobs.service';
import { ExportService } from './export-import/export.service';
import { ImportService } from './export-import/import.service';
import { DuplicateController } from './export-import/duplicate.controller';
import { DuplicateProcessor } from './export-import/duplicate.processor';
import { JobsGateway } from './jobs.gateway';
import { QueueService } from './fallback-queue.service';
import { JobsEventService } from './jobs-event.service';
import { AtImportController } from './at-import/at-import.controller';
import { AtImportProcessor } from './at-import/at-import.processor';

@Module({
  imports: [
    GlobalModule,
    DatasModule,
    MetasModule,
    BullModule.registerQueue({
      name: JOBS_QUEUE,
    }),
  ],
  controllers: [DuplicateController, AtImportController],
  providers: [
    QueueService,
    JobsGateway,
    JobsService,
    JobsEventService,
    DuplicateProcessor,
    ExportService,
    ImportService,
    AtImportProcessor,
  ],
})
export class JobsModule {}
@ -0,0 +1,59 @@
import { InjectQueue } from '@nestjs/bull';
import { Injectable } from '@nestjs/common';
import { Queue } from 'bull';
import { JOBS_QUEUE, JobStatus } from '../../interface/Jobs';
import { QueueService } from './fallback-queue.service';

@Injectable()
export class JobsService {
  public activeQueue;
  constructor(
    @InjectQueue(JOBS_QUEUE) private readonly jobsQueue: Queue,
    private readonly fallbackQueueService: QueueService,
  ) {
    this.activeQueue = this.fallbackQueueService;
    /* process.env.NC_REDIS_URL
      ? this.jobsQueue
      : this.fallbackQueueService;
    */
  }

  async jobStatus(jobId: string) {
    return await (await this.activeQueue.getJob(jobId)).getState();
  }

  async jobList(jobType: string) {
    return (
      await this.activeQueue.getJobs([
        JobStatus.ACTIVE,
        JobStatus.WAITING,
        JobStatus.DELAYED,
        JobStatus.PAUSED,
      ])
    ).filter((j) => j.name === jobType);
  }

  async getJobWithData(data: any) {
    const jobs = await this.activeQueue.getJobs([
      // 'completed',
      JobStatus.WAITING,
      JobStatus.ACTIVE,
      JobStatus.DELAYED,
      // 'failed',
      JobStatus.PAUSED,
    ]);

    const job = jobs.find((j) => {
      for (const key in data) {
        if (j.data[key]) {
          if (j.data[key] !== data[key]) return false;
        } else {
          return false;
        }
      }
      return true;
    });

    return job;
  }
}
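The controllers registered in the module presumably enqueue a job and return the { id, name } pair that clients later pass to the gateway's 'subscribe' message. A minimal sketch of that pattern, assuming the Bull queue is used directly; the route, controller name, job name, and payload below are illustrative, not the actual DuplicateController or AtImportController code, and the real services may route through the fallback queue instead of Bull.

import { Controller, Post } from '@nestjs/common';
import { InjectQueue } from '@nestjs/bull';
import { Queue } from 'bull';
import { JOBS_QUEUE } from '../../interface/Jobs';

@Controller()
export class ExampleJobsController {
  constructor(@InjectQueue(JOBS_QUEUE) private readonly jobsQueue: Queue) {}

  @Post('/api/v1/example-job')
  async trigger() {
    const job = await this.jobsQueue.add('example', { some: 'payload' });
    // the client subscribes over the websocket gateway with exactly this pair
    return { id: job.id.toString(), name: job.name };
  }
}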