
feat: row expansion

pull/9788/head
DarkPhoenix2704 2 weeks ago
commit 5c6b198206
Changed files (lines changed):
  1. packages/nc-gui/components/dlg/ExpandTable.vue (108)
  2. packages/nc-gui/components/smartsheet/grid/InfiniteTable.vue (62)
  3. packages/nc-gui/components/smartsheet/grid/Table.vue (4)
  4. packages/nc-gui/components/smartsheet/grid/index.vue (4)
  5. packages/nc-gui/composables/useGridViewData.ts (657)
  6. packages/nc-gui/composables/useInfiniteData.ts (448)
  7. packages/nc-gui/composables/useMultiSelect/index.ts (232)
  8. packages/nc-gui/lang/en.json (1)
  9. packages/nocodb/src/controllers/bulk-data-alias.controller.ts (18)
  10. packages/nocodb/src/db/BaseModelSqlv2.ts (529)
  11. packages/nocodb/src/schema/swagger.json (128)
  12. packages/nocodb/src/services/bulk-data-alias.service.ts (15)
  13. packages/nocodb/src/utils/tsUtils.ts (18)
  14. tests/playwright/pages/Dashboard/Grid/ExpandTable.ts (31)
  15. tests/playwright/pages/Dashboard/Grid/index.ts (3)
  16. tests/playwright/tests/db/features/keyboardShortcuts.spec.ts (8)

packages/nc-gui/components/dlg/ExpandTable.vue (108)

@@ -0,0 +1,108 @@
<script setup lang="ts">
import { onKeyDown } from '@vueuse/core'
const props = defineProps<{
newRows: number
modelValue: boolean
newColumns: number
cellsOverwritten: number
rowsUpdated: number
}>()
const emit = defineEmits(['update:expand', 'cancel', 'update:modelValue'])
const dialogShow = useVModel(props, 'modelValue', emit)
const expand = ref(true)
const updateExpand = () => {
emit('update:expand', expand.value)
dialogShow.value = false
}
onKeyDown('esc', () => {
dialogShow.value = false
emit('update:modelValue', false)
})
const close = () => {
dialogShow.value = false
emit('cancel')
}
</script>
<template>
<NcModal
v-if="dialogShow"
v-model:visible="dialogShow"
:show-separator="false"
:header="$t('activity.createTable')"
size="small"
@keydown.esc="dialogShow = false"
>
<div class="flex justify-between w-full text-base font-semibold mb-2 text-nc-content-gray-emphasis items-center">
{{ 'Do you want to expand this table?' }}
</div>
<div data-testid="nc-expand-table-modal" class="flex flex-col">
<div class="mb-2 nc-content-gray">
To accommodate your pasted data, we need to
<span v-if="cellsOverwritten && rowsUpdated" class="font-bold">
overwrite {{ cellsOverwritten }} {{ cellsOverwritten === 1 ? 'cell' : 'cells' }} in {{ rowsUpdated }}
{{ rowsUpdated === 1 ? 'record' : 'records' }}
</span>
<template v-if="newRows">
<template v-if="cellsOverwritten || rowsUpdated"> and </template>
<span class="font-bold"> insert {{ newRows }} additional {{ newRows === 1 ? 'record' : 'records' }} </span>
</template>
<template v-if="newColumns">
<template v-if="cellsOverwritten || rowsUpdated || newRows"> and </template>
<span class="font-bold"> insert {{ newColumns }} additional {{ newColumns === 1 ? 'field' : 'fields' }} </span>
</template>
</div>
<a-radio-group v-if="(newRows ?? 0) > 0" v-model:value="expand">
<a-radio
data-testid="nc-table-expand-yes"
:style="{
display: 'flex',
height: '30px',
lineHeight: '30px',
}"
:value="true"
>
<div class="nc-content-gray">
<span class="font-semibold"> Expand </span>
table to accommodate all pasted cells
</div>
</a-radio>
<a-radio
data-testid="nc-table-expand-no"
:style="{
display: 'flex',
lineHeight: '30px',
}"
:value="false"
>
<div class="nc-content-gray leading-5">
<span class="font-semibold"> Don't expand </span>
the table. Values beyond the table's current size will be skipped.
</div>
</a-radio>
</a-radio-group>
</div>
<div class="flex flex-row mt-5 justify-end gap-x-2">
<div class="flex gap-2 items-center">
<NcButton data-testid="nc-table-expand-cancel" type="secondary" size="small" @click="close">
{{ $t('labels.cancel') }}
</NcButton>
</div>
<div class="flex gap-2 items-center">
<NcButton data-testid="nc-table-expand" type="primary" size="small" @click="updateExpand">
{{ $t('labels.continue') }}
</NcButton>
</div>
</div>
</NcModal>
</template>
<style scoped lang="scss"></style>

packages/nc-gui/components/smartsheet/grid/InfiniteTable.vue (62)

@@ -39,6 +39,13 @@ const props = defineProps<{
metas?: { metaValue?: TableType; viewMetaValue?: ViewType },
undo?: boolean,
) => Promise<void>
bulkUpsertRows?: (
insertRows: Row[],
updateRows: Row[],
props: string[],
metas?: { metaValue?: TableType; viewMetaValue?: ViewType },
newColumns?: Partial<ColumnType>[],
) => Promise<void>
expandForm?: (row: Row, state?: Record<string, any>, fromToolbar?: boolean) => void
removeRowIfNew?: (row: Row) => void
rowSortRequiredRows: Row[]
@@ -47,6 +54,7 @@ const props = defineProps<{
syncCount: () => Promise<void>
selectedRows: Array<Row>
chunkStates: Array<'loading' | 'loaded' | undefined>
isBulkOperationInProgress: boolean
}>()
const emits = defineEmits(['bulkUpdateDlg'])
@@ -60,6 +68,7 @@ const {
clearCache,
syncCount,
bulkUpdateRows,
bulkUpsertRows,
deleteRangeOfRows,
removeRowIfNew,
clearInvalidRows,
@@ -193,6 +202,8 @@ const totalRows = toRef(props, 'totalRows')
const chunkStates = toRef(props, 'chunkStates')
const isBulkOperationInProgress = toRef(props, 'isBulkOperationInProgress')
const rowHeight = computed(() => rowHeightInPx[`${props.rowHeightEnum}`])
const rowSlice = reactive({
@@ -645,6 +656,45 @@ const onActiveCellChanged = () => {
}
}
const isOpen = ref(false)
async function expandRows({
newRows,
newColumns,
cellsOverwritten,
rowsUpdated,
}: {
newRows: number
newColumns: number
cellsOverwritten: number
rowsUpdated: number
}) {
isOpen.value = true
const options = {
continue: false,
expand: true,
}
const { close } = useDialog(resolveComponent('DlgExpandTable'), {
'modelValue': isOpen,
'newRows': newRows,
'newColumns': newColumns,
'cellsOverwritten': cellsOverwritten,
'rowsUpdated': rowsUpdated,
'onUpdate:expand': closeDialog,
'onUpdate:modelValue': closeDlg,
})
function closeDlg() {
isOpen.value = false
close(1000)
}
async function closeDialog(expand: boolean) {
options.continue = true
options.expand = expand
close(1000)
}
await until(isOpen).toBe(false)
return options
}
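A minimal usage sketch of this helper (the counts are illustrative): the promise resolves only once the dialog closes, so a caller such as the paste handler can branch on the user's choice.

```ts
// Hypothetical caller of expandRows; the real call site is the paste handler in useMultiSelect
const options = await expandRows({ newRows: 3, newColumns: 1, cellsOverwritten: 12, rowsUpdated: 4 })
if (!options.continue) return // dialog dismissed or cancelled
if (options.expand) {
  // insert the extra records/fields, then bulk upsert the pasted matrix
} else {
  // paste into the existing grid only; overflowing values are skipped
}
```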
const {
selectRangeMap,
fillRangeMap,
@@ -671,6 +721,7 @@ const {
clearSelectedRangeOfCells,
makeEditable,
scrollToCell,
expandRows,
(e: KeyboardEvent) => {
const activeDropdownEl = document.querySelector(
'.nc-dropdown-single-select-cell.active,.nc-dropdown-multi-select-cell.active',
@@ -851,6 +902,7 @@ const {
await updateOrSaveRow?.(rowObj, ctx.updatedColumnTitle || columnObj.title)
},
bulkUpdateRows,
bulkUpsertRows,
fillHandle,
view,
undefined,
@@ -1603,8 +1655,6 @@ const maxGridHeight = computed(() => {
return totalRows.value * (isMobileMode.value ? 56 : rowHeight.value)
})
const startRowHeight = computed(() => `${rowSlice.start * rowHeight.value}px`)
const { width, height } = useWindowSize()
watch(
@@ -1638,6 +1688,12 @@ watch(
class="border-r-1 border-l-1 border-gray-200 h-full"
></div>
</div>
<div
v-if="isBulkOperationInProgress"
class="absolute h-full flex items-center justify-center z-70 w-full inset-0 bg-white/50"
>
<GeneralLoader size="regular" />
</div>
<div ref="gridWrapper" class="nc-grid-wrapper min-h-0 flex-1 relative !overflow-auto">
<NcDropdown
@@ -1907,7 +1963,7 @@ watch(
<div
class="table-overlay"
:style="{
height: isBulkOperationInProgress ? '100%' : `${maxGridHeight + 256}px`,
width: `${maxGridWidth}px`,
}"
>

packages/nc-gui/components/smartsheet/grid/Table.vue (4)

@@ -593,6 +593,7 @@ const {
clearSelectedRangeOfCells,
makeEditable,
scrollToCell,
undefined,
async (e: KeyboardEvent) => {
// ignore navigating if single/multi select options is open
const activeDropdownEl = document.querySelector(
@@ -770,6 +771,7 @@ const {
await updateOrSaveRow?.(rowObj, ctx.updatedColumnTitle || columnObj.title)
},
bulkUpdateRows,
undefined,
fillHandle,
view,
paginationDataRef,
@@ -787,7 +789,7 @@ async function saveEmptyRow(rowObj: Row) {
await updateOrSaveRow?.(rowObj)
}
function addEmptyRow(row?: number, skipUpdate = false) {
const rowObj = callAddEmptyRow?.(row)
if (!skipUpdate && rowObj) {

packages/nc-gui/components/smartsheet/grid/index.vue (4)

@@ -49,6 +49,7 @@ const {
navigateToSiblingRow,
deleteRangeOfRows,
bulkUpdateRows,
bulkUpsertRows,
syncCount,
totalRows,
syncVisibleData,
@@ -59,6 +60,7 @@ const {
clearInvalidRows,
isRowSortRequiredRows,
applySorting,
isBulkOperationInProgress,
} = useGridViewData(meta, view, xWhere, reloadVisibleDataHook)
const rowHeight = computed(() => {
@@ -282,6 +284,7 @@ const {
:delete-range-of-rows="deleteRangeOfRows"
:apply-sorting="applySorting"
:bulk-update-rows="bulkUpdateRows"
:bulk-upsert-rows="bulkUpsertRows"
:clear-cache="clearCache"
:clear-invalid-rows="clearInvalidRows"
:data="cachedRows"
@@ -293,6 +296,7 @@ const {
:row-height-enum="rowHeight"
:selected-rows="selectedRows"
:row-sort-required-rows="isRowSortRequiredRows"
:is-bulk-operation-in-progress="isBulkOperationInProgress"
@toggle-optimised-query="toggleOptimisedQuery"
@bulk-update-dlg="bulkUpdateDlg = true"
/>

packages/nc-gui/composables/useGridViewData.ts (657)

@@ -1,7 +1,16 @@
import {
type Api,
type ColumnType,
type PaginatedType,
type TableType,
type ViewType,
isCreatedOrLastModifiedByCol,
isCreatedOrLastModifiedTimeCol,
isVirtualCol,
} from 'nocodb-sdk'
import type { ComputedRef, Ref } from 'vue'
import type { EventHook } from '@vueuse/core'
import { type CellRange, NavigateDir, type Row } from '#imports'
const formatData = (list: Record<string, any>[], pageInfo: PaginatedType) =>
list.map((row, index) => ({
@@ -30,8 +39,14 @@ export function useGridViewData(
const optimisedQuery = useState('optimisedQuery', () => true)
const { getMeta } = useMetas()
const reloadAggregate = inject(ReloadAggregateHookInj)
const router = useRouter()
const { addUndo, clone, defineViewScope } = useUndoRedo()
const route = router.currentRoute
const { appInfo, gridViewPageSize } = useGlobal()
@@ -56,6 +71,8 @@ export function useGridViewData(
const routeQuery = computed(() => route.value.query as Record<string, string>)
const isBulkOperationInProgress = ref(false)
const paginationData = computed({
get: () => (isPublic.value ? sharedPaginationData.value : _paginationData.value),
set: (value) => {
@@ -73,21 +90,23 @@
addEmptyRow,
deleteRow,
deleteRowById,
deleteSelectedRows,
deleteRangeOfRows,
updateOrSaveRow,
cachedRows,
clearCache,
totalRows,
bulkUpdateRows,
bulkUpdateView,
removeRowIfNew,
syncCount,
fetchChunk,
fetchMissingChunks,
recoverLTARRefs,
getChunkIndex,
selectedRows,
chunkStates,
isRowSortRequiredRows,
clearInvalidRows,
applySorting,
CHUNK_SIZE,
} = useInfiniteData({
meta,
viewMeta,
@@ -268,6 +287,632 @@
}
}
async function deleteSelectedRows(): Promise<void> {
const removedRowsData: Record<string, any>[] = []
let compositePrimaryKey = ''
isBulkOperationInProgress.value = true
for (const row of selectedRows.value) {
const { row: rowData, rowMeta } = row
if (!rowMeta.selected || rowMeta.new) {
continue
}
const extractedPk = extractPk(meta?.value?.columns as ColumnType[])
const compositePkValue = extractPkFromRow(rowData, meta?.value?.columns as ColumnType[]) as string
const pkData = rowPkData(rowData, meta?.value?.columns as ColumnType[])
if (extractedPk && compositePkValue) {
if (!compositePrimaryKey) compositePrimaryKey = extractedPk
removedRowsData.push({
[compositePrimaryKey]: compositePkValue as string,
pkData,
row: clone(row.row),
rowMeta,
})
}
}
if (!removedRowsData.length) {
  isBulkOperationInProgress.value = false
  return
}
try {
const { list } = await $api.dbTableRow.list(NOCO, base?.value.id as string, meta.value?.id as string, {
pks: removedRowsData.map((row) => row[compositePrimaryKey]).join(','),
})
for (const deleteRow of removedRowsData) {
const rowObj = deleteRow.row
const rowPk = rowPkData(rowObj.row, meta.value?.columns as ColumnType[])
const fullRecord = list.find((r: Record<string, any>) => {
return Object.keys(rowPk).every((key) => r[key] === rowPk[key])
})
if (!fullRecord) continue
rowObj.row = clone(fullRecord)
}
await bulkDeleteRows(removedRowsData.map((row) => row.pkData))
} catch (e: any) {
const errorMessage = await extractSdkResponseErrorMsg(e)
isBulkOperationInProgress.value = false
return message.error(`${t('msg.error.deleteRowFailed')}: ${errorMessage}`)
}
await updateCacheAfterDelete(removedRowsData, false)
addUndo({
undo: {
fn: async (removedRowsData: Record<string, any>[]) => {
const rowsToInsert = removedRowsData
.map((row) => {
const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
row.row = { ...pkData, ...row.row }
return row
})
.reverse()
const insertedRowIds = await bulkInsertRows(rowsToInsert as Row[], undefined, true)
if (Array.isArray(insertedRowIds)) {
await Promise.all(rowsToInsert.map((row, _index) => recoverLTARRefs(row.row)))
}
},
args: [removedRowsData],
},
redo: {
fn: async (toBeRemovedData: Record<string, any>[]) => {
try {
isBulkOperationInProgress.value = true
await bulkDeleteRows(toBeRemovedData.map((row) => row.pkData))
await updateCacheAfterDelete(toBeRemovedData, false)
await syncCount()
} finally {
isBulkOperationInProgress.value = false
}
},
args: [removedRowsData],
},
scope: defineViewScope({ view: viewMeta.value }),
})
isBulkOperationInProgress.value = false
await syncCount()
}
async function bulkInsertRows(
rows: Row[],
{ metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
undo = false,
): Promise<string[]> {
if (!metaValue || !viewMetaValue) {
throw new Error('Meta value or view meta value is undefined')
}
isBulkOperationInProgress.value = true
const autoGeneratedKeys = new Set(
metaValue.columns
?.filter((c) => !c.pk && (isCreatedOrLastModifiedTimeCol(c) || isCreatedOrLastModifiedByCol(c)))
.map((c) => c.title),
)
try {
const rowsToInsert = await Promise.all(
rows.map(async (currentRow) => {
const { missingRequiredColumns, insertObj } = await populateInsertObject({
meta: metaValue,
ltarState: {},
getMeta,
row: currentRow.row,
undo,
})
if (missingRequiredColumns.size === 0) {
for (const key of autoGeneratedKeys) {
delete insertObj[key!]
}
return { insertObj, rowIndex: currentRow.rowMeta.rowIndex }
}
return null
}),
)
const validRowsToInsert = rowsToInsert.filter(Boolean) as { insertObj: Record<string, any>; rowIndex: number; insertedId?: string }[]
const bulkInsertedIds = await $api.dbDataTableRow.create(
metaValue.id!,
validRowsToInsert.map((row) => row!.insertObj),
{
viewId: viewMetaValue.id,
},
)
// Attach each returned id to its row before sorting, so ids stay paired with their rows
validRowsToInsert.forEach((row, idx) => {
  row.insertedId = bulkInsertedIds[idx]
})
validRowsToInsert.sort((a, b) => (a!.rowIndex ?? 0) - (b!.rowIndex ?? 0))
const newCachedRows = new Map<number, Row>()
for (const [index, row] of cachedRows.value) {
newCachedRows.set(index, { ...row, rowMeta: { ...row.rowMeta, rowIndex: index } })
}
for (const { insertObj, rowIndex, insertedId } of validRowsToInsert) {
// If there's already a row at this index, shift it and all subsequent rows
if (newCachedRows.has(rowIndex!)) {
const rowsToShift = Array.from(newCachedRows.entries())
.filter(([index]) => index >= rowIndex!)
.sort((a, b) => b[0] - a[0]) // Sort in descending order
for (const [index, row] of rowsToShift) {
const newIndex = index + 1
newCachedRows.set(newIndex, { ...row, rowMeta: { ...row.rowMeta, rowIndex: newIndex } })
}
}
const newRow = {
row: { ...insertObj, id: insertedId },
oldRow: {},
rowMeta: { rowIndex: rowIndex!, new: false },
}
newCachedRows.set(rowIndex!, newRow)
}
cachedRows.value = newCachedRows
totalRows.value += validRowsToInsert.length
await syncCount()
syncVisibleData()
return bulkInsertedIds
} catch (error: any) {
const errorMessage = await extractSdkResponseErrorMsg(error)
message.error(`Failed to bulk insert rows: ${errorMessage}`)
throw error
} finally {
isBulkOperationInProgress.value = false
}
}
async function bulkUpdateRows(
rows: Row[],
props: string[],
{ metaValue = meta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
undo = false,
): Promise<void> {
isBulkOperationInProgress.value = true
await Promise.all(
rows.map(async (row) => {
if (row.rowMeta) {
row.rowMeta.changed = false
await until(() => !(row.rowMeta?.new && row.rowMeta?.saving)).toMatch((v) => v)
row.rowMeta.saving = true
}
}),
)
const pksIndex = [] as { pk: string; rowIndex: number }[]
const updateArray = rows.map((row) => {
const pk = rowPkData(row.row, metaValue?.columns as ColumnType[])
const updateData = props.reduce((acc, prop) => ({ ...acc, [prop]: row.row[prop] }), {})
pksIndex.push({
pk: extractPkFromRow(row.row, metaValue?.columns as ColumnType[]) as string,
rowIndex: row.rowMeta.rowIndex!,
})
return { ...updateData, ...pk }
})
try {
const newRows = (await $api.dbTableRow.bulkUpdate(
NOCO,
metaValue?.base_id as string,
metaValue?.id as string,
updateArray,
)) as Record<string, any>
reloadAggregate?.trigger({ fields: props.map((p) => ({ title: p })) })
newRows.forEach((newRow: Record<string, any>) => {
const pk = extractPkFromRow(newRow, metaValue?.columns as ColumnType[])
const rowIndex = pksIndex.find((pkIndex) => pkIndex.pk === pk)?.rowIndex
if (rowIndex !== undefined) {
const row = cachedRows.value.get(rowIndex)
if (row) {
row.rowMeta.saving = false
row.row = newRow
cachedRows.value.set(rowIndex, row)
}
}
})
} catch (e) {
message.error(await extractSdkResponseErrorMsg(e))
isBulkOperationInProgress.value = false
return
} finally {
rows.forEach((row) => {
if (row.rowMeta) row.rowMeta.saving = false
})
}
syncVisibleData()
if (!undo) {
addUndo({
undo: {
fn: async (undoRows: Row[], props: string[]) => {
await bulkUpdateRows(
undoRows.map((r) => ({
...r,
row: r.oldRow,
oldRow: r.row,
})),
props,
undefined,
true,
)
},
args: [clone(rows), props],
},
redo: {
fn: async (redoRows: Row[], props: string[]) => {
await bulkUpdateRows(redoRows, props, undefined, true)
},
args: [clone(rows), props],
},
scope: defineViewScope({ view: viewMeta.value }),
})
}
applySorting(rows)
isBulkOperationInProgress.value = false
}
async function bulkUpsertRows(
insertRows: Row[],
updateRows: Row[],
props: string[],
{ metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
columns: Partial<ColumnType>[],
undo = false,
) {
try {
isBulkOperationInProgress.value = true
const newCols = (meta.value?.columns ?? []).filter((col: ColumnType) => columns.some((c) => c.title === col.title))
const rowsToFetch = updateRows.filter((row) => !cachedRows.value.has(row.rowMeta.rowIndex!))
const chunksToFetch = new Set(rowsToFetch.map((row) => Math.floor(row.rowMeta.rowIndex! / CHUNK_SIZE)))
await Promise.all(Array.from(chunksToFetch).map((chunkId) => fetchChunk(chunkId)))
const getPk = (row: Row) => extractPkFromRow(row.row, metaValue?.columns as ColumnType[])
const ogUpdateRows = updateRows.map((_row) => {
  const row = cachedRows.value.get(_row.rowMeta.rowIndex!) ?? _row
newCols.forEach((col: ColumnType) => {
row.oldRow[col.title!] = undefined
})
return clone(row)
})
const cleanRow = (row: any) => {
const cleanedRow = { ...row }
metaValue?.columns?.forEach((col) => {
if (col.system || isVirtualCol(col)) delete cleanedRow[col.title!]
})
return cleanedRow
}
updateRows = updateRows.map((row) => {
const cachedRow = cachedRows.value.get(row.rowMeta.rowIndex!)
if (cachedRow) {
return {
...cachedRow,
row: { ...cachedRow.row, ...row.row },
oldRow: cachedRow.row,
}
}
return row
})
const bulkUpsertedRows = await $api.dbTableRow.bulkUpsert(
NOCO,
base.value?.id as string,
metaValue?.id as string,
[...insertRows.map((row) => cleanRow(row.row)), ...updateRows.map((row) => cleanRow(row.row))],
{},
)
const existingPks = new Set(Array.from(cachedRows.value.values()).map((row) => getPk(row)))
const [insertedRows, updatedRows] = bulkUpsertedRows.reduce(
([inserted, updated], row) => {
const isPkExisting = existingPks.has(extractPkFromRow(row, metaValue?.columns as ColumnType[]))
return isPkExisting
? [inserted, [...updated, { row, rowMeta: {}, oldRow: row }]]
: [[...inserted, { row, rowMeta: {}, oldRow: {} }], updated]
},
[[], []] as [Row[], Row[]],
)
insertedRows.forEach((row: Row, index: number) => {
const newIndex = totalRows.value + index
row.rowMeta.rowIndex = newIndex
cachedRows.value.set(newIndex, { ...row, rowMeta: { ...row.rowMeta, rowIndex: newIndex } })
})
updatedRows.forEach((row: Row) => {
const existingRow = Array.from(cachedRows.value.entries()).find(([_, r]) => getPk(r) === getPk(row))
if (existingRow) {
cachedRows.value.set(existingRow[0], { ...row, rowMeta: { ...row.rowMeta, rowIndex: existingRow[0] } })
}
})
totalRows.value += insertedRows.length
if (!undo) {
addUndo({
undo: {
fn: async (insertedRows: Row[], ogUpdateRows: Row[]) => {
try {
isBulkOperationInProgress.value = true
await bulkDeleteRows(
insertedRows.map((row) => rowPkData(row.row, metaValue?.columns as ColumnType[]) as Record<string, any>),
)
await bulkUpdateRows(
ogUpdateRows.map((r) => ({
...r,
row: r.oldRow,
oldRow: r.row,
})),
props,
{ metaValue },
true,
)
isBulkOperationInProgress.value = true
const columnsHash = (await $api.dbTableColumn.hash(meta.value?.id)).hash
await $api.dbTableColumn.bulk(meta.value?.id, {
hash: columnsHash,
ops: newCols.map((col: ColumnType) => ({
op: 'delete',
column: col,
})),
})
insertedRows.forEach((row) => {
cachedRows.value.delete(row.rowMeta.rowIndex!)
})
totalRows.value = totalRows.value - insertedRows.length
syncVisibleData?.()
await getMeta(meta.value?.id as string, true)
} catch (e) {
  // swallow the error; the finally block below resets the loading state
} finally {
isBulkOperationInProgress.value = false
}
},
args: [clone(insertedRows), clone(ogUpdateRows)],
},
redo: {
fn: async (insertRows: Row[], updateRows: Row[]) => {
try {
isBulkOperationInProgress.value = true
const columnsHash = (await $api.dbTableColumn.hash(meta.value?.id)).hash
await $api.dbTableColumn.bulk(meta.value?.id, {
hash: columnsHash,
ops: newCols.map((col: ColumnType) => ({
op: 'add',
column: col,
})),
})
await bulkUpsertRows(insertRows, updateRows, props, { metaValue, viewMetaValue }, columns, true)
isBulkOperationInProgress.value = true
await getMeta(meta.value?.id as string, true)
syncVisibleData?.()
} finally {
isBulkOperationInProgress.value = false
}
},
args: [clone(insertedRows), clone(updatedRows)],
},
scope: defineViewScope({ view: viewMeta.value }),
})
}
syncVisibleData?.()
await syncCount()
} catch (error: any) {
message.error(await extractSdkResponseErrorMsg(error))
} finally {
isBulkOperationInProgress.value = false
}
}
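A hedged usage sketch of the new method (the argument names mirror the paste handler in useMultiSelect further below and are otherwise illustrative):

```ts
// newRows: records to create, updatedRows: records to overwrite,
// propsToPaste: affected column titles, newColumns: column definitions created for the paste
await bulkUpsertRows?.(newRows, updatedRows, propsToPaste, undefined, newColumns)
```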
async function updateCacheAfterDelete(rowsToDelete: Record<string, any>[], nested = true): Promise<void> {
const maxCachedIndex = Math.max(...cachedRows.value.keys())
const newCachedRows = new Map<number, Row>()
const deleteSet = new Set(rowsToDelete.map((row) => (nested ? row.row : row).rowMeta.rowIndex))
let deletionCount = 0
let lastIndex = -1
for (let i = 0; i <= maxCachedIndex + 1; i++) {
if (deleteSet.has(i)) {
deletionCount++
continue
}
if (cachedRows.value.has(i)) {
const row = cachedRows.value.get(i)
if (row) {
const newIndex = i - deletionCount
if (lastIndex !== -1 && newIndex - lastIndex > 1) {
chunkStates.value[getChunkIndex(lastIndex)] = undefined
}
row.rowMeta.rowIndex = newIndex
newCachedRows.set(newIndex, row)
lastIndex = newIndex
}
}
}
if (lastIndex !== -1) {
chunkStates.value[getChunkIndex(lastIndex)] = undefined
}
cachedRows.value = newCachedRows
totalRows.value = Math.max(0, totalRows.value - rowsToDelete.length)
await syncCount()
syncVisibleData?.()
}
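The index compaction performed above can be summarized in a standalone sketch (illustrative only; the real function also invalidates chunk states and re-syncs counts):

```ts
// Compact a row cache after deletions: surviving rows shift down to fill the gaps
function compactAfterDelete<T>(rows: Map<number, T>, deleted: Set<number>): Map<number, T> {
  const out = new Map<number, T>()
  let removed = 0
  const maxIndex = rows.size ? Math.max(...rows.keys()) : -1
  for (let i = 0; i <= maxIndex; i++) {
    if (deleted.has(i)) {
      removed++
      continue
    }
    const row = rows.get(i)
    if (row !== undefined) out.set(i - removed, row)
  }
  return out
}
```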
async function deleteRangeOfRows(cellRange: CellRange): Promise<void> {
if (!cellRange._start || !cellRange._end) return
isBulkOperationInProgress.value = true
const start = Math.min(cellRange._start.row, cellRange._end.row)
const end = Math.max(cellRange._start.row, cellRange._end.row)
const rowsToDelete: Record<string, any>[] = []
let compositePrimaryKey = ''
const uncachedRows = Array.from({ length: end - start + 1 }, (_, i) => start + i).filter(
(index) => !cachedRows.value.has(index),
)
if (uncachedRows.length > 0) {
await fetchMissingChunks(uncachedRows[0], uncachedRows[uncachedRows.length - 1])
}
for (let i = start; i <= end; i++) {
const cachedRow = cachedRows.value.get(i)
if (!cachedRow) {
console.warn(`Record at index ${i} not found in local cache`)
continue
}
const { row: rowData, rowMeta } = cachedRow
if (!rowMeta.new) {
const extractedPk = extractPk(meta?.value?.columns as ColumnType[])
const compositePkValue = extractPkFromRow(rowData, meta?.value?.columns as ColumnType[])
const pkData = rowPkData(rowData, meta?.value?.columns as ColumnType[])
if (extractedPk && compositePkValue) {
if (!compositePrimaryKey) compositePrimaryKey = extractedPk
rowsToDelete.push({
[compositePrimaryKey]: compositePkValue,
pkData,
row: { ...cachedRow },
rowIndex: i,
})
}
}
}
if (!rowsToDelete.length) {
  isBulkOperationInProgress.value = false
  return
}
const { list } = await $api.dbTableRow.list(NOCO, base?.value.id as string, meta.value?.id as string, {
pks: rowsToDelete.map((row) => row[compositePrimaryKey]).join(','),
})
try {
for (const deleteRow of rowsToDelete) {
const rowObj = deleteRow.row
const rowPk = rowPkData(rowObj.row, meta.value?.columns as ColumnType[])
const fullRecord = list.find((r: Record<string, any>) => {
return Object.keys(rowPk).every((key) => r[key] === rowPk[key])
})
if (!fullRecord) {
console.warn(`Full record not found for row with index ${deleteRow.rowIndex}`)
continue
}
rowObj.row = fullRecord
}
await bulkDeleteRows(rowsToDelete.map((row) => row.pkData))
} catch (e: any) {
const errorMessage = await extractSdkResponseErrorMsg(e)
message.error(`${t('msg.error.deleteRowFailed')}: ${errorMessage}`)
isBulkOperationInProgress.value = false
throw e
}
addUndo({
undo: {
fn: async (deletedRows: Record<string, any>[]) => {
const rowsToInsert = deletedRows
.map((row) => {
const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
row.row = { ...pkData, ...row.row }
return row
})
.reverse()
const insertedRowIds = await bulkInsertRows(
rowsToInsert.map((row) => row.row),
undefined,
true,
)
if (Array.isArray(insertedRowIds)) {
await Promise.all(rowsToInsert.map((row, _index) => recoverLTARRefs(row.row)))
}
},
args: [rowsToDelete],
},
redo: {
fn: async (rowsToDelete: Record<string, any>[]) => {
await bulkDeleteRows(rowsToDelete.map((row) => row.pkData))
await updateCacheAfterDelete(rowsToDelete)
},
args: [rowsToDelete],
},
scope: defineViewScope({ view: viewMeta.value }),
})
await updateCacheAfterDelete(rowsToDelete)
isBulkOperationInProgress.value = false
}
async function bulkDeleteRows(
rows: Record<string, string>[],
{ metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
): Promise<any> {
try {
const bulkDeletedRowsData = await $api.dbDataTableRow.delete(metaValue?.id as string, rows.length === 1 ? rows[0] : rows, {
viewId: viewMetaValue?.id as string,
})
reloadAggregate?.trigger()
return rows.length === 1 && bulkDeletedRowsData ? [bulkDeletedRowsData] : bulkDeletedRowsData
} catch (error: any) {
const errorMessage = await extractSdkResponseErrorMsg(error)
message.error(`Bulk delete failed: ${errorMessage}`)
}
}
return {
cachedRows,
loadData,
@@ -282,6 +927,7 @@ export function useGridViewData(
deleteRangeOfRows,
updateOrSaveRow,
bulkUpdateRows,
bulkUpsertRows,
bulkUpdateView,
loadAggCommentsCount,
syncCount,
@@ -299,5 +945,6 @@ export function useGridViewData(
clearInvalidRows,
applySorting,
isRowSortRequiredRows,
isBulkOperationInProgress,
}
}

packages/nc-gui/composables/useInfiniteData.ts (448)

@@ -8,12 +8,9 @@ import {
UITypes,
type ViewType,
extractFilterFromXwhere,
isCreatedOrLastModifiedByCol,
isCreatedOrLastModifiedTimeCol,
} from 'nocodb-sdk'
import type { Row } from '../lib/types'
import { validateRowFilters } from '../utils/dataUtils'
import type { CellRange } from './useMultiSelect/cellRange'
export function useInfiniteData(args: {
meta: Ref<TableType | undefined> | ComputedRef<TableType | undefined>
@@ -151,6 +148,17 @@ export function useInfiniteData(args: {
)
}
const fetchMissingChunks = async (startIndex: number, endIndex: number) => {
const firstChunkId = Math.floor(startIndex / CHUNK_SIZE)
const lastChunkId = Math.floor(endIndex / CHUNK_SIZE)
const chunksToFetch = Array.from({ length: lastChunkId - firstChunkId + 1 }, (_, i) => firstChunkId + i).filter(
(chunkId) => !chunkStates.value[chunkId],
)
await Promise.all(chunksToFetch.map(fetchChunk))
}
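As a worked example (assuming CHUNK_SIZE were 50; the actual constant lives elsewhere in this composable):

```ts
// fetchMissingChunks(120, 260) with CHUNK_SIZE = 50:
//   firstChunkId = Math.floor(120 / 50) = 2
//   lastChunkId  = Math.floor(260 / 50) = 5
//   candidates   = [2, 3, 4, 5], minus any chunk already marked in chunkStates
```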
const selectedRows = computed<Row[]>(() => {
return Array.from(cachedRows.value.values()).filter((row) => row.rowMeta?.selected)
})
@@ -297,7 +305,7 @@
.map(([index, row]) => ({
currentIndex: index,
row,
pk: extractPkFromRow(row.row, meta.value?.columns ?? []),
}))
const sortedRangeEntries = rangeEntries.sort((a, b) => {
@@ -318,8 +326,7 @@
return a.currentIndex - b.currentIndex
})
const entry = sortedRangeEntries.find((e) => e.pk === extractPkFromRow(inputRow.row, meta.value?.columns ?? []))
if (!entry) return
const targetIndex = sourceRange.start + sortedRangeEntries.indexOf(entry)
@@ -540,94 +547,6 @@
}
}
async function deleteSelectedRows(): Promise<void> {
const removedRowsData: Record<string, any>[] = []
let compositePrimaryKey = ''
for (const row of selectedRows.value) {
const { row: rowData, rowMeta } = row
if (!rowMeta.selected || rowMeta.new) {
continue
}
const extractedPk = extractPk(meta?.value?.columns as ColumnType[])
const compositePkValue = extractPkFromRow(rowData, meta?.value?.columns as ColumnType[]) as string
const pkData = rowPkData(rowData, meta?.value?.columns as ColumnType[])
if (extractedPk && compositePkValue) {
if (!compositePrimaryKey) compositePrimaryKey = extractedPk
removedRowsData.push({
[compositePrimaryKey]: compositePkValue as string,
pkData,
row: clone(row.row),
rowMeta,
})
}
}
if (!removedRowsData.length) return
try {
const { list } = await $api.dbTableRow.list(NOCO, base?.value.id as string, meta.value?.id as string, {
pks: removedRowsData.map((row) => row[compositePrimaryKey]).join(','),
})
for (const deleteRow of removedRowsData) {
const rowObj = deleteRow.row
const rowPk = rowPkData(rowObj.row, meta.value?.columns as ColumnType[])
const fullRecord = list.find((r: Record<string, any>) => {
return Object.keys(rowPk).every((key) => r[key] === rowPk[key])
})
if (!fullRecord) continue
rowObj.row = clone(fullRecord)
}
await bulkDeleteRows(removedRowsData.map((row) => row.pkData))
} catch (e: any) {
const errorMessage = await extractSdkResponseErrorMsg(e)
return message.error(`${t('msg.error.deleteRowFailed')}: ${errorMessage}`)
}
await updateCacheAfterDelete(removedRowsData, false)
addUndo({
undo: {
fn: async (removedRowsData: Record<string, any>[]) => {
const rowsToInsert = removedRowsData
.map((row) => {
const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
row.row = { ...pkData, ...row.row }
return row
})
.reverse()
const insertedRowIds = await bulkInsertRows(rowsToInsert as Row[], undefined, true)
if (Array.isArray(insertedRowIds)) {
await Promise.all(rowsToInsert.map((row, _index) => recoverLTARRefs(row.row)))
}
},
args: [removedRowsData],
},
redo: {
fn: async (toBeRemovedData: Record<string, any>[]) => {
await bulkDeleteRows(toBeRemovedData.map((row) => row.pkData))
await updateCacheAfterDelete(toBeRemovedData, false)
await syncCount()
},
args: [removedRowsData],
},
scope: defineViewScope({ view: viewMeta.value }),
})
await syncCount()
}
async function insertRow(
currentRow: Row,
ltarState: Record<string, any> = {},
@@ -772,96 +691,6 @@
}
}
async function bulkInsertRows(
rows: Row[],
{ metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
undo = false,
): Promise<string[]> {
if (!metaValue || !viewMetaValue) {
throw new Error('Meta value or view meta value is undefined')
}
const autoGeneratedKeys = new Set(
metaValue.columns
?.filter((c) => !c.pk && (isCreatedOrLastModifiedTimeCol(c) || isCreatedOrLastModifiedByCol(c)))
.map((c) => c.title),
)
try {
const rowsToInsert = await Promise.all(
rows.map(async (currentRow) => {
const { missingRequiredColumns, insertObj } = await populateInsertObject({
meta: metaValue,
ltarState: {},
getMeta,
row: currentRow.row,
undo,
})
if (missingRequiredColumns.size === 0) {
for (const key of autoGeneratedKeys) {
delete insertObj[key!]
}
return { insertObj, rowIndex: currentRow.rowMeta.rowIndex }
}
return null
}),
)
const validRowsToInsert = rowsToInsert.filter(Boolean) as { insertObj: Record<string, any>; rowIndex: number }[]
const bulkInsertedIds = await $api.dbDataTableRow.create(
metaValue.id!,
validRowsToInsert.map((row) => row!.insertObj),
{
viewId: viewMetaValue.id,
},
)
validRowsToInsert.sort((a, b) => (a!.rowIndex ?? 0) - (b!.rowIndex ?? 0))
const newCachedRows = new Map<number, Row>()
for (const [index, row] of cachedRows.value) {
newCachedRows.set(index, { ...row, rowMeta: { ...row.rowMeta, rowIndex: index } })
}
for (const { insertObj, rowIndex } of validRowsToInsert) {
// If there's already a row at this index, shift it and all subsequent rows
if (newCachedRows.has(rowIndex!)) {
const rowsToShift = Array.from(newCachedRows.entries())
.filter(([index]) => index >= rowIndex!)
.sort((a, b) => b[0] - a[0]) // Sort in descending order
for (const [index, row] of rowsToShift) {
const newIndex = index + 1
newCachedRows.set(newIndex, { ...row, rowMeta: { ...row.rowMeta, rowIndex: newIndex } })
}
}
const newRow = {
row: { ...insertObj, id: bulkInsertedIds[validRowsToInsert.indexOf({ insertObj, rowIndex })] },
oldRow: {},
rowMeta: { rowIndex: rowIndex!, new: false },
}
newCachedRows.set(rowIndex!, newRow)
}
cachedRows.value = newCachedRows
totalRows.value += validRowsToInsert.length
await syncCount()
callbacks?.syncVisibleData?.()
return bulkInsertedIds
} catch (error: any) {
const errorMessage = await extractSdkResponseErrorMsg(error)
message.error(`Failed to bulk insert rows: ${errorMessage}`)
throw error
}
}
async function updateRowProperty(
toUpdate: Row,
property: string,
@@ -991,7 +820,7 @@ export function useInfiniteData(args: {
[...allFilters.value, ...computedWhereFilter.value],
data,
meta.value?.columns as ColumnType[],
getBaseType(viewMeta.value?.view?.source_id),
)
const changedFields = property ? [property] : Object.keys(row.row)
@@ -1025,67 +854,6 @@
}
}
async function bulkUpdateRows(
rows: Row[],
props: string[],
{ metaValue = meta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
undo = false,
): Promise<void> {
await Promise.all(
rows.map(async (row) => {
if (row.rowMeta) {
row.rowMeta.changed = false
await until(() => !(row.rowMeta?.new && row.rowMeta?.saving)).toMatch((v) => v)
row.rowMeta.saving = true
}
}),
)
const updateArray = rows.map((row) => {
const pk = rowPkData(row.row, metaValue?.columns as ColumnType[])
const updateData = props.reduce((acc, prop) => ({ ...acc, [prop]: row.row[prop] }), {})
return { ...updateData, ...pk }
})
try {
await $api.dbTableRow.bulkUpdate(NOCO, metaValue?.base_id as string, metaValue?.id as string, updateArray)
reloadAggregate?.trigger({ fields: props.map((p) => ({ title: p })) })
// Update cachedRows with the updated data
rows.forEach((row) => {
if (row.rowMeta.rowIndex !== undefined) {
cachedRows.value.set(row.rowMeta.rowIndex, row)
}
})
} finally {
rows.forEach((row) => {
if (row.rowMeta) row.rowMeta.saving = false
})
}
callbacks?.syncVisibleData?.()
if (!undo) {
addUndo({
undo: {
fn: async (undoRows: Row[], props: string[]) => {
await bulkUpdateRows(undoRows, props, undefined, true)
},
args: [clone(rows.map((row) => ({ row: row.oldRow, oldRow: row.row, rowMeta: row.rowMeta }))), props],
},
redo: {
fn: async (redoRows: Row[], props: string[]) => {
await bulkUpdateRows(redoRows, props, undefined, true)
},
args: [clone(rows), props],
},
scope: defineViewScope({ view: viewMeta.value }),
})
}
applySorting(rows)
}
async function bulkUpdateView(
data: Record<string, any>[],
{ metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
@@ -1136,184 +904,6 @@
return false
}
}
const fetchMissingChunks = async (startIndex: number, endIndex: number) => {
const firstChunkId = getChunkIndex(startIndex)
const lastChunkId = getChunkIndex(endIndex)
const chunksToFetch = Array.from({ length: lastChunkId - firstChunkId + 1 }, (_, i) => firstChunkId + i).filter(
(chunkId) => !chunkStates.value[chunkId],
)
await Promise.all(chunksToFetch.map(fetchChunk))
}
async function updateCacheAfterDelete(rowsToDelete: Record<string, any>[], nested = true): Promise<void> {
const maxCachedIndex = Math.max(...cachedRows.value.keys())
const newCachedRows = new Map<number, Row>()
const deleteSet = new Set(rowsToDelete.map((row) => (nested ? row.row : row).rowMeta.rowIndex))
let deletionCount = 0
let lastIndex = -1
for (let i = 0; i <= maxCachedIndex + 1; i++) {
if (deleteSet.has(i)) {
deletionCount++
continue
}
if (cachedRows.value.has(i)) {
const row = cachedRows.value.get(i)
if (row) {
const newIndex = i - deletionCount
if (lastIndex !== -1 && newIndex - lastIndex > 1) {
chunkStates.value[getChunkIndex(lastIndex)] = undefined
}
row.rowMeta.rowIndex = newIndex
newCachedRows.set(newIndex, row)
lastIndex = newIndex
}
}
}
if (lastIndex !== -1) {
chunkStates.value[getChunkIndex(lastIndex)] = undefined
}
cachedRows.value = newCachedRows
totalRows.value = Math.max(0, totalRows.value - rowsToDelete.length)
await syncCount()
callbacks?.syncVisibleData?.()
}
async function deleteRangeOfRows(cellRange: CellRange): Promise<void> {
if (!cellRange._start || !cellRange._end) return
const start = Math.min(cellRange._start.row, cellRange._end.row)
const end = Math.max(cellRange._start.row, cellRange._end.row)
const rowsToDelete: Record<string, any>[] = []
let compositePrimaryKey = ''
const uncachedRows = Array.from({ length: end - start + 1 }, (_, i) => start + i).filter(
(index) => !cachedRows.value.has(index),
)
if (uncachedRows.length > 0) {
await fetchMissingChunks(uncachedRows[0], uncachedRows[uncachedRows.length - 1])
}
for (let i = start; i <= end; i++) {
const cachedRow = cachedRows.value.get(i)
if (!cachedRow) {
console.warn(`Record at index ${i} not found in local cache`)
continue
}
const { row: rowData, rowMeta } = cachedRow
if (!rowMeta.new) {
const extractedPk = extractPk(meta?.value?.columns as ColumnType[])
const compositePkValue = extractPkFromRow(rowData, meta?.value?.columns as ColumnType[])
const pkData = rowPkData(rowData, meta?.value?.columns as ColumnType[])
if (extractedPk && compositePkValue) {
if (!compositePrimaryKey) compositePrimaryKey = extractedPk
rowsToDelete.push({
[compositePrimaryKey]: compositePkValue,
pkData,
row: { ...cachedRow },
rowIndex: i,
})
}
}
}
if (!rowsToDelete.length) return
const { list } = await $api.dbTableRow.list(NOCO, base?.value.id as string, meta.value?.id as string, {
pks: rowsToDelete.map((row) => row[compositePrimaryKey]).join(','),
})
try {
for (const deleteRow of rowsToDelete) {
const rowObj = deleteRow.row
const rowPk = rowPkData(rowObj.row, meta.value?.columns as ColumnType[])
const fullRecord = list.find((r: Record<string, any>) => {
return Object.keys(rowPk).every((key) => r[key] === rowPk[key])
})
if (!fullRecord) {
console.warn(`Full record not found for row with index ${deleteRow.rowIndex}`)
continue
}
rowObj.row = fullRecord
}
await bulkDeleteRows(rowsToDelete.map((row) => row.pkData))
} catch (e: any) {
const errorMessage = await extractSdkResponseErrorMsg(e)
message.error(`${t('msg.error.deleteRowFailed')}: ${errorMessage}`)
throw e
}
addUndo({
undo: {
fn: async (deletedRows: Record<string, any>[]) => {
const rowsToInsert = deletedRows
.map((row) => {
const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
row.row = { ...pkData, ...row.row }
return row
})
.reverse()
const insertedRowIds = await bulkInsertRows(
rowsToInsert.map((row) => row.row),
undefined,
true,
)
if (Array.isArray(insertedRowIds)) {
await Promise.all(rowsToInsert.map((row, _index) => recoverLTARRefs(row.row)))
}
},
args: [rowsToDelete],
},
redo: {
fn: async (rowsToDelete: Record<string, any>[]) => {
await bulkDeleteRows(rowsToDelete.map((row) => row.pkData))
await updateCacheAfterDelete(rowsToDelete)
},
args: [rowsToDelete],
},
scope: defineViewScope({ view: viewMeta.value }),
})
await updateCacheAfterDelete(rowsToDelete)
}
async function bulkDeleteRows(
rows: Record<string, string>[],
{ metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
): Promise<any> {
try {
const bulkDeletedRowsData = await $api.dbDataTableRow.delete(metaValue?.id as string, rows.length === 1 ? rows[0] : rows, {
viewId: viewMetaValue?.id as string,
})
reloadAggregate?.trigger()
return rows.length === 1 && bulkDeletedRowsData ? [bulkDeletedRowsData] : bulkDeletedRowsData
} catch (error: any) {
const errorMessage = await extractSdkResponseErrorMsg(error)
message.error(`Bulk delete failed: ${errorMessage}`)
}
}
const removeRowIfNew = (row: Row): boolean => {
const index = Array.from(cachedRows.value.entries()).find(([_, r]) => r.rowMeta.rowIndex === row.rowMeta.rowIndex)?.[0]
@@ -1350,15 +940,14 @@
addEmptyRow,
deleteRow,
deleteRowById,
deleteSelectedRows,
deleteRangeOfRows,
getChunkIndex,
fetchMissingChunks,
fetchChunk,
updateOrSaveRow,
bulkUpdateRows,
bulkUpdateView,
removeRowIfNew,
bulkDeleteRows,
bulkInsertRows,
cachedRows,
recoverLTARRefs,
totalRows,
clearCache,
syncCount,
@@ -1367,5 +956,6 @@
isRowSortRequiredRows,
clearInvalidRows,
applySorting,
CHUNK_SIZE,
}
}

packages/nc-gui/composables/useMultiSelect/index.ts (232)

@@ -21,6 +21,8 @@ import {
timeFormats,
} from 'nocodb-sdk'
import { parse } from 'papaparse'
import type { Row } from '../../lib/types'
import { generateUniqueColumnName } from '../../helpers/parsers/parserHelpers'
import type { Cell } from './cellRange'
import { CellRange } from './cellRange'
import convertCellData from './convertCellData'
@@ -43,9 +45,35 @@ export function useMultiSelect(
clearSelectedRangeOfCells: Function,
makeEditable: Function,
scrollToCell?: (row?: number | null, col?: number | null, scrollBehaviour?: ScrollBehavior) => void,
expandRows?: ({
newRows,
newColumns,
cellsOverwritten,
rowsUpdated,
}: {
newRows: number
newColumns: number
cellsOverwritten: number
rowsUpdated: number
}) => Promise<{
continue: boolean
expand: boolean
}>,
keyEventHandler?: Function,
syncCellData?: Function,
bulkUpdateRows?: (
rows: Row[],
props: string[],
metas?: { metaValue?: TableType; viewMetaValue?: ViewType },
undo?: boolean,
) => Promise<void>,
bulkUpsertRows?: (
insertRows: Row[],
updateRows: Row[],
props: string[],
metas?: { metaValue?: TableType; viewMetaValue?: ViewType },
newColumns?: Partial<ColumnType>[],
) => Promise<void>,
fillHandle?: MaybeRef<HTMLElement | undefined>,
view?: MaybeRef<ViewType | undefined>,
paginationData?: MaybeRef<PaginatedType | undefined>,
@@ -923,71 +951,185 @@ export function useMultiSelect(
const selectionRowCount = Math.max(clipboardMatrix.length, selectedRange.end.row - selectedRange.start.row + 1)
const pasteMatrixRows = selectionRowCount
const pasteMatrixCols = clipboardMatrix[0].length
const colsToPaste = unref(fields).slice(activeCell.col, activeCell.col + pasteMatrixCols)
let rowsToPaste
const existingFields = unref(fields)
const startColIndex = activeCell.col
const existingColCount = existingFields.length - startColIndex
const newColsNeeded = Math.max(0, pasteMatrixCols - existingColCount)
let tempTotalRows = 0
let totalRowsBeforeActiveCell
let availableRowsToUpdate
let rowsToAdd
if (isArrayStructure) {
rowsToPaste = (unref(data) as Row[]).slice(activeCell.row, activeCell.row + selectionRowCount)
const { totalRows: _tempTr, page = 1, pageSize = 100 } = unref(paginationData)!
tempTotalRows = _tempTr as number
totalRowsBeforeActiveCell = (page - 1) * pageSize + activeCell.row
availableRowsToUpdate = Math.max(0, tempTotalRows - totalRowsBeforeActiveCell)
rowsToAdd = Math.max(0, selectionRowCount - availableRowsToUpdate)
} else {
rowsToPaste = Array.from(unref(data) as Map<number, Row>)
.filter(([index]) => index >= activeCell.row! && index < activeCell.row! + selectionRowCount)
.map(([, row]) => row)
tempTotalRows = unref(_totalRows) as number
totalRowsBeforeActiveCell = activeCell.row
availableRowsToUpdate = Math.max(0, tempTotalRows - totalRowsBeforeActiveCell)
rowsToAdd = Math.max(0, selectionRowCount - availableRowsToUpdate)
}
const propsToPaste: string[] = []
let options = {
continue: false,
expand: (rowsToAdd > 0 || newColsNeeded > 0) && !isArrayStructure,
}
if (options.expand && !isArrayStructure) {
options = await expandRows?.({
newRows: rowsToAdd,
newColumns: newColsNeeded,
cellsOverwritten: Math.min(availableRowsToUpdate, selectionRowCount) * (pasteMatrixCols - newColsNeeded),
rowsUpdated: Math.min(availableRowsToUpdate, selectionRowCount),
})
if (!options.continue) return
}
let colsToPaste
const bulkOpsCols = []

if (options.expand) {
  colsToPaste = existingFields.slice(startColIndex, startColIndex + pasteMatrixCols)

  if (newColsNeeded > 0) {
    const columnsHash = (await api.dbTableColumn.hash(meta.value?.id)).hash
    const columnsLength = meta.value?.columns?.length || 0

    for (let i = 0; i < newColsNeeded; i++) {
      const tempCol = {
        uidt: UITypes.SingleLineText,
        order: columnsLength + i,
        column_order: {
          order: columnsLength + i,
          view_id: activeView.value?.id,
        },
        view_id: activeView.value?.id,
        table_name: meta.value?.table_name,
      }

      const newColTitle = generateUniqueColumnName({
        metaColumns: [...(meta.value?.columns ?? []), ...bulkOpsCols.map(({ column }) => column)],
        formState: tempCol,
      })

      bulkOpsCols.push({
        op: 'add',
        column: {
          ...tempCol,
          title: newColTitle,
        },
      })
    }

    await api.dbTableColumn.bulk(meta.value?.id, {
      hash: columnsHash,
      ops: bulkOpsCols,
    })

    await getMeta(meta?.value?.id as string, true)

    colsToPaste = [...colsToPaste, ...bulkOpsCols.map(({ column }) => column)]
  }
} else {
  colsToPaste = unref(fields).slice(activeCell.col, activeCell.col + pasteMatrixCols)
}

const dataRef = unref(data)
const updatedRows: Row[] = []
const newRows: Row[] = []
let isInfoShown = false

for (let i = 0; i < selectionRowCount; i++) {
  const clipboardRowIndex = i % clipboardMatrix.length
  let targetRow: any

  if (i < availableRowsToUpdate) {
    const absoluteRowIndex = totalRowsBeforeActiveCell + i

    if (isArrayStructure) {
      targetRow =
        i < (dataRef as Row[]).length
          ? (dataRef as Row[])[absoluteRowIndex]
          : {
              row: {},
              oldRow: {},
              rowMeta: {
                isExistingRow: true,
                rowIndex: absoluteRowIndex,
              },
            }
    } else {
      targetRow = (dataRef as Map<number, Row>).get(absoluteRowIndex) || {
        row: {},
        oldRow: {},
        rowMeta: {
          isExistingRow: true,
          rowIndex: absoluteRowIndex,
        },
      }
    }

    updatedRows.push(targetRow)
  } else {
    targetRow = {
      row: {},
      oldRow: {},
      rowMeta: {
        isExistingRow: false,
      },
    }
    newRows.push(targetRow)
  }

  for (let j = 0; j < clipboardMatrix[clipboardRowIndex].length; j++) {
    const column = colsToPaste[j]
    if (!column) continue

    if (isPasteable(targetRow, column)) {
      propsToPaste.push(column.title!)
      const pasteValue = convertCellData(
        {
          // Repeat the clipboard data array if the matrix is smaller than the selection
          value: clipboardMatrix[clipboardRowIndex][j],
          to: column.uidt as UITypes,
          column,
          appInfo: unref(appInfo),
          oldValue: column.uidt === UITypes.Attachment ? targetRow.row[column.title!] : undefined,
        },
        isMysql(meta.value?.source_id),
        true,
      )

      if (pasteValue !== undefined) {
        targetRow.row[column.title!] = pasteValue
      }
    } else if ((isBt(column) || isOo(column) || isMm(column)) && !isInfoShown) {
      message.info(t('msg.info.groupPasteIsNotSupportedOnLinksColumn'))
      isInfoShown = true
    }
  }
}

if (options.expand && !isArrayStructure) {
  await bulkUpsertRows?.(
    newRows,
    updatedRows,
    propsToPaste,
    undefined,
    bulkOpsCols.map(({ column }) => column),
  )

  clearSelectedRange()
  selectedRange.startRange({ row: totalRowsBeforeActiveCell, col: startColIndex })
  activeCell.row = totalRowsBeforeActiveCell + selectionRowCount - 1
  activeCell.col = startColIndex + pasteMatrixCols
  selectedRange.endRange({ row: activeCell.row, col: activeCell.col })
  scrollToCell?.()
} else {
  await bulkUpdateRows?.(updatedRows, propsToPaste)
}
} else {
if (selectedRange.isSingleCell()) {

packages/nc-gui/lang/en.json (1)

@@ -627,6 +627,7 @@
"noConditionsAdded": "No conditions added"
},
"labels": {
"continue": "Continue",
"toggleExperimentalFeature": "Enable or disable experimental features with ease, allowing you to explore and evaluate upcoming functionalities.",
"modifiedOn": "Modified on",
"configuration": "Configuration",

packages/nocodb/src/controllers/bulk-data-alias.controller.ts (18)

@@ -6,6 +6,7 @@ import {
Param,
Patch,
Post,
Put,
Req,
Res,
UseGuards,
@@ -114,4 +115,21 @@
query: req.query,
});
}
@Post(['/api/v1/db/data/bulk/:orgs/:baseName/:tableName/upsert'])
@Acl('bulkDataUpsert')
async bulkDataUpsert(
@TenantContext() context: NcContext,
@Req() req: NcRequest,
@Param('baseName') baseName: string,
@Param('tableName') tableName: string,
@Body() body: any,
) {
return await this.bulkDataAliasService.bulkDataUpsert(context, {
body: body,
cookie: req,
baseName: baseName,
tableName: tableName,
});
}
}
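A hedged sketch of exercising the new route directly (the org/base/table segments and auth header value are illustrative; the UI goes through the SDK's `$api.dbTableRow.bulkUpsert` instead):

```ts
// Rows that carry an existing primary key are updated; the rest are inserted
const res = await fetch(`/api/v1/db/data/bulk/noco/${baseName}/${tableName}/upsert`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'xc-auth': token }, // token: illustrative auth value
  body: JSON.stringify(rows),
})
const upsertedRecords = await res.json()
```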

packages/nocodb/src/db/BaseModelSqlv2.ts (529)

@@ -74,6 +74,7 @@ import generateLookupSelectQuery from '~/db/generateLookupSelectQuery';
import { getAliasGenerator } from '~/utils';
import applyAggregation from '~/db/aggregation';
import { extractMentions } from '~/utils/richTextHelper';
import { chunkArray } from '~/utils/tsUtils';
dayjs.extend(utc);
@@ -5268,6 +5269,375 @@ class BaseModelSqlv2 {
return { postInsertOps, preInsertOps };
}
async bulkUpsert(
datas: any[],
{
chunkSize = 100,
cookie,
raw = false,
foreign_key_checks = true,
}: {
chunkSize?: number;
cookie?: any;
raw?: boolean;
foreign_key_checks?: boolean;
} = {},
) {
let trx;
try {
const columns = await this.model.getColumns(this.context);
const insertedDatas = [];
const updatedDatas = [];
const aiPkCol = this.model.primaryKeys.find((pk) => pk.ai);
const agPkCol = this.model.primaryKeys.find((pk) => pk.meta?.ag);
// validate and prepare data
const preparedDatas = raw
? datas
: await Promise.all(
datas.map(async (d) => {
await this.validate(d, columns);
return this.model.mapAliasToColumn(
this.context,
d,
this.clientMeta,
this.dbDriver,
columns,
);
}),
);
const dataWithPks = [];
const dataWithoutPks = [];
for (const data of preparedDatas) {
if (!raw) {
await this.prepareNocoData(data, true, cookie);
}
const pkValues = this.extractPksValues(data);
if (pkValues !== 'N/A' && pkValues !== undefined) {
dataWithPks.push({ pk: pkValues, data });
} else {
// const insertObj = this.handleValidateBulkInsert(data, columns);
dataWithoutPks.push(data);
}
}
trx = await this.dbDriver.transaction();
// Check which records with PKs exist in the database
const existingRecords = await this.chunkList({
pks: dataWithPks.map((v) => v.pk),
});
const existingPkSet = new Set(
existingRecords.map((r) => this.extractPksValues(r, true)),
);
const toInsert = [...dataWithoutPks];
const toUpdate = [];
for (const { pk, data } of dataWithPks) {
if (existingPkSet.has(pk)) {
toUpdate.push(data);
} else {
// const insertObj = this.handleValidateBulkInsert(data, columns);
toInsert.push(data);
}
}
if (toUpdate.length > 0) {
const pks = [];
for (const data of toUpdate) {
if (!raw) await this.validate(data, columns);
const pkValues = this.extractPksValues(data);
pks.push(pkValues);
const wherePk = await this._wherePk(pkValues, true);
await trx(this.tnPath).update(data).where(wherePk);
}
const updatedRecords = await this.chunkList({
pks,
});
updatedDatas.push(...updatedRecords);
}
if (toInsert.length > 0) {
if (!foreign_key_checks) {
if (this.isPg) {
await trx.raw('set session_replication_role to replica;');
} else if (this.isMySQL) {
await trx.raw('SET foreign_key_checks = 0;');
}
}
let responses;
if (this.isSqlite || this.isMySQL) {
responses = [];
for (const insertData of toInsert) {
const query = trx(this.tnPath).insert(insertData);
let id = (await query)[0];
if (agPkCol) {
id = insertData[agPkCol.column_name];
}
responses.push(
this.extractCompositePK({
rowId: id,
ai: aiPkCol,
ag: agPkCol,
insertObj: insertData,
force: true,
}) || insertData,
);
}
} else {
const returningObj: Record<string, string> = {};
for (const col of this.model.primaryKeys) {
returningObj[col.title] = col.column_name;
}
responses =
!raw && (this.isPg || this.isMssql)
? await trx
.batchInsert(this.tnPath, toInsert, chunkSize)
.returning(
this.model.primaryKeys?.length ? returningObj : '*',
)
: await trx.batchInsert(this.tnPath, toInsert, chunkSize);
}
if (!foreign_key_checks) {
if (this.isPg) {
await trx.raw('set session_replication_role to origin;');
} else if (this.isMySQL) {
await trx.raw('SET foreign_key_checks = 1;');
}
}
insertedDatas.push(...responses);
}
await trx.commit();
const insertedDataList =
insertedDatas.length > 0
? await this.chunkList({
pks: insertedDatas.map((d) => this.extractPksValues(d)),
})
: [];
const updatedDataList =
updatedDatas.length > 0
? await this.chunkList({
pks: updatedDatas.map((d) => this.extractPksValues(d)),
})
: [];
if (insertedDatas.length === 1) {
await this.afterInsert(insertedDataList[0], this.dbDriver, cookie);
} else if (insertedDatas.length > 1) {
await this.afterBulkInsert(insertedDataList, this.dbDriver, cookie);
}
if (updatedDataList.length === 1) {
await this.afterUpdate(
existingRecords[0],
updatedDataList[0],
null,
cookie,
datas[0],
);
} else if (updatedDataList.length > 1) {
await this.afterBulkUpdate(
existingRecords,
updatedDataList,
this.dbDriver,
cookie,
);
}
return [...updatedDataList, ...insertedDataList];
} catch (e) {
await trx?.rollback();
throw e;
}
}
async chunkList(args: { pks: string[]; chunkSize?: number }) {
const { pks, chunkSize = 1000 } = args;
const data = [];
const chunkedPks = chunkArray(pks, chunkSize);
for (const chunk of chunkedPks) {
const chunkData = await this.list(
{
pks: chunk.join(','),
},
{
limitOverride: chunk.length,
ignoreViewFilterAndSort: true,
},
);
data.push(...chunkData);
}
return data;
}
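`chunkList` leans on `chunkArray` from `~/utils/tsUtils`, a file this commit touches but whose diff is not expanded above; a plausible minimal implementation:

```ts
// Assumed shape of chunkArray (its actual body is not shown in this view):
// split an array into fixed-size slices
export function chunkArray<T>(array: T[], chunkSize: number): T[][] {
  const chunks: T[][] = []
  for (let i = 0; i < array.length; i += chunkSize) {
    chunks.push(array.slice(i, i + chunkSize))
  }
  return chunks
}
```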
private async handleValidateBulkInsert(
d: Record<string, any>,
columns?: Column[],
params = { allowSystemColumn: false },
) {
const { allowSystemColumn } = params;
const cols = columns || (await this.model.getColumns(this.context));
const insertObj: Record<string, any> = {};
for (let i = 0; i < cols.length; ++i) {
const col = cols[i];
if (col.title in d) {
if (
isCreatedOrLastModifiedTimeCol(col) ||
isCreatedOrLastModifiedByCol(col)
) {
NcError.badRequest(
`Column "${col.title}" is auto generated and cannot be updated`,
);
}
if (
col.system &&
!allowSystemColumn &&
col.uidt !== UITypes.ForeignKey
) {
NcError.badRequest(
`Column "${col.title}" is system column and cannot be updated`,
);
}
}
// populate pk columns
if (col.pk) {
if (col.meta?.ag && !d[col.title]) {
d[col.title] = col.meta?.ag === 'nc' ? `rc_${nanoidv2()}` : uuidv4();
}
}
// map alias to column
if (!isVirtualCol(col)) {
let val =
d?.[col.column_name] !== undefined
? d?.[col.column_name]
: d?.[col.title];
if (val !== undefined) {
if (col.uidt === UITypes.Attachment && typeof val !== 'string') {
val = JSON.stringify(val);
}
if (col.uidt === UITypes.DateTime && dayjs(val).isValid()) {
val = this.formatDate(val);
}
insertObj[sanitize(col.column_name)] = val;
}
}
await this.validateOptions(col, insertObj);
// validate data
if (col?.meta?.validate && col?.validate) {
const validate = col.getValidators();
const cn = col.column_name;
const columnTitle = col.title;
if (validate) {
const { func, msg } = validate;
for (let j = 0; j < func.length; ++j) {
const fn =
typeof func[j] === 'string'
? customValidators[func[j]]
? customValidators[func[j]]
: Validator[func[j]]
: func[j];
const columnValue = insertObj?.[cn] || insertObj?.[columnTitle];
const arg =
typeof func[j] === 'string' ? columnValue + '' : columnValue;
if (
![null, undefined, ''].includes(columnValue) &&
!(fn.constructor.name === 'AsyncFunction'
? await fn(arg)
: fn(arg))
) {
NcError.badRequest(
msg[j]
.replace(/\{VALUE}/g, columnValue)
.replace(/\{cn}/g, columnTitle),
);
}
}
}
}
}
return insertObj;
}
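The helper is easiest to understand on a single row: column titles (aliases) are mapped to physical column names, an auto-generated pk is filled in when missing, and validators run against the mapped values. A hypothetical before/after from inside the class, assuming a model whose pk `Id` has `meta.ag === 'nc'` and is stored as `id`, and whose `Title` column is stored as `title`:

```ts
// Hypothetical model and values, for illustration only.
const row = { Title: 'abc' };
const insertObj = await this.handleValidateBulkInsert(row, columns, {
  allowSystemColumn: false,
});
// row.Id is now populated, e.g. 'rc_V1StGXR8_Z5jdHi6B' (nanoid with rc_ prefix)
// insertObj === { id: 'rc_V1StGXR8_Z5jdHi6B', title: 'abc' }
```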
// Helper method to format date
private formatDate(val: string): any {
const { isMySQL, isSqlite, isMssql, isPg } = this.clientMeta;
if (val.indexOf('-') < 0 && val.indexOf('+') < 0 && val.slice(-1) !== 'Z') {
// if no timezone is given,
// then append +00:00 to make it as UTC
val += '+00:00';
}
if (isMySQL) {
// first convert the value to utc
// from UI
// e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00
// from API
// e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00
// if timezone info is not found - considered as utc
// e.g. 2022-01-01 20:00:00 -> 2022-01-01 20:00:00
// if timezone info is found
// e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00
// e.g. 2022-01-01 20:00:00+00:00 -> 2022-01-01 20:00:00
// e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00
// then we use CONVERT_TZ to convert that in the db timezone
return this.dbDriver.raw(`CONVERT_TZ(?, '+00:00', @@GLOBAL.time_zone)`, [
dayjs(val).utc().format('YYYY-MM-DD HH:mm:ss'),
]);
} else if (isSqlite) {
// convert to UTC
// e.g. 2022-01-01T10:00:00.000Z -> 2022-01-01 04:30:00+00:00
return dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ');
} else if (isPg) {
// convert to UTC
// e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00
// then convert to db timezone
return this.dbDriver.raw(`? AT TIME ZONE CURRENT_SETTING('timezone')`, [
dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ'),
]);
} else if (isMssql) {
// convert to UTC
// e.g. 2023-05-10T08:49:32.000Z -> 2023-05-10 08:49:32-08:00
// then convert to db timezone
return this.dbDriver.raw(
`SWITCHOFFSET(CONVERT(datetimeoffset, ?), DATENAME(TzOffset, SYSDATETIMEOFFSET()))`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')],
);
} else {
// e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00
return dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ');
}
}
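The driver branches above differ only in how the normalized UTC value is handed to the database; the normalization step itself is plain dayjs. A standalone sketch of that step:

```ts
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);

// Values with an explicit offset are shifted to UTC; a trailing Z is already UTC.
dayjs('2022-01-01 20:00:00+08:00').utc().format('YYYY-MM-DD HH:mm:ss'); // '2022-01-01 12:00:00'
dayjs('2022-01-01 20:00:00Z').utc().format('YYYY-MM-DD HH:mm:ss');      // '2022-01-01 20:00:00'
```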
async bulkInsert(
datas: any[],
{
@@ -5292,7 +5662,6 @@ class BaseModelSqlv2 {
) {
let trx;
try {
// TODO: ag column handling for raw bulk insert
const insertDatas = raw ? datas : [];
let postInsertOps: ((rowId: any) => Promise<string>)[] = [];
let preInsertOps: (() => Promise<string>)[] = [];
@@ -5304,149 +5673,9 @@ class BaseModelSqlv2 {
const nestedCols = columns.filter((c) => isLinksOrLTAR(c));
for (const d of datas) {
const insertObj = {};
// populate pk, map alias to column, validate data
for (let i = 0; i < this.model.columns.length; ++i) {
const col = this.model.columns[i];
if (col.title in d) {
if (
isCreatedOrLastModifiedTimeCol(col) ||
isCreatedOrLastModifiedByCol(col)
) {
NcError.badRequest(
`Column "${col.title}" is auto generated and cannot be updated`,
);
}
if (
col.system &&
!allowSystemColumn &&
col.uidt !== UITypes.ForeignKey
) {
NcError.badRequest(
`Column "${col.title}" is system column and cannot be updated`,
);
}
}
// populate pk columns
if (col.pk) {
if (col.meta?.ag && !d[col.title]) {
d[col.title] =
col.meta?.ag === 'nc' ? `rc_${nanoidv2()}` : uuidv4();
}
}
// map alias to column
if (!isVirtualCol(col)) {
let val =
d?.[col.column_name] !== undefined
? d?.[col.column_name]
: d?.[col.title];
if (val !== undefined) {
if (
col.uidt === UITypes.Attachment &&
typeof val !== 'string'
) {
val = JSON.stringify(val);
}
if (col.uidt === UITypes.DateTime && dayjs(val).isValid()) {
const { isMySQL, isSqlite, isMssql, isPg } = this.clientMeta;
if (
val.indexOf('-') < 0 &&
val.indexOf('+') < 0 &&
val.slice(-1) !== 'Z'
) {
// if no timezone is given,
// then append +00:00 to make it as UTC
val += '+00:00';
}
if (isMySQL) {
// first convert the value to utc
// from UI
// e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00
// from API
// e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00
// if timezone info is not found - considered as utc
// e.g. 2022-01-01 20:00:00 -> 2022-01-01 20:00:00
// if timezone info is found
// e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00
// e.g. 2022-01-01 20:00:00+00:00 -> 2022-01-01 20:00:00
// e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00
// then we use CONVERT_TZ to convert that in the db timezone
val = this.dbDriver.raw(
`CONVERT_TZ(?, '+00:00', @@GLOBAL.time_zone)`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ss')],
);
} else if (isSqlite) {
// convert to UTC
// e.g. 2022-01-01T10:00:00.000Z -> 2022-01-01 04:30:00+00:00
val = dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ');
} else if (isPg) {
// convert to UTC
// e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00
// then convert to db timezone
val = this.dbDriver.raw(
`? AT TIME ZONE CURRENT_SETTING('timezone')`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')],
);
} else if (isMssql) {
// convert to UTC
// e.g. 2023-05-10T08:49:32.000Z -> 2023-05-10 08:49:32-08:00
// then convert to db timezone
val = this.dbDriver.raw(
`SWITCHOFFSET(CONVERT(datetimeoffset, ?), DATENAME(TzOffset, SYSDATETIMEOFFSET()))`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')],
);
} else {
// e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00
val = dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ');
}
}
insertObj[sanitize(col.column_name)] = val;
}
}
await this.validateOptions(col, insertObj);
// validate data
if (col?.meta?.validate && col?.validate) {
const validate = col.getValidators();
const cn = col.column_name;
const columnTitle = col.title;
if (validate) {
const { func, msg } = validate;
for (let j = 0; j < func.length; ++j) {
const fn =
typeof func[j] === 'string'
? customValidators[func[j]]
? customValidators[func[j]]
: Validator[func[j]]
: func[j];
const columnValue =
insertObj?.[cn] || insertObj?.[columnTitle];
const arg =
typeof func[j] === 'string'
? columnValue + ''
: columnValue;
if (
![null, undefined, ''].includes(columnValue) &&
!(fn.constructor.name === 'AsyncFunction'
? await fn(arg)
: fn(arg))
) {
NcError.badRequest(
msg[j]
.replace(/\{VALUE}/g, columnValue)
.replace(/\{cn}/g, columnTitle),
);
}
}
}
}
}
const insertObj = await this.handleValidateBulkInsert(d, columns, {
allowSystemColumn,
});
await this.prepareNocoData(insertObj, true, cookie);
@@ -5655,15 +5884,9 @@ class BaseModelSqlv2 {
i === updateDatas.length - 1
) {
const tempToRead = pkAndData.splice(0, pkAndData.length);
const oldRecords = await this.list(
{
pks: tempToRead.map((v) => v.pk).join(','),
},
{
limitOverride: tempToRead.length,
ignoreViewFilterAndSort: true,
},
);
const oldRecords = await this.chunkList({
pks: tempToRead.map((v) => v.pk),
});
for (const record of tempToRead) {
const oldRecord = oldRecords.find((r) =>

128
packages/nocodb/src/schema/swagger.json

@@ -11539,6 +11539,134 @@
}
}
},
"/api/v1/db/data/bulk/{orgs}/{baseName}/{tableName}/upsert": {
"parameters": [
{
"schema": {
"type": "string"
},
"name": "orgs",
"in": "path",
"required": true,
"description": "Organisation Name. Currently `noco` will be used."
},
{
"schema": {
"type": "string"
},
"name": "baseName",
"in": "path",
"required": true,
"description": "Base Name"
},
{
"schema": {
"type": "string"
},
"name": "tableName",
"in": "path",
"required": true,
"description": "Table Name"
}
],
"post": {
"summary": "Bulk Upsert Table Rows",
"operationId": "db-table-row-bulk-upsert",
"responses": {
"200": {
"description": "OK",
"content": {
"application/json": {
"schema": {
"type": "array",
"description": "List of returned values. 1 means successful. 0 means failed."
},
"examples": {
"Example 1": {
"value": [
{
"Id": 1,
"Title": "abc"
},
{
"Id": 2,
"Title": "def"
}
]
}
}
}
}
},
"400": {
"$ref": "#/components/responses/BadRequest"
}
},
"tags": [
"DB Table Row"
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"type": "array",
"description": "List of data objects",
"items": {
"x-stoplight": {
"id": "7u0mp8nzlvysz"
},
"type": "object"
}
},
"examples": {
"Example 1": {
"value": [
{
"Id": 1,
"Title": "abc"
},
{
"Title": "def"
}
]
}
}
}
},
"description": ""
},
"application/json": {
"schema": {
"type": "array",
"description": "List of data objects",
"items": {
"x-stoplight": {
"id": "7u0mp8nzlvysz"
},
"type": "object"
}
},
"examples": {
"Example 1": {
"value": [
{
"Title": "abc"
},
{
"Title": "def"
}
]
}
}
},
"description": "Bulk upsert table rows in one go.",
"parameters": [
{
"$ref": "#/components/parameters/xc-auth"
}
]
}
},
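For reference, a hedged client-side sketch of the new endpoint; the host, base/table names and token are placeholders, and the payload mirrors the examples above (rows with an `Id` are updated, rows without one are inserted):

```ts
const res = await fetch(
  'http://localhost:8080/api/v1/db/data/bulk/noco/myBase/myTable/upsert',
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'xc-auth': '<token>' },
    body: JSON.stringify([{ Id: 1, Title: 'abc' }, { Title: 'def' }]),
  },
);
const records = await res.json(); // updated records first, then inserted ones
```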
"/api/v1/db/data/bulk/{orgs}/{baseName}/{tableName}": {
"parameters": [
{

15
packages/nocodb/src/services/bulk-data-alias.service.ts

@@ -11,6 +11,7 @@ type BulkOperation =
| 'bulkUpdate'
| 'bulkUpdateAll'
| 'bulkDelete'
| 'bulkUpsert'
| 'bulkDeleteAll';
@Injectable()
@@ -126,4 +127,18 @@ export class BulkDataAliasService {
options: [param.query],
});
}
async bulkDataUpsert(
context: NcContext,
param: PathParams & {
body: any;
cookie: any;
},
) {
return await this.executeBulkOperation(context, {
...param,
operation: 'bulkUpsert',
options: [param.body, { cookie: param.cookie }],
});
}
}
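The matching controller change (listed in this PR's file set but not shown in this excerpt) only needs to forward the body and request to this method. A hypothetical NestJS sketch, with the route and parameter names assumed from the swagger path above:

```ts
import { Body, HttpCode, Param, Post, Req } from '@nestjs/common';

// Hypothetical handler inside BulkDataAliasController, not the actual diff.
@Post(['/api/v1/db/data/bulk/:orgs/:baseName/:tableName/upsert'])
@HttpCode(200)
async bulkDataUpsert(
  @TenantContext() context: NcContext,
  @Req() req: NcRequest,
  @Param('baseName') baseName: string,
  @Param('tableName') tableName: string,
  @Body() body: any,
) {
  return await this.bulkDataAliasService.bulkDataUpsert(context, {
    baseName,
    tableName,
    body,
    cookie: req,
  });
}
```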

18
packages/nocodb/src/utils/tsUtils.ts

@@ -0,0 +1,18 @@
/**
 * Split an array into chunks.
 * @param array array to split
 * @param chunkSize size of each chunk
 * @returns an array of chunks; the last chunk may be shorter than chunkSize
 */
export function chunkArray<K>(
array: Array<K>,
chunkSize: number,
): Array<Array<K>> {
const chunks = [];
for (let i = 0; i < array.length; i += chunkSize) {
chunks.push(array.slice(i, i + chunkSize));
}
return chunks;
}
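A quick check of the helper's edge behavior (the final chunk simply carries the remainder, and an empty input yields no chunks):

```ts
chunkArray([1, 2, 3, 4, 5], 2); // [[1, 2], [3, 4], [5]]
chunkArray([], 2);              // []
```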

31
tests/playwright/pages/Dashboard/Grid/ExpandTable.ts

@@ -0,0 +1,31 @@
import BasePage from '../../Base';
import { GridPage } from './index';
export class ExpandTablePageObject extends BasePage {
readonly grid: GridPage;
constructor(grid: GridPage) {
super(grid.rootPage);
this.grid = grid;
}
async upsert() {
const expandTableModal = this.rootPage.getByTestId('nc-expand-table-modal');
// isVisible() does not wait; waitFor() blocks until the modal is shown
await expandTableModal.waitFor({ state: 'visible' });
await expandTableModal.getByTestId('nc-table-expand-yes').click();
await this.rootPage.getByTestId('nc-table-expand').click();
}
async updateOnly() {
const expandTableModal = this.rootPage.getByTestId('nc-expand-table-modal');
await expandTableModal.waitFor({ state: 'visible' });
await expandTableModal.getByTestId('nc-table-expand-no').click();
await this.rootPage.getByTestId('nc-table-expand').click();
}
}
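Usage from a spec then comes down to one call after triggering the paste; a minimal sketch, assuming the standard `grid` fixture:

```ts
await page.keyboard.press('Control+v');
await grid.expandTableOverlay.upsert(); // expand the table and insert extra rows
// ...or decline growth and only overwrite existing cells:
// await grid.expandTableOverlay.updateOnly();
```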

3
tests/playwright/pages/Dashboard/Grid/index.ts

@@ -14,6 +14,7 @@ import { WorkspaceMenuObject } from '../common/WorkspaceMenu';
import { GroupPageObject } from './Group';
import { ColumnHeaderPageObject } from './columnHeader';
import { AggregaionBarPage } from './AggregationBar';
import { ExpandTablePageObject } from './ExpandTable';
export class GridPage extends BasePage {
readonly dashboard: DashboardPage;
@@ -32,6 +33,7 @@ export class GridPage extends BasePage {
readonly rowPage: RowPageObject;
readonly groupPage: GroupPageObject;
readonly aggregationBar: AggregaionBarPage;
readonly expandTableOverlay: ExpandTablePageObject;
readonly btn_addNewRow: Locator;
@@ -52,6 +54,7 @@ export class GridPage extends BasePage {
this.rowPage = new RowPageObject(this);
this.groupPage = new GroupPageObject(this);
this.aggregationBar = new AggregaionBarPage(this);
this.expandTableOverlay = new ExpandTablePageObject(this);
this.btn_addNewRow = this.get().locator('.nc-grid-add-new-cell');
}

8
tests/playwright/tests/db/features/keyboardShortcuts.spec.ts

@@ -439,9 +439,13 @@ test.describe('Clipboard support', () => {
await page.keyboard.press((await grid.isMacOs()) ? 'Meta+v' : 'Control+v');
// reload page
await dashboard.rootPage.reload();
await page.waitForTimeout(1000);
await grid.expandTableOverlay.upsert();
await page.waitForTimeout(1000);
// verify copied data
for (let i = 4; i <= 5; i++) {
await grid.cell.verify({ index: i, columnHeader: 'SingleLineText', value: cellText[i - 4] });
