feat: added bulk delete for range delete rows

pull/7389/head
Ramesh Mane · 1 year ago
commit d6359632b4
1 changed file: packages/nc-gui/composables/useData.ts (154 changed lines)
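
This commit switches the range-delete path from one deleteRowById call per row to a single bulk API round trip, and wires a pagination-loading state into bulkInsertRows/bulkDeleteRows. A rough sketch of the new delete flow, distilled from the diff below (an illustration, not the committed code; the helper is passed in rather than taken from the composable scope):

import type { Ref } from 'vue'

// Sketch only: the shape of the bulk range-delete path introduced below.
// `bulkDeleteRows` and the { Id, row, rowIndex } records mirror the diff;
// `rows` stands in for the composable's `formattedData` ref.
async function deleteRange<RowT>(
  removed: { Id: string; row: RowT; rowIndex: number }[],
  rows: Ref<RowT[]>,
  bulkDeleteRows: (payload: { Id: string }[]) => Promise<{ Id: string }[] | void>,
) {
  // One bulk request instead of N single-row deletes.
  const deletedIds = await bulkDeleteRows(removed.map(({ Id }) => ({ Id })))

  if (Array.isArray(deletedIds)) {
    // Prune the deleted rows from the locally cached page.
    const deletedIndexes = new Set(removed.map((r) => r.rowIndex))
    rows.value = rows.value.filter((_, index) => !deletedIndexes.has(index))
  }
}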

@@ -45,6 +45,8 @@ export function useData(args: {
 
   const { $api } = useNuxtApp()
 
+  const { isPaginationLoading } = storeToRefs(useViewsStore())
+
   const selectedAllRecords = computed({
     get() {
       return !!formattedData.value.length && formattedData.value.every((row: Row) => row.rowMeta.selected)
@@ -112,6 +114,7 @@ export function useData(args: {
         ) {
           row.row = { ...pkData, ...row.row }
           const insertedData = await insertRow(row, ltarState, undefined, true)
+
           if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
             if (pg.page === paginationData.value.page) {
               formattedData.value.splice(rowIndex, 0, {
@@ -156,61 +159,47 @@ export function useData(args: {
   async function bulkInsertRows(
     rows: Row[],
-    ltarState: Record<string, any> = {},
     { metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
     undo = false,
   ) {
     const rowsToInsert = []
+    isPaginationLoading.value = true
+
     try {
       for (const currentRow of rows) {
         let { missingRequiredColumns, insertObj } = await populateInsertObject({
           meta: metaValue!,
-          ltarState,
+          ltarState: {},
           getMeta,
           row: currentRow.row,
           undo,
         })
 
         const autoGeneratedKeys = clone(metaValue?.columns || [])
-          .filter((c) => isCreatedOrLastModifiedByCol(c) || isCreatedOrLastModifiedTimeCol(c))
+          .filter((c) => c.uidt !== UITypes.ID && (isCreatedOrLastModifiedByCol(c) || isCreatedOrLastModifiedTimeCol(c)))
           .map((c) => c.title)
 
-        console.log('auto', autoGeneratedKeys, currentRow)
         // delete auto generated keys
         for (const key of autoGeneratedKeys) {
-          if (key !== 'Id') {
-            delete insertObj[key]
-          }
+          delete insertObj[key!]
         }
 
         if (missingRequiredColumns.size) continue
-        else rowsToInsert.push({ ...insertObj, ...(ltarState || {}) })
+        else rowsToInsert.push({ ...insertObj })
       }
 
-      const bulkInsertedData = await $api.dbDataTableRow.create(metaValue?.id as string, rowsToInsert, {
+      const bulkInsertedIds = await $api.dbDataTableRow.create(metaValue?.id as string, rowsToInsert, {
         viewId: viewMetaValue?.id as string,
       })
 
-      // if (!undo) {
-      //   addUndo({
-      //     redo: {
-      //       fn: async function redo() {},
-      //       args: [],
-      //     },
-      //     undo: {
-      //       fn: async function undo(this: UndoRedoAction) {},
-      //       args: [],
-      //     },
-      //     scope: defineViewScope({ view: viewMeta.value }),
-      //   })
-      // }
-
       await callbacks?.syncCount?.()
 
-      return bulkInsertedData
+      return bulkInsertedIds
     } catch (error: any) {
       message.error(await extractSdkResponseErrorMsg(error))
     } finally {
       await callbacks?.globalCallback?.()
+      isPaginationLoading.value = false
     }
   }
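
bulkInsertRows above now strips auto-generated audit columns (created/last-modified time and created/last-modified by) from each insert object before the single dbDataTableRow.create call. A standalone sketch of that filtering, using plain string uidt values as stand-ins for the SDK's UITypes and isCreatedOrLastModified* helpers:

// Sketch only: drop auto-generated audit columns from a bulk-insert payload,
// mirroring the filter used by bulkInsertRows above.
interface ColumnLike {
  title: string
  uidt: string
}

const AUDIT_UIDTS = new Set(['CreatedTime', 'LastModifiedTime', 'CreatedBy', 'LastModifiedBy'])

function stripAutoGeneratedKeys(insertObj: Record<string, unknown>, columns: ColumnLike[]) {
  const autoGeneratedKeys = columns
    .filter((c) => c.uidt !== 'ID' && AUDIT_UIDTS.has(c.uidt))
    .map((c) => c.title)

  for (const key of autoGeneratedKeys) {
    // The server populates these columns itself, so they are not sent.
    delete insertObj[key]
  }

  return insertObj
}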
@@ -653,13 +642,12 @@ export function useData(args: {
           }
         }),
       )
 
       if (Array.isArray(removedRowIds)) {
         const removedRowsMap: Map<string, string | number> = new Map(removedRowIds.map((row) => [row.Id as string, '']))
-        removedRowsData.filter((row) => {
-          if (removedRowsMap.has(row.Id)) {
-            return true
-          }
-        })
+        removedRowsData.filter((row) => removedRowsMap.has(row.Id))
 
         const rowIndexes = removedRowsData.map((row) => row.rowIndex)
         formattedData.value = formattedData.value.filter((_, index) => rowIndexes.includes(index))
       }
@@ -707,7 +695,6 @@ export function useData(args: {
           const insertedRowIds = await bulkInsertRows(
             rowsToInsert.map((row) => row.row),
-            {},
             undefined,
             true,
           )
@@ -746,7 +733,7 @@ export function useData(args: {
     // plus one because we want to include the end row
     let row = start + 1
 
-    const removedRowsData: { id?: string; row: Row; rowIndex: number }[] = []
+    const removedRowsData: { Id: string; row: Row; rowIndex: number }[] = []
     while (row--) {
       try {
         const { row: rowObj, rowMeta } = formattedData.value[row] as Record<string, any>
@@ -756,13 +743,10 @@ export function useData(args: {
             .map((c) => rowObj[c.title as string])
             .join('___')
 
-          const successfulDeletion = await deleteRowById(id as string)
-          if (!successfulDeletion) {
-            continue
+          if (id) {
+            removedRowsData.push({ Id: id, row: clone(formattedData.value[row]), rowIndex: row })
           }
-          removedRowsData.push({ id, row: clone(formattedData.value[row]), rowIndex: row })
         }
-        formattedData.value.splice(row, 1)
       } catch (e: any) {
         return message.error(`${t('msg.error.deleteRowFailed')}: ${await extractSdkResponseErrorMsg(e)}`)
       }
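
The loop above derives each row's Id by reading its primary-key column values off the row object and joining them with '___', the composite-key separator used in this composable. A minimal sketch of that extraction, with a hypothetical simplified column shape standing in for the SDK's ColumnType:

// Sketch only: compose a row's Id from its primary-key columns, as the
// range-delete loop above does. `PkColumn` is a hypothetical stand-in for ColumnType.
interface PkColumn {
  title: string
  pk?: boolean
}

function extractCompositeId(row: Record<string, unknown>, columns: PkColumn[]): string {
  return columns
    .filter((c) => c.pk)
    .map((c) => String(row[c.title]))
    .join('___')
}

// e.g. a table keyed on (OrderId, ProductId):
// extractCompositeId({ OrderId: 7, ProductId: 3, Qty: 2 }, [
//   { title: 'OrderId', pk: true },
//   { title: 'ProductId', pk: true },
//   { title: 'Qty' },
// ]) === '7___3'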
@@ -770,16 +754,46 @@ export function useData(args: {
       if (row === end) break
     }
 
+    try {
+      const removedRowIds: { Id: string }[] = await bulkDeleteRows(
+        removedRowsData.map((row) => {
+          return {
+            Id: row.Id,
+          }
+        }),
+      )
+
+      if (Array.isArray(removedRowIds)) {
+        const removedRowsMap: Map<string, string | number> = new Map(removedRowIds.map((row) => [row.Id as string, '']))
+        removedRowsData.filter((row) => removedRowsMap.has(row.Id))
+
+        const rowIndexes = removedRowsData.map((row) => row.rowIndex)
+        formattedData.value = formattedData.value.filter((_, index) => rowIndexes.includes(index))
+      }
+    } catch (e: any) {
+      return message.error(`${t('msg.error.bulkDeleteRowsFailed')}: ${await extractSdkResponseErrorMsg(e)}`)
+    }
+
     addUndo({
       redo: {
-        fn: async function redo(this: UndoRedoAction, removedRowsData: { id?: string; row: Row; rowIndex: number }[]) {
-          for (const { id, row } of removedRowsData) {
-            await deleteRowById(id as string)
-            const pk: Record<string, string> = rowPkData(row.row, meta?.value?.columns as ColumnType[])
-            const rowIndex = findIndexByPk(pk, formattedData.value)
-            if (rowIndex !== -1) formattedData.value.splice(rowIndex, 1)
-            paginationData.value.totalRows = paginationData.value.totalRows! - 1
+        fn: async function redo(this: UndoRedoAction, removedRowsData: { Id: string; row: Row; rowIndex: number }[]) {
+          const removedRowIds = await bulkDeleteRows(
+            removedRowsData.map((row) => {
+              return {
+                Id: row.Id,
+              }
+            }),
+          )
+
+          if (Array.isArray(removedRowIds)) {
+            for (const { row } of removedRowsData) {
+              const pk: Record<string, string> = rowPkData(row.row, meta?.value?.columns as ColumnType[])
+              const rowIndex = findIndexByPk(pk, formattedData.value)
+              if (rowIndex !== -1) formattedData.value.splice(rowIndex, 1)
+              paginationData.value.totalRows = paginationData.value.totalRows! - 1
+            }
           }
+
           await callbacks?.syncPagination?.()
         },
         args: [removedRowsData],
@@ -787,21 +801,36 @@ export function useData(args: {
       undo: {
         fn: async function undo(
           this: UndoRedoAction,
-          removedRowsData: { id?: string; row: Row; rowIndex: number }[],
+          removedRowsData: { Id: string; row: Row; rowIndex: number }[],
           pg: { page: number; pageSize: number },
         ) {
-          for (const { row, rowIndex } of removedRowsData.slice().reverse()) {
-            const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
-            row.row = { ...pkData, ...row.row }
-            await insertRow(row, {}, {}, true)
-            if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
-              if (pg.page === paginationData.value.page) {
-                formattedData.value.splice(rowIndex, 0, row)
-              } else {
-                await callbacks?.changePage?.(pg.page)
-              }
-            } else {
-              await callbacks?.loadData?.()
-            }
-          }
+          const rowsToInsert = removedRowsData
+            .map((row) => {
+              const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
+              row.row = { ...pkData, ...row.row }
+              return row
+            })
+            .reverse()
+
+          const insertedRowIds = await bulkInsertRows(
+            rowsToInsert.map((row) => row.row),
+            undefined,
+            true,
+          )
+
+          if (Array.isArray(insertedRowIds)) {
+            for (const { row, rowIndex } of rowsToInsert) {
+              recoverLTARRefs(row.row)
+
+              if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
+                if (pg.page === paginationData.value.page) {
+                  formattedData.value.splice(rowIndex, 0, row)
+                } else {
+                  await callbacks?.changePage?.(pg.page)
+                }
+              } else {
+                await callbacks?.loadData?.()
+              }
+            }
+          }
         },
@@ -818,32 +847,21 @@ export function useData(args: {
   async function bulkDeleteRows(
     rows: { Id: string }[],
     { metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
-    undo = false,
   ) {
+    isPaginationLoading.value = true
+
     try {
       const bulkDeletedRowsData = await $api.dbDataTableRow.delete(metaValue?.id as string, rows, {
         viewId: viewMetaValue?.id as string,
       })
 
-      // if (!undo) {
-      //   addUndo({
-      //     redo: {
-      //       fn: async function redo() {},
-      //       args: [],
-      //     },
-      //     undo: {
-      //       fn: async function undo(this: UndoRedoAction) {},
-      //       args: [],
-      //     },
-      //     scope: defineViewScope({ view: viewMeta.value }),
-      //   })
-      // }
-
       await callbacks?.syncCount?.()
 
       return bulkDeletedRowsData
     } catch (error: any) {
       message.error(await extractSdkResponseErrorMsg(error))
     } finally {
       await callbacks?.globalCallback?.()
+      isPaginationLoading.value = false
     }
   }
