@@ -1,9 +1,10 @@
 import type { ColumnType, LinkToAnotherRecordType, PaginatedType, RelationTypes, TableType, ViewType } from 'nocodb-sdk'
-import { UITypes } from 'nocodb-sdk'
+import { UITypes, isCreatedOrLastModifiedByCol, isCreatedOrLastModifiedTimeCol } from 'nocodb-sdk'
 import type { ComputedRef, Ref } from 'vue'
 import {
   NOCO,
   computed,
+  extractPk,
   extractPkFromRow,
   extractSdkResponseErrorMsg,
   findIndexByPk,
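For context, the newly imported `extractPk` helper (used together with `extractPkFromRow` below) builds the composite primary key that the bulk delete/insert paths rely on. Its exact implementation lives in the nc-gui utilities and is not part of this diff; the sketch below is only an approximation, inferred from the inline primary-key handling that this diff removes later (joining primary-key values with '___'):

// Hypothetical sketch, not the actual nc-gui implementation.
import type { ColumnType } from 'nocodb-sdk'

// Joins the titles of all primary key columns into a single composite key name.
export function extractPk(columns?: ColumnType[]) {
  if (!columns || !Array.isArray(columns)) return null
  return columns
    .filter((c) => c.pk)
    .map((c) => c.title)
    .join('___')
}

// Joins the row's values for those primary key columns into a composite key value.
export function extractPkFromRow(row: Record<string, any>, columns: ColumnType[]) {
  return columns
    .filter((c) => c.pk)
    .map((c) => row?.[c.title as string])
    .join('___')
}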
@@ -45,6 +46,8 @@ export function useData(args: {
   const { $api } = useNuxtApp()

+  const { isPaginationLoading } = storeToRefs(useViewsStore())
+
   const selectedAllRecords = computed({
     get() {
       return !!formattedData.value.length && formattedData.value.every((row: Row) => row.rowMeta.selected)
@@ -112,6 +115,7 @@ export function useData(args: {
         ) {
           row.row = { ...pkData, ...row.row }
           const insertedData = await insertRow(row, ltarState, undefined, true)
+
           if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
             if (pg.page === paginationData.value.page) {
               formattedData.value.splice(rowIndex, 0, {
@@ -154,6 +158,52 @@ export function useData(args: {
     }
   }

+  async function bulkInsertRows(
+    rows: Row[],
+    { metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
+    undo = false,
+  ) {
+    isPaginationLoading.value = true
+
+    const autoGeneratedKeys = clone(metaValue?.columns || [])
+      .filter((c) => !c.pk && (isCreatedOrLastModifiedTimeCol(c) || isCreatedOrLastModifiedByCol(c)))
+      .map((c) => c.title)
+
+    try {
+      const rowsToInsert =
+        (
+          await Promise.all(
+            rows.map(async (currentRow) => {
+              const { missingRequiredColumns, insertObj } = await populateInsertObject({
+                meta: metaValue!,
+                ltarState: {},
+                getMeta,
+                row: currentRow.row,
+                undo,
+              })
+
+              if (missingRequiredColumns.size === 0) {
+                autoGeneratedKeys.forEach((key) => delete insertObj[key!])
+                return insertObj
+              }
+            }),
+          )
+        )?.filter(Boolean) ?? [] // Filter out undefined values (if any)
+
+      const bulkInsertedIds = await $api.dbDataTableRow.create(metaValue?.id as string, rowsToInsert, {
+        viewId: viewMetaValue?.id as string,
+      })
+
+      await callbacks?.syncCount?.()
+      return bulkInsertedIds
+    } catch (error: any) {
+      message.error(await extractSdkResponseErrorMsg(error))
+    } finally {
+      await callbacks?.globalCallback?.()
+      isPaginationLoading.value = false
+    }
+  }
+
   // inside this method use metaValue and viewMetaValue to refer meta
   // since sometimes we need to pass old metas
   async function updateRowProperty(
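The new `bulkInsertRows` strips created/last-modified system columns from each row and pushes everything to the bulk data API in one call. A minimal usage sketch, mirroring how the undo handlers later in this diff call it (`rowsToRestore` and the surrounding composable context are assumed, not part of the diff):

// Sketch only: assumes it runs inside useData, where bulkInsertRows is in scope.
// `rowsToRestore` stands in for an array of previously captured Row objects.
const insertedRowIds = await bulkInsertRows(
  rowsToRestore, // Row[] to recreate through the bulk data API
  undefined, // fall back to the composable's current meta.value / viewMeta.value
  true, // undo flag, forwarded to populateInsertObject
)

// A non-array result means the call failed and the error was already surfaced via message.error.
if (Array.isArray(insertedRowIds)) {
  await callbacks?.loadData?.()
}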
@@ -566,64 +616,106 @@ export function useData(args: {
   async function deleteSelectedRows() {
     let row = formattedData.value.length
-    const removedRowsData: { id?: string; row: Row; rowIndex: number }[] = []
+    let removedRowsData: Record<string, any>[] = []
+    let compositePrimaryKey = ''

     while (row--) {
       try {
-        const { row: rowObj, rowMeta } = formattedData.value[row] as Record<string, any>
-        if (!rowMeta.selected) {
-          continue
-        }
-        if (!rowMeta.new) {
-          const id = meta?.value?.columns
-            ?.filter((c) => c.pk)
-            .map((c) => rowObj[c.title as string])
-            .join('___')
-
-          const successfulDeletion = await deleteRowById(id as string)
-          if (!successfulDeletion) {
-            continue
-          }
-          removedRowsData.push({ id, row: clone(formattedData.value[row]), rowIndex: row })
+        const { row: rowObj, rowMeta } = formattedData.value[row] as Record<string, any>
+        if (!rowMeta.selected) {
+          continue
+        }
+        if (!rowMeta.new) {
+          const extractedPk = extractPk(meta?.value?.columns as ColumnType[])
+          const compositePkValue = extractPkFromRow(rowObj, meta?.value?.columns as ColumnType[])
+
+          if (extractedPk && compositePkValue) {
+            if (!compositePrimaryKey) compositePrimaryKey = extractedPk
+
+            removedRowsData.push({
+              [compositePrimaryKey]: compositePkValue as string,
+              row: clone(formattedData.value[row]) as Row,
+              rowIndex: row as number,
+            })
+          }
         }
         formattedData.value.splice(row, 1)
       } catch (e: any) {
         return message.error(`${t('msg.error.deleteRowFailed')}: ${await extractSdkResponseErrorMsg(e)}`)
       }
     }

     if (!removedRowsData.length) return

+    try {
+      const removedRowIds: Record<string, any>[] = await bulkDeleteRows(
+        removedRowsData.map((row) => ({ [compositePrimaryKey]: row[compositePrimaryKey] as string })),
+      )
+
+      if (Array.isArray(removedRowIds)) {
+        const removedRowsDataSet = new Set(removedRowIds.map((row) => row[compositePrimaryKey]))
+
+        removedRowsData = removedRowsData.filter((row) => removedRowsDataSet.has(row[compositePrimaryKey] as string))
+
+        const rowIndexesSet = new Set(removedRowsData.map((row) => row.rowIndex))
+        formattedData.value = formattedData.value.filter((_, index) => rowIndexesSet.has(index))
+      } else {
+        removedRowsData = []
+      }
+    } catch (e: any) {
+      return message.error(`${t('msg.error.deleteRowFailed')}: ${await extractSdkResponseErrorMsg(e)}`)
+    }
+
+    if (!removedRowsData.length) return
+
     addUndo({
       redo: {
-        fn: async function redo(this: UndoRedoAction, removedRowsData: { id?: string; row: Row; rowIndex: number }[]) {
-          for (const { id, row } of removedRowsData) {
-            await deleteRowById(id as string)
-            const pk: Record<string, string> = rowPkData(row.row, meta?.value?.columns as ColumnType[])
-            const rowIndex = findIndexByPk(pk, formattedData.value)
-            if (rowIndex !== -1) formattedData.value.splice(rowIndex, 1)
-            paginationData.value.totalRows = paginationData.value.totalRows! - 1
+        fn: async function redo(this: UndoRedoAction, removedRowsData: Record<string, any>[], compositePrimaryKey: string) {
+          const removedRowIds = await bulkDeleteRows(
+            removedRowsData.map((row) => ({ [compositePrimaryKey]: row[compositePrimaryKey] as string })),
+          )
+
+          if (Array.isArray(removedRowIds)) {
+            for (const { row } of removedRowsData) {
+              const primaryKey: Record<string, string> = rowPkData(row.row, meta?.value?.columns as ColumnType[])
+              const rowIndex = findIndexByPk(primaryKey, formattedData.value)
+              if (rowIndex !== -1) formattedData.value.splice(rowIndex, 1)
+              paginationData.value.totalRows = paginationData.value.totalRows! - 1
+            }
+          }

           await callbacks?.syncPagination?.()
         },
-        args: [removedRowsData],
+        args: [removedRowsData, compositePrimaryKey],
       },
       undo: {
         fn: async function undo(
           this: UndoRedoAction,
-          removedRowsData: { id?: string; row: Row; rowIndex: number }[],
+          removedRowsData: Record<string, any>[],
           pg: { page: number; pageSize: number },
         ) {
-          for (const { row, rowIndex } of removedRowsData.slice().reverse()) {
-            const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
-            row.row = { ...pkData, ...row.row }
-            await insertRow(row, {}, {}, true)
-            recoverLTARRefs(row.row)
-            if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
-              if (pg.page === paginationData.value.page) {
-                formattedData.value.splice(rowIndex, 0, row)
+          const rowsToInsert = removedRowsData
+            .map((row) => {
+              const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
+              row.row = { ...pkData, ...row.row }
+              return row
+            })
+            .reverse()
+
+          const insertedRowIds = await bulkInsertRows(
+            rowsToInsert.map((row) => row.row),
+            undefined,
+            true,
+          )
+
+          if (Array.isArray(insertedRowIds)) {
+            for (const { row, rowIndex } of rowsToInsert) {
+              recoverLTARRefs(row.row)
+
+              if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
+                if (pg.page === paginationData.value.page) {
+                  formattedData.value.splice(rowIndex, 0, row)
+                } else {
+                  await callbacks?.changePage?.(pg.page)
+                }
               } else {
-                await callbacks?.changePage?.(pg.page)
+                await callbacks?.loadData?.()
               }
-            } else {
-              await callbacks?.loadData?.()
             }
+          } else {
+            await callbacks?.loadData?.()
+          }
           }
         },
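In the new flow above, each entry pushed into removedRowsData carries the composite key value keyed by the composite key name, plus the cloned row and its original index, so redo can re-issue the bulk delete and undo can bulk re-insert rows at the right positions. Roughly, the payload handed to addUndo looks like the following sketch (values are illustrative; 'Id' stands in for whatever extractPk returns for the table's primary key columns):

// Sketch of one removedRowsData entry, as built in deleteSelectedRows above.
interface RemovedRowEntry {
  [compositeKey: string]: any
  row: Record<string, any> // cloned Row snapshot, used by undo to re-insert the data
  rowIndex: number // position in formattedData at deletion time, used to splice the row back in
}

const compositePrimaryKey = 'Id'
const removedRowsData: RemovedRowEntry[] = [
  { [compositePrimaryKey]: '42', row: { row: { Id: '42', Title: 'foo' }, oldRow: {}, rowMeta: {} }, rowIndex: 17 },
]

// redo re-issues the bulk delete from just the key values:
const deletePayload = removedRowsData.map((r) => ({ [compositePrimaryKey]: r[compositePrimaryKey] as string }))
// -> [{ Id: '42' }], the shape bulkDeleteRows forwards to the bulk data API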
@@ -645,23 +737,27 @@ export function useData(args: {
     // plus one because we want to include the end row
     let row = start + 1

-    const removedRowsData: { id?: string; row: Row; rowIndex: number }[] = []
+    let removedRowsData: Record<string, any>[] = []
+    let compositePrimaryKey = ''

     while (row--) {
       try {
         const { row: rowObj, rowMeta } = formattedData.value[row] as Record<string, any>

         if (!rowMeta.new) {
-          const id = meta?.value?.columns
-            ?.filter((c) => c.pk)
-            .map((c) => rowObj[c.title as string])
-            .join('___')
-
-          const successfulDeletion = await deleteRowById(id as string)
-          if (!successfulDeletion) {
-            continue
+          const extractedPk = extractPk(meta?.value?.columns as ColumnType[])
+          const compositePkValue = extractPkFromRow(rowObj, meta?.value?.columns as ColumnType[])
+
+          if (extractedPk && compositePkValue) {
+            if (!compositePrimaryKey) compositePrimaryKey = extractedPk
+
+            removedRowsData.push({
+              [compositePrimaryKey]: compositePkValue as string,
+              row: clone(formattedData.value[row]) as Row,
+              rowIndex: row as number,
+            })
           }
-          removedRowsData.push({ id, row: clone(formattedData.value[row]), rowIndex: row })
         }
         formattedData.value.splice(row, 1)
       } catch (e: any) {
         return message.error(`${t('msg.error.deleteRowFailed')}: ${await extractSdkResponseErrorMsg(e)}`)
       }
@@ -669,38 +765,82 @@ export function useData(args: {
       if (row === end) break
     }

     if (!removedRowsData.length) return

+    try {
+      const removedRowIds: Record<string, any>[] = await bulkDeleteRows(
+        removedRowsData.map((row) => ({ [compositePrimaryKey]: row[compositePrimaryKey] as string })),
+      )
+
+      if (Array.isArray(removedRowIds)) {
+        const removedRowsDataSet = new Set(removedRowIds.map((row) => row[compositePrimaryKey]))
+
+        removedRowsData = removedRowsData.filter((row) => removedRowsDataSet.has(row[compositePrimaryKey] as string))
+
+        const rowIndexesSet = new Set(removedRowsData.map((row) => row.rowIndex))
+        formattedData.value = formattedData.value.filter((_, index) => rowIndexesSet.has(index))
+      } else {
+        removedRowsData = []
+      }
+    } catch (e: any) {
+      return message.error(`${t('msg.error.deleteRowFailed')}: ${await extractSdkResponseErrorMsg(e)}`)
+    }
+
+    if (!removedRowsData.length) return
+
     addUndo({
       redo: {
-        fn: async function redo(this: UndoRedoAction, removedRowsData: { id?: string; row: Row; rowIndex: number }[]) {
-          for (const { id, row } of removedRowsData) {
-            await deleteRowById(id as string)
-            const pk: Record<string, string> = rowPkData(row.row, meta?.value?.columns as ColumnType[])
-            const rowIndex = findIndexByPk(pk, formattedData.value)
-            if (rowIndex !== -1) formattedData.value.splice(rowIndex, 1)
-            paginationData.value.totalRows = paginationData.value.totalRows! - 1
+        fn: async function redo(this: UndoRedoAction, removedRowsData: Record<string, any>[], compositePrimaryKey: string) {
+          const removedRowIds = await bulkDeleteRows(
+            removedRowsData.map((row) => ({ [compositePrimaryKey]: row[compositePrimaryKey] as string })),
+          )
+
+          if (Array.isArray(removedRowIds)) {
+            for (const { row } of removedRowsData) {
+              const primaryKey: Record<string, string> = rowPkData(row.row, meta?.value?.columns as ColumnType[])
+              const rowIndex = findIndexByPk(primaryKey, formattedData.value)
+              if (rowIndex !== -1) formattedData.value.splice(rowIndex, 1)
+              paginationData.value.totalRows = paginationData.value.totalRows! - 1
+            }
+          }

           await callbacks?.syncPagination?.()
         },
-        args: [removedRowsData],
+        args: [removedRowsData, compositePrimaryKey],
       },
       undo: {
         fn: async function undo(
           this: UndoRedoAction,
-          removedRowsData: { id?: string; row: Row; rowIndex: number }[],
+          removedRowsData: Record<string, any>[],
           pg: { page: number; pageSize: number },
         ) {
-          for (const { row, rowIndex } of removedRowsData.slice().reverse()) {
-            const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
-            row.row = { ...pkData, ...row.row }
-            await insertRow(row, {}, {}, true)
-            if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
-              if (pg.page === paginationData.value.page) {
-                formattedData.value.splice(rowIndex, 0, row)
+          const rowsToInsert = removedRowsData
+            .map((row) => {
+              const pkData = rowPkData(row.row, meta.value?.columns as ColumnType[])
+              row.row = { ...pkData, ...row.row }
+              return row
+            })
+            .reverse()
+
+          const insertedRowIds = await bulkInsertRows(
+            rowsToInsert.map((row) => row.row),
+            undefined,
+            true,
+          )
+
+          if (Array.isArray(insertedRowIds)) {
+            for (const { row, rowIndex } of rowsToInsert) {
+              recoverLTARRefs(row.row)
+
+              if (rowIndex !== -1 && pg.pageSize === paginationData.value.pageSize) {
+                if (pg.page === paginationData.value.page) {
+                  formattedData.value.splice(rowIndex, 0, row)
+                } else {
+                  await callbacks?.changePage?.(pg.page)
+                }
              } else {
-                await callbacks?.changePage?.(pg.page)
+                await callbacks?.loadData?.()
              }
+            }
+          } else {
+            await callbacks?.loadData?.()
+          }
          }
        },
@@ -714,6 +854,27 @@ export function useData(args: {
       await callbacks?.globalCallback?.()
     }
   }

+  async function bulkDeleteRows(
+    rows: Record<string, string>[],
+    { metaValue = meta.value, viewMetaValue = viewMeta.value }: { metaValue?: TableType; viewMetaValue?: ViewType } = {},
+  ) {
+    isPaginationLoading.value = true
+    try {
+      const bulkDeletedRowsData = await $api.dbDataTableRow.delete(metaValue?.id as string, rows, {
+        viewId: viewMetaValue?.id as string,
+      })
+
+      await callbacks?.syncCount?.()
+
+      return bulkDeletedRowsData
+    } catch (error: any) {
+      message.error(await extractSdkResponseErrorMsg(error))
+    } finally {
+      await callbacks?.globalCallback?.()
+      isPaginationLoading.value = false
+    }
+  }
+
   const removeRowIfNew = (row: Row) => {
     const index = formattedData.value.indexOf(row)
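The new `bulkDeleteRows` expects an array of objects keyed by the composite primary key title(s) and forwards them to the bulk data API. A minimal, hypothetical call shape (the 'Id' key and row ids are illustrative):

// Sketch only: assumes it runs inside useData, where bulkDeleteRows is in scope and the
// composite key was built by deleteSelectedRows / deleteRangeOfRows as shown above.
const removedRowIds = await bulkDeleteRows([{ Id: '1' }, { Id: '2' }])

// On failure the error is already reported via message.error and the result is undefined,
// which is why callers guard with Array.isArray before touching local state.
if (Array.isArray(removedRowIds)) {
  await callbacks?.syncPagination?.()
}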
@@ -737,5 +898,7 @@ export function useData(args: {
     bulkUpdateView,
     selectedAllRecords,
     removeRowIfNew,
+    bulkDeleteRows,
+    bulkInsertRows,
   }
 }