
Merge branch 'develop' into feat/pnpm

pull/5903/head
Wing-Kam Wong · 1 year ago
commit f984813acb
11 changed files:

 1. .github/workflows/playwright-test-workflow.yml (20)
 2. packages/nocodb/package.json (3)
 3. packages/nocodb/src/controllers/test/TestResetService/index.ts (21)
 4. packages/nocodb/src/db/BaseModelSqlv2.ts (149)
 5. packages/nocodb/src/models/Column.ts (1)
 6. packages/nocodb/src/models/Model.ts (14)
 7. packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts (153)
 8. packages/nocodb/src/services/bulk-data-alias.service.ts (2)
 9. tests/playwright/pages/Dashboard/common/Cell/index.ts (4)
10. tests/playwright/tests/db/filters.spec.ts (1)
11. tests/playwright/tests/db/timezone.spec.ts (14)

.github/workflows/playwright-test-workflow.yml (20)

@@ -70,10 +70,30 @@ jobs:
working-directory: ./
run: docker-compose -f ./tests/playwright/scripts/docker-compose-pg-pw-quick.yml up -d &
- name: Run backend
if: ${{ inputs.db == 'sqlite' }}
working-directory: ./packages/nocodb
run: |
pnpm install
pnpm run watch:run:playwright > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log &
- name: Run backend:mysql
if: ${{ inputs.db == 'mysql' }}
working-directory: ./packages/nocodb
run: |
pnpm install
pnpm run watch:run:playwright:mysql > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log &
- name: Run backend:pg
if: ${{ inputs.db == 'pg' }}
working-directory: ./packages/nocodb
run: |
pnpm install
pnpm run watch:run:playwright:pg > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log &
- name: Cache playwright npm modules
uses: actions/cache@v3
id: playwright-cache
with:
path: |
**/tests/playwright/node_modules
key: cache-nc-playwright-${{ hashFiles('**/tests/playwright/package-lock.json') }}
- name: Install dependencies
if: steps.playwright-cache.outputs.cache-hit != 'true'
working-directory: ./tests/playwright

packages/nocodb/package.json (3)

@@ -37,6 +37,8 @@
"watch:run": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"",
"watch:run:mysql": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunMysql --log-error --project tsconfig.json\"",
"watch:run:pg": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG --log-error --project tsconfig.json\"",
"watch:run:playwright:mysql": "rm -f ./test_noco.db; cross-env NC_DB=\"mysql2://localhost:3306?u=root&p=password&d=pw_ncdb\" PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"",
"watch:run:playwright:pg": "rm -f ./test_noco.db; cross-env NC_DB=\"pg://localhost:5432?u=postgres&p=password&d=pw_ncdb\" PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"",
"watch:run:playwright": "rm -f ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"",
"watch:run:playwright:quick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"",
"watch:run:playwright:pg:cyquick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG_CyQuick.ts --log-error --project tsconfig.json\"",
@@ -122,6 +124,7 @@
"nocodb-sdk": "file:../nocodb-sdk",
"nodemailer": "^6.4.10",
"object-hash": "^3.0.0",
"object-sizeof": "^2.6.1",
"os-locale": "^6.0.2",
"p-queue": "^6.6.2",
"papaparse": "^5.4.0",

packages/nocodb/src/controllers/test/TestResetService/index.ts (21)

@@ -116,14 +116,19 @@ export class TestResetService {
if (project) {
await removeProjectUsersFromCache(project);
const bases = await project.getBases();
for (const base of bases) {
await NcConnectionMgrv2.deleteAwait(base);
await base.delete(Noco.ncMeta, { force: true });
}
await Project.delete(project.id);
// Kludge: Soft reset to support PG as root DB in PW tests
// Revisit to fix this later
// const bases = await project.getBases();
//
// for (const base of bases) {
// await NcConnectionMgrv2.deleteAwait(base);
// await base.delete(Noco.ncMeta, { force: true });
// }
//
// await Project.delete(project.id);
await Project.softDelete(project.id);
}
if (dbType == 'sqlite') {
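
Note: the reset above now soft-deletes the project instead of dropping its bases, because tearing down the shared PG root-DB connection between Playwright runs proved unreliable (see the kludge comment). A hypothetical sketch of what such a softDelete amounts to (the flag name and meta call are assumptions, not the actual implementation):

// mark the project as deleted instead of removing it, so open
// root-DB connections are never torn down mid-test (assumed shape)
static async softDelete(projectId: string, ncMeta = Noco.ncMeta) {
  await ncMeta.metaUpdate(
    null,
    null,
    MetaTable.PROJECT,
    { deleted: true },
    projectId,
  );
}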

packages/nocodb/src/db/BaseModelSqlv2.ts (149)

@@ -2244,38 +2244,152 @@ class BaseModelSqlv2 {
chunkSize: _chunkSize = 100,
cookie,
foreign_key_checks = true,
skip_hooks = false,
raw = false,
}: {
chunkSize?: number;
cookie?: any;
foreign_key_checks?: boolean;
skip_hooks?: boolean;
raw?: boolean;
} = {},
) {
let trx;
try {
// TODO: ag column handling for raw bulk insert
const insertDatas = raw
? datas
: await Promise.all(
datas.map(async (d) => {
await populatePk(this.model, d);
return this.model.mapAliasToColumn(
d,
this.clientMeta,
this.dbDriver,
);
}),
);
// await this.beforeInsertb(insertDatas, null);
const insertDatas = raw ? datas : [];
if (!raw) {
for (const data of datas) {
await this.validate(data);
await this.model.getColumns();
for (const d of datas) {
const insertObj = {};
// populate pk, map alias to column, validate data
for (let i = 0; i < this.model.columns.length; ++i) {
const col = this.model.columns[i];
// populate pk columns
if (col.pk) {
if (col.meta?.ag && !d[col.title]) {
d[col.title] =
col.meta?.ag === 'nc' ? `rc_${nanoidv2()}` : uuidv4();
}
}
// map alias to column
if (!isVirtualCol(col)) {
let val =
d?.[col.column_name] !== undefined
? d?.[col.column_name]
: d?.[col.title];
if (val !== undefined) {
if (
col.uidt === UITypes.Attachment &&
typeof val !== 'string'
) {
val = JSON.stringify(val);
}
if (col.uidt === UITypes.DateTime && dayjs(val).isValid()) {
const { isMySQL, isSqlite, isMssql, isPg } = this.clientMeta;
if (
val.indexOf('-') < 0 &&
val.indexOf('+') < 0 &&
val.slice(-1) !== 'Z'
) {
// if no timezone is given,
// append +00:00 to treat the value as UTC
val += '+00:00';
}
if (isMySQL) {
// first convert the value to utc
// from UI
// e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00
// from API
// e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00
// if timezone info is not found - considered as utc
// e.g. 2022-01-01 20:00:00 -> 2022-01-01 20:00:00
// if timezone info is found
// e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00
// e.g. 2022-01-01 20:00:00+00:00 -> 2022-01-01 20:00:00
// e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00
// then we use CONVERT_TZ to convert that in the db timezone
val = this.dbDriver.raw(
`CONVERT_TZ(?, '+00:00', @@GLOBAL.time_zone)`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ss')],
);
} else if (isSqlite) {
// convert to UTC
// e.g. 2022-01-01T10:00:00.000Z -> 2022-01-01 10:00:00+00:00
val = dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ');
} else if (isPg) {
// convert to UTC
// e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00
// then convert to db timezone
val = this.dbDriver.raw(
`? AT TIME ZONE CURRENT_SETTING('timezone')`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')],
);
} else if (isMssql) {
// convert to UTC
// e.g. 2023-05-10T08:49:32.000Z -> 2023-05-10 08:49:32-08:00
// then convert to db timezone
val = this.dbDriver.raw(
`SWITCHOFFSET(CONVERT(datetimeoffset, ?), DATENAME(TzOffset, SYSDATETIMEOFFSET()))`,
[dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')],
);
} else {
// e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00
val = dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ');
}
}
insertObj[sanitize(col.column_name)] = val;
}
}
// validate data
if (col?.meta?.validate && col?.validate) {
const validate = col.getValidators();
const cn = col.column_name;
const columnTitle = col.title;
if (validate) {
const { func, msg } = validate;
for (let j = 0; j < func.length; ++j) {
const fn =
typeof func[j] === 'string'
? customValidators[func[j]]
? customValidators[func[j]]
: Validator[func[j]]
: func[j];
const columnValue =
insertObj?.[cn] || insertObj?.[columnTitle];
const arg =
typeof func[j] === 'string'
? columnValue + ''
: columnValue;
if (
![null, undefined, ''].includes(columnValue) &&
!(fn.constructor.name === 'AsyncFunction'
? await fn(arg)
: fn(arg))
) {
NcError.badRequest(
msg[j]
.replace(/\{VALUE}/g, columnValue)
.replace(/\{cn}/g, columnTitle),
);
}
}
}
}
}
insertDatas.push(insertObj);
}
}
// await this.beforeInsertb(insertDatas, null);
// falls back to `10` if the database client is sqlite
// to avoid the `too many SQL variables` error
// see: https://www.sqlite.org/limits.html
@@ -2308,7 +2422,8 @@ class BaseModelSqlv2 {
await trx.commit();
if (!raw) await this.afterBulkInsert(insertDatas, this.dbDriver, cookie);
if (!raw && !skip_hooks)
await this.afterBulkInsert(insertDatas, this.dbDriver, cookie);
return response;
} catch (e) {
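
Note: the DateTime branch above first normalizes each value to UTC, then applies a driver-specific conversion (CONVERT_TZ on MySQL, AT TIME ZONE on PG, SWITCHOFFSET on MSSQL). A condensed sketch of just the normalization step, assuming dayjs with the utc plugin; the regex check here is an assumption, the diff uses plain indexOf checks on '-', '+' and a trailing 'Z':

import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);

// values without explicit offset info are treated as UTC by appending
// '+00:00' before converting, mirroring the bulkInsert logic above
function normalizeToUtc(val: string): string {
  const hasTz = /Z$|[+-]\d{2}:?\d{2}$/.test(val);
  const input = hasTz ? val : `${val}+00:00`;
  return dayjs(input).utc().format('YYYY-MM-DD HH:mm:ssZ');
}

// e.g. normalizeToUtc('2022-01-01 20:00:00+08:00') -> '2022-01-01 12:00:00+00:00'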

packages/nocodb/src/models/Column.ts (1)

@@ -760,6 +760,7 @@ export default class Column<T = any> implements ColumnType {
});
}
for (const filter of filters) {
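// only root filters are deleted here; nested filters (fk_parent_id set)
// are assumed to be cleaned up along with their parent group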
if (filter.fk_parent_id) continue;
await Filter.delete(filter.id, ncMeta);
}
}

packages/nocodb/src/models/Model.ts (14)

@@ -14,6 +14,7 @@ import {
import { NcError } from '../helpers/catchError';
import { sanitize } from '../helpers/sqlSanitize';
import { extractProps } from '../helpers/extractProps';
import Hook from './Hook';
import Audit from './Audit';
import View from './View';
import Column from './Column';
@@ -376,6 +377,11 @@ export default class Model implements TableType {
await view.delete(ncMeta);
}
// delete associated hooks
for (const hook of await Hook.list({ fk_model_id: this.id }, ncMeta)) {
await Hook.delete(hook.id, ncMeta);
}
for (const col of await this.getColumns(ncMeta)) {
let colOptionTableName = null;
let cacheScopeName = null;
@@ -402,6 +408,14 @@
colOptionTableName = MetaTable.COL_FORMULA;
cacheScopeName = CacheScope.COL_FORMULA;
break;
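// QrCode and Barcode column options live in their own meta tables;
// remove those rows on model delete as well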
case UITypes.QrCode:
colOptionTableName = MetaTable.COL_QRCODE;
cacheScopeName = CacheScope.COL_QRCODE;
break;
case UITypes.Barcode:
colOptionTableName = MetaTable.COL_BARCODE;
cacheScopeName = CacheScope.COL_BARCODE;
break;
}
if (colOptionTableName && cacheScopeName) {
await ncMeta.metaDelete(null, null, colOptionTableName, {

packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts (153)

@@ -1,5 +1,6 @@
/* eslint-disable no-async-promise-executor */
import { RelationTypes, UITypes } from 'nocodb-sdk';
import sizeof from 'object-sizeof';
import EntityMap from './EntityMap';
import type { BulkDataAliasService } from '../../../../../services/bulk-data-alias.service';
import type { TablesService } from '../../../../../services/tables.service';
@@ -7,8 +8,8 @@ import type { TablesService } from '../../../../../services/tables.service';
import type { AirtableBase } from 'airtable/lib/airtable_base';
import type { TableType } from 'nocodb-sdk';
const BULK_DATA_BATCH_SIZE = 500;
const ASSOC_BULK_DATA_BATCH_SIZE = 1000;
const BULK_DATA_BATCH_COUNT = 20; // check buffer size every 20 records
const BULK_DATA_BATCH_SIZE = 50 * 1024; // in bytes
const BULK_PARALLEL_PROCESS = 5;
interface AirtableImportContext {
@@ -42,6 +43,12 @@ async function readAllData({
.eachPage(
async function page(records, fetchNextPage) {
if (!data) {
/*
  EntityMap is a sqlite3-backed table populated dynamically from the provided JSON data.
  It stores records temporarily and streams them back out in bulk for import,
  avoiding heap out-of-memory errors when importing large datasets.
*/
data = new EntityMap();
await data.init();
}
@@ -96,8 +103,8 @@ export async function importData({
services: AirtableImportContext;
}): Promise<EntityMap> {
try {
// @ts-ignore
const records = await readAllData({
// returns EntityMap which allows us to stream data
const records: EntityMap = await readAllData({
table,
base,
logDetailed,
@@ -108,41 +115,57 @@ export async function importData({
const readable = records.getStream();
const allRecordsCount = await records.getCount();
const promises = [];
let tempData = [];
let importedCount = 0;
let tempCount = 0;
// track in-flight async handlers so the stream can be paused and resumed;
// without this backpressure every record would be buffered in memory at once
let activeProcess = 0;
readable.on('data', async (record) => {
promises.push(
new Promise(async (resolve) => {
activeProcess++;
if (activeProcess >= BULK_PARALLEL_PROCESS) readable.pause();
const { id: rid, ...fields } = record;
const r = await nocoBaseDataProcessing_v2(sDB, table, {
id: rid,
fields,
});
tempData.push(r);
if (tempData.length >= BULK_DATA_BATCH_SIZE) {
let insertArray = tempData.splice(0, tempData.length);
await services.bulkDataService.bulkDataInsert({
projectName,
tableName: table.title,
body: insertArray,
cookie: {},
});
logBasic(
`:: Importing '${
table.title
}' data :: ${importedCount} - ${Math.min(
importedCount + BULK_DATA_BATCH_SIZE,
allRecordsCount,
)}`,
);
importedCount += insertArray.length;
insertArray = [];
tempCount++;
if (tempCount >= BULK_DATA_BATCH_COUNT) {
if (sizeof(tempData) >= BULK_DATA_BATCH_SIZE) {
readable.pause();
let insertArray = tempData.splice(0, tempData.length);
await services.bulkDataService.bulkDataInsert({
projectName,
tableName: table.title,
body: insertArray,
cookie: {},
skip_hooks: true,
});
logBasic(
`:: Importing '${
table.title
}' data :: ${importedCount} - ${Math.min(
importedCount + insertArray.length,
allRecordsCount,
)}`,
);
importedCount += insertArray.length;
insertArray = [];
readable.resume();
}
tempCount = 0;
}
activeProcess--;
if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume();
@@ -151,26 +174,31 @@ export async function importData({
);
});
readable.on('end', async () => {
// ensure all chunks are processed
await Promise.all(promises);
// insert remaining data
if (tempData.length > 0) {
await services.bulkDataService.bulkDataInsert({
projectName,
tableName: table.title,
body: tempData,
cookie: {},
skip_hooks: true,
});
logBasic(
`:: Importing '${
table.title
}' data :: ${importedCount} - ${Math.min(
importedCount + BULK_DATA_BATCH_SIZE,
importedCount + tempData.length,
allRecordsCount,
)}`,
);
importedCount += tempData.length;
tempData = [];
}
resolve(true);
});
});
@@ -219,7 +247,7 @@ export async function importLTARData({
curCol: { title?: string };
refCol: { title?: string };
}> = [];
const allData =
const allData: EntityMap =
records ||
(await readAllData({
table,
@@ -277,17 +305,16 @@
for await (const assocMeta of assocTableMetas) {
let assocTableData = [];
let importedCount = 0;
let tempCount = 0;
// extract insert data from records
// extract link data from records
await new Promise((resolve) => {
const promises = [];
const readable = allData.getStream();
let activeProcess = 0;
readable.on('data', async (record) => {
promises.push(
new Promise(async (resolve) => {
activeProcess++;
if (activeProcess >= BULK_PARALLEL_PROCESS) readable.pause();
const { id: _atId, ...rec } = record;
// todo: use actual alias instead of sanitized
@@ -299,42 +326,56 @@ export async function importLTARData({
[assocMeta.refCol.title]: id,
})),
);
if (assocTableData.length >= ASSOC_BULK_DATA_BATCH_SIZE) {
let insertArray = assocTableData.splice(0, assocTableData.length);
logBasic(
`:: Importing '${
table.title
}' LTAR data :: ${importedCount} - ${Math.min(
importedCount + ASSOC_BULK_DATA_BATCH_SIZE,
insertArray.length,
)}`,
);
await services.bulkDataService.bulkDataInsert({
projectName,
tableName: assocMeta.modelMeta.title,
body: insertArray,
cookie: {},
});
importedCount += insertArray.length;
insertArray = [];
tempCount++;
if (tempCount >= BULK_DATA_BATCH_COUNT) {
if (sizeof(assocTableData) >= BULK_DATA_BATCH_SIZE) {
readable.pause();
let insertArray = assocTableData.splice(
0,
assocTableData.length,
);
logBasic(
`:: Importing '${
table.title
}' LTAR data :: ${importedCount} - ${Math.min(
importedCount + insertArray.length,
insertArray.length,
)}`,
);
await services.bulkDataService.bulkDataInsert({
projectName,
tableName: assocMeta.modelMeta.title,
body: insertArray,
cookie: {},
skip_hooks: true,
});
importedCount += insertArray.length;
insertArray = [];
readable.resume();
}
tempCount = 0;
}
activeProcess--;
if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume();
resolve(true);
}),
);
});
readable.on('end', async () => {
// ensure all chunks are processed
await Promise.all(promises);
// insert remaining data
if (assocTableData.length > 0) {
logBasic(
`:: Importing '${
table.title
}' LTAR data :: ${importedCount} - ${Math.min(
importedCount + ASSOC_BULK_DATA_BATCH_SIZE,
importedCount + assocTableData.length,
assocTableData.length,
)}`,
);
@@ -344,11 +385,13 @@ export async function importLTARData({
tableName: assocMeta.modelMeta.title,
body: assocTableData,
cookie: {},
skip_hooks: true,
});
importedCount += assocTableData.length;
assocTableData = [];
}
resolve(true);
});
});
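
Note: both import loops above follow the same pattern: batch by approximate payload size (object-sizeof) rather than a fixed row count, check the size only every BULK_DATA_BATCH_COUNT records so sizeof stays cheap, and pause the stream while a batch is in flight. A minimal standalone sketch of that pattern, with flush standing in for bulkDataInsert and async iteration replacing the manual pause/resume bookkeeping:

import sizeof from 'object-sizeof';
import type { Readable } from 'stream';

const BATCH_COUNT = 20;       // check the buffer size every 20 records
const BATCH_SIZE = 50 * 1024; // flush once the buffer reaches ~50 KiB

async function sizeBatchedImport(
  readable: Readable,
  flush: (rows: unknown[]) => Promise<void>,
) {
  const buffer: unknown[] = [];
  let sinceCheck = 0;
  // async iteration applies backpressure: the stream is not read from
  // while a flush is awaited, so memory use stays bounded
  for await (const row of readable) {
    buffer.push(row);
    if (++sinceCheck >= BATCH_COUNT) {
      sinceCheck = 0;
      if (sizeof(buffer) >= BATCH_SIZE) {
        await flush(buffer.splice(0, buffer.length));
      }
    }
  }
  if (buffer.length > 0) await flush(buffer); // remaining partial batch
}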

packages/nocodb/src/services/bulk-data-alias.service.ts (2)

@@ -43,6 +43,7 @@ export class BulkDataAliasService {
cookie: any;
chunkSize?: number;
foreign_key_checks?: boolean;
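// when true, after-insert hooks are not fired for the batch
// (the Airtable import sets this per batch)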
skip_hooks?: boolean;
raw?: boolean;
},
) {
@@ -54,6 +55,7 @@
{
cookie: param.cookie,
foreign_key_checks: param.foreign_key_checks,
skip_hooks: param.skip_hooks,
raw: param.raw,
},
],

tests/playwright/pages/Dashboard/common/Cell/index.ts (4)

@@ -288,7 +288,9 @@ export class CellPageObject extends BasePage {
for (let i = 0; i < value.length; ++i) {
await chips.nth(i).locator('.name').waitFor({ state: 'visible' });
await chips.nth(i).locator('.name').scrollIntoViewIfNeeded();
await expect(await chips.nth(i).locator('.name')).toHaveText(value[i]);
await chips.nth(i).locator('.name').waitFor({ state: 'visible' });
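// assert membership instead of position, so the check no longer
// depends on the order in which chips are rendered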
const chipText = await chips.nth(i).locator('.name').textContent();
expect(value).toContain(chipText);
}
if (verifyChildList) {

tests/playwright/tests/db/filters.spec.ts (1)

@@ -7,6 +7,7 @@ import { Api } from 'nocodb-sdk';
import { rowMixedValue } from '../../setup/xcdb-records';
import dayjs from 'dayjs';
import { createDemoTable } from '../../setup/demoTable';
import { isPg } from '../../setup/db';
let dashboard: DashboardPage, toolbar: ToolbarPage;
let context: any;

tests/playwright/tests/db/timezone.spec.ts (14)

@@ -539,6 +539,8 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a
let dashboard: DashboardPage;
let context: any;
let counter = 0;
const expectedDisplayValues = {
pg: {
// PG ignores timezone information for datetime without timezone
@@ -587,8 +589,8 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a
'xc-auth': context.token,
},
});
await createTableWithDateTimeColumn(context.dbType, 'datetimetable01');
counter++;
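// suffix the external table name with a counter so each run works on a fresh table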
await createTableWithDateTimeColumn(context.dbType, `datetimetable01${counter}`);
});
// ExtDB : DateAdd, DateTime_Diff verification
@@ -596,13 +598,13 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a
// - verify API response value
//
test('Formula, verify display value', async () => {
await connectToExtDb(context, 'datetimetable01');
await connectToExtDb(context, `datetimetable01${counter}`);
await dashboard.rootPage.reload();
await dashboard.rootPage.waitForTimeout(2000);
// insert a record to work with formula experiments
//
await dashboard.treeView.openBase({ title: 'datetimetable01' });
await dashboard.treeView.openBase({ title: `datetimetable01${counter}` });
await dashboard.treeView.openTable({ title: 'MyTable' });
// Create formula column (dummy)
@@ -756,14 +758,14 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a
});
test('Verify display value, UI insert, API response', async () => {
await connectToExtDb(context, 'datetimetable01');
await connectToExtDb(context, `datetimetable01${counter}`);
await dashboard.rootPage.reload();
await dashboard.rootPage.waitForTimeout(2000);
// get timezone offset
const formattedOffset = getBrowserTimezoneOffset();
await dashboard.treeView.openBase({ title: 'datetimetable01' });
await dashboard.treeView.openBase({ title: `datetimetable01${counter}` });
await dashboard.treeView.openTable({ title: 'MyTable' });
if (isSqlite(context)) {
