From 853ad3afa1c6b591b720f419fe026446df8a14b4 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Wed, 21 Jun 2023 12:18:07 +0530 Subject: [PATCH 01/15] test: pg-meta-db for pw Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- .github/workflows/playwright-test-workflow.yml | 7 +++++++ packages/nocodb/package.json | 1 + 2 files changed, 8 insertions(+) diff --git a/.github/workflows/playwright-test-workflow.yml b/.github/workflows/playwright-test-workflow.yml index c9e8244c8f..fef81cebcf 100644 --- a/.github/workflows/playwright-test-workflow.yml +++ b/.github/workflows/playwright-test-workflow.yml @@ -69,10 +69,17 @@ jobs: working-directory: ./packages/nc-gui run: npm run ci:run - name: Run backend + if: ${{ inputs.db != 'pg' }} working-directory: ./packages/nocodb run: | npm install npm run watch:run:playwright > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log & + - name: Run backend:pg + if: ${{ inputs.db == 'pg' }} + working-directory: ./packages/nocodb + run: | + npm install + npm run watch:run:playwright:pg > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log & - name: Cache playwright npm modules uses: actions/cache@v3 id: playwright-cache diff --git a/packages/nocodb/package.json b/packages/nocodb/package.json index cad93f1793..f431bd9ac9 100644 --- a/packages/nocodb/package.json +++ b/packages/nocodb/package.json @@ -34,6 +34,7 @@ "watch:run": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"", "watch:run:mysql": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunMysql --log-error --project tsconfig.json\"", "watch:run:pg": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG --log-error --project tsconfig.json\"", + "watch:run:playwright:pg": "rm -f ./test_noco.db; cross-env 
NC_DB=pg://localhost:5432?u=postgres&p=password&d=pw_ncdb PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright": "rm -f ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright:quick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"", "watch:run:playwright:pg:cyquick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG_CyQuick.ts --log-error --project tsconfig.json\"", From 08568fa2d978f63099e507bc541379f6600b3947 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 12:34:37 +0530 Subject: [PATCH 02/15] test: NC_DB in quotes Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- packages/nocodb/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/nocodb/package.json b/packages/nocodb/package.json index 1bed1bcafd..e657bd17f8 100644 --- a/packages/nocodb/package.json +++ b/packages/nocodb/package.json @@ -34,7 +34,7 @@ "watch:run": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"", "watch:run:mysql": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunMysql --log-error --project tsconfig.json\"", "watch:run:pg": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG 
--log-error --project tsconfig.json\"", - "watch:run:playwright:pg": "rm -f ./test_noco.db; cross-env NC_DB=pg://localhost:5432?u=postgres&p=password&d=pw_ncdb PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", + "watch:run:playwright:pg": "rm -f ./test_noco.db; cross-env NC_DB=\"pg://localhost:5432?u=postgres&p=password&d=pw_ncdb\" PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright": "rm -f ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright:quick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"", "watch:run:playwright:pg:cyquick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG_CyQuick.ts --log-error --project tsconfig.json\"", From e78428a9c1aecc27ed5444c1581036ec75286b63 Mon Sep 17 00:00:00 2001 From: mertmit Date: Sat, 24 Jun 2023 10:34:12 +0300 Subject: [PATCH 03/15] fix: dynamic batch based on byte size Signed-off-by: mertmit --- packages/nocodb/package-lock.json | 51 ++++++++++ packages/nocodb/package.json | 1 + .../at-import/helpers/readAndProcessData.ts | 97 ++++++++++--------- 3 files changed, 104 insertions(+), 45 deletions(-) diff --git a/packages/nocodb/package-lock.json b/packages/nocodb/package-lock.json index 01c10b6501..5dd6210c1c 100644 --- a/packages/nocodb/package-lock.json +++ b/packages/nocodb/package-lock.json @@ 
-86,6 +86,7 @@ "nocodb-sdk": "file:../nocodb-sdk", "nodemailer": "^6.4.10", "object-hash": "^3.0.0", + "object-sizeof": "^2.6.1", "os-locale": "^6.0.2", "p-queue": "^6.6.2", "papaparse": "^5.3.1", @@ -13568,6 +13569,37 @@ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz", "integrity": "sha512-ncrLw+X55z7bkl5PnUvHwFK9FcGuFYo9gtjws2XtSzL+aZ8tm830P60WJ0dSmFVaSalWieW5MD7kEdnXda9yJw==" }, + "node_modules/object-sizeof": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/object-sizeof/-/object-sizeof-2.6.1.tgz", + "integrity": "sha512-a7VJ1Zx7ZuHceKwjgfsSqzV/X0PVGvpZz7ho3Dn4Cs0LLcR5e5WuV+gsbizmplD8s0nAXMJmckKB2rkSiPm/Gg==", + "dependencies": { + "buffer": "^6.0.3" + } + }, + "node_modules/object-sizeof/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, "node_modules/object.assign": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", @@ -28765,6 +28797,25 @@ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz", "integrity": "sha512-ncrLw+X55z7bkl5PnUvHwFK9FcGuFYo9gtjws2XtSzL+aZ8tm830P60WJ0dSmFVaSalWieW5MD7kEdnXda9yJw==" }, + "object-sizeof": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/object-sizeof/-/object-sizeof-2.6.1.tgz", + "integrity": "sha512-a7VJ1Zx7ZuHceKwjgfsSqzV/X0PVGvpZz7ho3Dn4Cs0LLcR5e5WuV+gsbizmplD8s0nAXMJmckKB2rkSiPm/Gg==", + "requires": { + "buffer": "^6.0.3" + }, + "dependencies": { + "buffer": { + "version": "6.0.3", + "resolved": 
"https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + } + } + }, "object.assign": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", diff --git a/packages/nocodb/package.json b/packages/nocodb/package.json index 000e060a0a..61c99793b5 100644 --- a/packages/nocodb/package.json +++ b/packages/nocodb/package.json @@ -119,6 +119,7 @@ "nocodb-sdk": "file:../nocodb-sdk", "nodemailer": "^6.4.10", "object-hash": "^3.0.0", + "object-sizeof": "^2.6.1", "os-locale": "^6.0.2", "p-queue": "^6.6.2", "papaparse": "^5.3.1", diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts index 659470d00c..c4a284c0e1 100644 --- a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts +++ b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts @@ -1,5 +1,6 @@ /* eslint-disable no-async-promise-executor */ import { RelationTypes, UITypes } from 'nocodb-sdk'; +import sizeof from 'object-sizeof'; import EntityMap from './EntityMap'; import type { BulkDataAliasService } from '../../../../../services/bulk-data-alias.service'; import type { TablesService } from '../../../../../services/tables.service'; @@ -7,8 +8,8 @@ import type { TablesService } from '../../../../../services/tables.service'; import type { AirtableBase } from 'airtable/lib/airtable_base'; import type { TableType } from 'nocodb-sdk'; -const BULK_DATA_BATCH_SIZE = 500; -const ASSOC_BULK_DATA_BATCH_SIZE = 1000; +const BULK_DATA_BATCH_COUNT = 100; // check size for every 100 records +const BULK_DATA_BATCH_SIZE = 102400; // in bytes const BULK_PARALLEL_PROCESS = 5; interface AirtableImportContext { @@ -122,27 +123,28 @@ export async function 
importData({ fields, }); tempData.push(r); - - if (tempData.length >= BULK_DATA_BATCH_SIZE) { - let insertArray = tempData.splice(0, tempData.length); - - await services.bulkDataService.bulkDataInsert({ - projectName, - tableName: table.title, - body: insertArray, - cookie: {}, - }); - - logBasic( - `:: Importing '${ - table.title - }' data :: ${importedCount} - ${Math.min( - importedCount + BULK_DATA_BATCH_SIZE, - allRecordsCount, - )}`, - ); - importedCount += insertArray.length; - insertArray = []; + if (tempData.length % BULK_DATA_BATCH_COUNT === 0) { + if (sizeof(tempData) >= BULK_DATA_BATCH_SIZE) { + let insertArray = tempData.splice(0, tempData.length); + + await services.bulkDataService.bulkDataInsert({ + projectName, + tableName: table.title, + body: insertArray, + cookie: {}, + }); + + logBasic( + `:: Importing '${ + table.title + }' data :: ${importedCount} - ${Math.min( + importedCount + insertArray.length, + allRecordsCount, + )}`, + ); + importedCount += insertArray.length; + insertArray = []; + } } activeProcess--; if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume(); @@ -164,7 +166,7 @@ export async function importData({ `:: Importing '${ table.title }' data :: ${importedCount} - ${Math.min( - importedCount + BULK_DATA_BATCH_SIZE, + importedCount + tempData.length, allRecordsCount, )}`, ); @@ -300,26 +302,31 @@ export async function importLTARData({ })), ); - if (assocTableData.length >= ASSOC_BULK_DATA_BATCH_SIZE) { - let insertArray = assocTableData.splice(0, assocTableData.length); - logBasic( - `:: Importing '${ - table.title - }' LTAR data :: ${importedCount} - ${Math.min( - importedCount + ASSOC_BULK_DATA_BATCH_SIZE, - insertArray.length, - )}`, - ); - - await services.bulkDataService.bulkDataInsert({ - projectName, - tableName: assocMeta.modelMeta.title, - body: insertArray, - cookie: {}, - }); - - importedCount += insertArray.length; - insertArray = []; + if (assocTableData.length % BULK_DATA_BATCH_COUNT === 0) { + if 
(sizeof(assocTableData) >= BULK_DATA_BATCH_SIZE) { + let insertArray = assocTableData.splice( + 0, + assocTableData.length, + ); + logBasic( + `:: Importing '${ + table.title + }' LTAR data :: ${importedCount} - ${Math.min( + importedCount + insertArray.length, + insertArray.length, + )}`, + ); + + await services.bulkDataService.bulkDataInsert({ + projectName, + tableName: assocMeta.modelMeta.title, + body: insertArray, + cookie: {}, + }); + + importedCount += insertArray.length; + insertArray = []; + } } activeProcess--; if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume(); @@ -334,7 +341,7 @@ export async function importLTARData({ `:: Importing '${ table.title }' LTAR data :: ${importedCount} - ${Math.min( - importedCount + ASSOC_BULK_DATA_BATCH_SIZE, + importedCount + assocTableData.length, assocTableData.length, )}`, ); From 764cd102aafed686d8b6a0fcab19e86046bf0d1f Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 13:22:14 +0530 Subject: [PATCH 04/15] test: clear hooks during project reset Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- packages/nocodb/src/models/Model.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/nocodb/src/models/Model.ts b/packages/nocodb/src/models/Model.ts index 9ed0b189ca..5fa4ac4ae9 100644 --- a/packages/nocodb/src/models/Model.ts +++ b/packages/nocodb/src/models/Model.ts @@ -14,6 +14,7 @@ import { import { NcError } from '../helpers/catchError'; import { sanitize } from '../helpers/sqlSanitize'; import { extractProps } from '../helpers/extractProps'; +import Hook from './Hook'; import Audit from './Audit'; import View from './View'; import Column from './Column'; @@ -376,6 +377,11 @@ export default class Model implements TableType { await view.delete(ncMeta); } + // delete associated hooks + for (const hook of await Hook.list({ fk_model_id: this.id }, ncMeta)) { + await Hook.delete(hook.id, ncMeta); + } + for (const col of 
await this.getColumns(ncMeta)) { let colOptionTableName = null; let cacheScopeName = null; From c86bdae4c0fa32a8926970184f7da3a187e12495 Mon Sep 17 00:00:00 2001 From: mertmit Date: Sat, 24 Jun 2023 11:31:27 +0300 Subject: [PATCH 05/15] fix: keep track of batch count instead of mod Signed-off-by: mertmit --- .../at-import/helpers/readAndProcessData.ts | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts index c4a284c0e1..43e1ed5a1c 100644 --- a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts +++ b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts @@ -8,8 +8,8 @@ import type { TablesService } from '../../../../../services/tables.service'; import type { AirtableBase } from 'airtable/lib/airtable_base'; import type { TableType } from 'nocodb-sdk'; -const BULK_DATA_BATCH_COUNT = 100; // check size for every 100 records -const BULK_DATA_BATCH_SIZE = 102400; // in bytes +const BULK_DATA_BATCH_COUNT = 20; // check size for every 100 records +const BULK_DATA_BATCH_SIZE = 51200; // in bytes const BULK_PARALLEL_PROCESS = 5; interface AirtableImportContext { @@ -112,19 +112,25 @@ export async function importData({ let tempData = []; let importedCount = 0; let activeProcess = 0; + let tempCount = 0; readable.on('data', async (record) => { promises.push( new Promise(async (resolve) => { activeProcess++; if (activeProcess >= BULK_PARALLEL_PROCESS) readable.pause(); + const { id: rid, ...fields } = record; const r = await nocoBaseDataProcessing_v2(sDB, table, { id: rid, fields, }); tempData.push(r); - if (tempData.length % BULK_DATA_BATCH_COUNT === 0) { + tempCount++; + + if (tempCount >= BULK_DATA_BATCH_COUNT) { if (sizeof(tempData) >= BULK_DATA_BATCH_SIZE) { + readable.pause(); + let insertArray = tempData.splice(0, 
tempData.length); await services.bulkDataService.bulkDataInsert({ @@ -144,7 +150,10 @@ export async function importData({ ); importedCount += insertArray.length; insertArray = []; + + readable.resume(); } + tempCount = 0; } activeProcess--; if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume(); @@ -285,6 +294,7 @@ export async function importLTARData({ const promises = []; const readable = allData.getStream(); let activeProcess = 0; + let tempCount = 0; readable.on('data', async (record) => { promises.push( new Promise(async (resolve) => { @@ -301,9 +311,12 @@ export async function importLTARData({ [assocMeta.refCol.title]: id, })), ); + tempCount++; - if (assocTableData.length % BULK_DATA_BATCH_COUNT === 0) { + if (tempCount >= BULK_DATA_BATCH_COUNT) { if (sizeof(assocTableData) >= BULK_DATA_BATCH_SIZE) { + readable.pause(); + let insertArray = assocTableData.splice( 0, assocTableData.length, @@ -326,7 +339,10 @@ export async function importLTARData({ importedCount += insertArray.length; insertArray = []; + + readable.resume(); } + tempCount = 0; } activeProcess--; if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume(); From acb0f8a6e812619bb29284d3e3282f1e59766339 Mon Sep 17 00:00:00 2001 From: mertmit Date: Sat, 24 Jun 2023 11:39:51 +0300 Subject: [PATCH 06/15] fix: remove unnecessary logic Signed-off-by: mertmit --- .../jobs/jobs/at-import/helpers/readAndProcessData.ts | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts index 43e1ed5a1c..9848331a56 100644 --- a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts +++ b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts @@ -293,13 +293,10 @@ export async function importLTARData({ await new Promise((resolve) => { const promises = []; const readable = allData.getStream(); - let activeProcess = 
0; let tempCount = 0; readable.on('data', async (record) => { promises.push( new Promise(async (resolve) => { - activeProcess++; - if (activeProcess >= BULK_PARALLEL_PROCESS) readable.pause(); const { id: _atId, ...rec } = record; // todo: use actual alias instead of sanitized @@ -344,8 +341,6 @@ export async function importLTARData({ } tempCount = 0; } - activeProcess--; - if (activeProcess < BULK_PARALLEL_PROCESS) readable.resume(); resolve(true); }), ); From 3a4036f8ac6b9a6fcb97da3aa5b26a39e42d10aa Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 15:24:18 +0530 Subject: [PATCH 07/15] test: cleanup for barcode & Qr code meta Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- packages/nocodb/src/models/Model.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/nocodb/src/models/Model.ts b/packages/nocodb/src/models/Model.ts index 5fa4ac4ae9..00eed04002 100644 --- a/packages/nocodb/src/models/Model.ts +++ b/packages/nocodb/src/models/Model.ts @@ -408,6 +408,14 @@ export default class Model implements TableType { colOptionTableName = MetaTable.COL_FORMULA; cacheScopeName = CacheScope.COL_FORMULA; break; + case UITypes.QrCode: + colOptionTableName = MetaTable.COL_QRCODE; + cacheScopeName = CacheScope.COL_QRCODE; + break; + case UITypes.Barcode: + colOptionTableName = MetaTable.COL_BARCODE; + cacheScopeName = CacheScope.COL_BARCODE; + break; } if (colOptionTableName && cacheScopeName) { await ncMeta.metaDelete(null, null, colOptionTableName, { From 018f69948ab9867ffecd24da0fa0f47156910bd0 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 15:55:18 +0530 Subject: [PATCH 08/15] test: filter delete only if its parent Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- packages/nocodb/src/models/Column.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/nocodb/src/models/Column.ts 
b/packages/nocodb/src/models/Column.ts index 78ae4edc9f..1695d1b4db 100644 --- a/packages/nocodb/src/models/Column.ts +++ b/packages/nocodb/src/models/Column.ts @@ -760,6 +760,7 @@ export default class Column implements ColumnType { }); } for (const filter of filters) { + if (filter.fk_parent_id) continue; await Filter.delete(filter.id, ncMeta); } } From b0fa9278ef9c4fb5bf289c7cb74fdaec3fbcbca7 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 16:53:19 +0530 Subject: [PATCH 09/15] test: temporarily disable filter group tests Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- tests/playwright/tests/db/filters.spec.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/playwright/tests/db/filters.spec.ts b/tests/playwright/tests/db/filters.spec.ts index 55ec5c7e7c..4a56c34d4d 100644 --- a/tests/playwright/tests/db/filters.spec.ts +++ b/tests/playwright/tests/db/filters.spec.ts @@ -7,6 +7,7 @@ import { Api } from 'nocodb-sdk'; import { rowMixedValue } from '../../setup/xcdb-records'; import dayjs from 'dayjs'; import { createDemoTable } from '../../setup/demoTable'; +import { isPg } from '../../setup/db'; let dashboard: DashboardPage, toolbar: ToolbarPage; let context: any; @@ -1162,6 +1163,10 @@ test.describe('Filter Tests: Filter groups', () => { }); test('Filter: Empty filters', async () => { + if (isPg(context)) { + test.skip(); + } + await dashboard.closeTab({ title: 'Team & Auth' }); await dashboard.treeView.openTable({ title: 'Country', networkResponse: false }); From 8fe8449071e51b93fd341ca464ce4886f67ea2c8 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 17:13:26 +0530 Subject: [PATCH 10/15] test: trigger soft delete instead of full project delete Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- .../test/TestResetService/index.ts | 21 ++++++++++++------- 
tests/playwright/tests/db/filters.spec.ts | 4 ---- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/packages/nocodb/src/controllers/test/TestResetService/index.ts b/packages/nocodb/src/controllers/test/TestResetService/index.ts index 6a5d67ec46..ddf1d8d108 100644 --- a/packages/nocodb/src/controllers/test/TestResetService/index.ts +++ b/packages/nocodb/src/controllers/test/TestResetService/index.ts @@ -116,14 +116,19 @@ export class TestResetService { if (project) { await removeProjectUsersFromCache(project); - const bases = await project.getBases(); - - for (const base of bases) { - await NcConnectionMgrv2.deleteAwait(base); - await base.delete(Noco.ncMeta, { force: true }); - } - - await Project.delete(project.id); + // Kludge: Soft reset to support PG as root DB in PW tests + // Revisit to fix this later + + // const bases = await project.getBases(); + // + // for (const base of bases) { + // await NcConnectionMgrv2.deleteAwait(base); + // await base.delete(Noco.ncMeta, { force: true }); + // } + // + // await Project.delete(project.id); + + await Project.softDelete(project.id); } if (dbType == 'sqlite') { diff --git a/tests/playwright/tests/db/filters.spec.ts b/tests/playwright/tests/db/filters.spec.ts index 4a56c34d4d..162cfda00e 100644 --- a/tests/playwright/tests/db/filters.spec.ts +++ b/tests/playwright/tests/db/filters.spec.ts @@ -1163,10 +1163,6 @@ test.describe('Filter Tests: Filter groups', () => { }); test('Filter: Empty filters', async () => { - if (isPg(context)) { - test.skip(); - } - await dashboard.closeTab({ title: 'Team & Auth' }); await dashboard.treeView.openTable({ title: 'Country', networkResponse: false }); From 77adb45f03e31cda6168ca9bc4c660f0d6609d51 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 21:29:03 +0530 Subject: [PATCH 11/15] test: link order scattered in pg Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- 
.../pages/Dashboard/common/Cell/index.ts | 3 ++- tests/playwright/tests/db/timezone.spec.ts | 14 ++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/tests/playwright/pages/Dashboard/common/Cell/index.ts b/tests/playwright/pages/Dashboard/common/Cell/index.ts index dcab70063e..2462ad8184 100644 --- a/tests/playwright/pages/Dashboard/common/Cell/index.ts +++ b/tests/playwright/pages/Dashboard/common/Cell/index.ts @@ -288,7 +288,8 @@ export class CellPageObject extends BasePage { for (let i = 0; i < value.length; ++i) { await chips.nth(i).locator('.name').waitFor({ state: 'visible' }); await chips.nth(i).locator('.name').scrollIntoViewIfNeeded(); - await expect(await chips.nth(i).locator('.name')).toHaveText(value[i]); + const chipText = await chips.nth(i).locator('.name').textContent(); + expect(value.includes(chipText)).toBe(true); } if (verifyChildList) { diff --git a/tests/playwright/tests/db/timezone.spec.ts b/tests/playwright/tests/db/timezone.spec.ts index 269c1781ce..562914cec3 100644 --- a/tests/playwright/tests/db/timezone.spec.ts +++ b/tests/playwright/tests/db/timezone.spec.ts @@ -539,6 +539,8 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a let dashboard: DashboardPage; let context: any; + let counter = 0; + const expectedDisplayValues = { pg: { // PG ignores timezone information for datetime without timezone @@ -587,8 +589,8 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a 'xc-auth': context.token, }, }); - - await createTableWithDateTimeColumn(context.dbType, 'datetimetable01'); + counter++; + await createTableWithDateTimeColumn(context.dbType, `datetimetable01${counter}`); }); // ExtDB : DateAdd, DateTime_Diff verification @@ -596,13 +598,13 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a // - verify API response value // test('Formula, verify display value', async () => { - await connectToExtDb(context, 
'datetimetable01'); + await connectToExtDb(context, `datetimetable01${counter}`); await dashboard.rootPage.reload(); await dashboard.rootPage.waitForTimeout(2000); // insert a record to work with formula experiments // - await dashboard.treeView.openBase({ title: 'datetimetable01' }); + await dashboard.treeView.openBase({ title: `datetimetable01${counter}` }); await dashboard.treeView.openTable({ title: 'MyTable' }); // Create formula column (dummy) @@ -756,14 +758,14 @@ test.describe.serial('Timezone- ExtDB : DateTime column, Browser Timezone same a }); test('Verify display value, UI insert, API response', async () => { - await connectToExtDb(context, 'datetimetable01'); + await connectToExtDb(context, `datetimetable01${counter}`); await dashboard.rootPage.reload(); await dashboard.rootPage.waitForTimeout(2000); // get timezone offset const formattedOffset = getBrowserTimezoneOffset(); - await dashboard.treeView.openBase({ title: 'datetimetable01' }); + await dashboard.treeView.openBase({ title: `datetimetable01${counter}` }); await dashboard.treeView.openTable({ title: 'MyTable' }); if (isSqlite(context)) { From 98762eecd2b5b668d01c1ec82b0c410098d94c48 Mon Sep 17 00:00:00 2001 From: mertmit Date: Sat, 24 Jun 2023 19:03:37 +0300 Subject: [PATCH 12/15] refactor: comments and spacing Signed-off-by: mertmit --- .../at-import/helpers/readAndProcessData.ts | 35 +++++++++++++++---- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts index 9848331a56..4186a4d2e3 100644 --- a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts +++ b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts @@ -9,7 +9,7 @@ import type { AirtableBase } from 'airtable/lib/airtable_base'; import type { TableType } from 'nocodb-sdk'; const BULK_DATA_BATCH_COUNT = 20; // check size 
for every 100 records -const BULK_DATA_BATCH_SIZE = 51200; // in bytes +const BULK_DATA_BATCH_SIZE = 50 * 1024; // in bytes const BULK_PARALLEL_PROCESS = 5; interface AirtableImportContext { @@ -43,6 +43,12 @@ async function readAllData({ .eachPage( async function page(records, fetchNextPage) { if (!data) { + /* + EntityMap is a sqlite3 table dynamically populated based on json data provided + It is used to store data temporarily and then stream it in bulk to import + + This is done to avoid memory issues - heap out of memory - while importing large data + */ data = new EntityMap(); await data.init(); } @@ -97,8 +103,8 @@ export async function importData({ services: AirtableImportContext; }): Promise { try { - // @ts-ignore - const records = await readAllData({ + // returns EntityMap which allows us to stream data + const records: EntityMap = await readAllData({ table, base, logDetailed, @@ -109,10 +115,14 @@ export async function importData({ const readable = records.getStream(); const allRecordsCount = await records.getCount(); const promises = []; + let tempData = []; let importedCount = 0; - let activeProcess = 0; let tempCount = 0; + + // we keep track of active process to pause and resume the stream as we have async calls within the stream and we don't want to load all data in memory + let activeProcess = 0; + readable.on('data', async (record) => { promises.push( new Promise(async (resolve) => { @@ -148,6 +158,7 @@ export async function importData({ allRecordsCount, )}`, ); + importedCount += insertArray.length; insertArray = []; @@ -162,7 +173,10 @@ export async function importData({ ); }); readable.on('end', async () => { + // ensure all chunks are processed await Promise.all(promises); + + // insert remaining data if (tempData.length > 0) { await services.bulkDataService.bulkDataInsert({ projectName, @@ -182,6 +196,7 @@ export async function importData({ importedCount += tempData.length; tempData = []; } + resolve(true); }); }); @@ -230,7 +245,7 @@ export 
async function importLTARData({ curCol: { title?: string }; refCol: { title?: string }; }> = []; - const allData = + const allData: EntityMap = records || (await readAllData({ table, @@ -288,12 +303,13 @@ export async function importLTARData({ for await (const assocMeta of assocTableMetas) { let assocTableData = []; let importedCount = 0; + let tempCount = 0; - // extract insert data from records + // extract link data from records await new Promise((resolve) => { const promises = []; const readable = allData.getStream(); - let tempCount = 0; + readable.on('data', async (record) => { promises.push( new Promise(async (resolve) => { @@ -318,6 +334,7 @@ export async function importLTARData({ 0, assocTableData.length, ); + logBasic( `:: Importing '${ table.title @@ -346,7 +363,10 @@ export async function importLTARData({ ); }); readable.on('end', async () => { + // ensure all chunks are processed await Promise.all(promises); + + // insert remaining data if (assocTableData.length >= 0) { logBasic( `:: Importing '${ @@ -367,6 +387,7 @@ export async function importLTARData({ importedCount += assocTableData.length; assocTableData = []; } + resolve(true); }); }); From c49b5946b77f1c42ef897b8e6178bf9274b3133a Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Sat, 24 Jun 2023 22:24:22 +0530 Subject: [PATCH 13/15] test: wait for chip to render before check Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- tests/playwright/pages/Dashboard/common/Cell/index.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/playwright/pages/Dashboard/common/Cell/index.ts b/tests/playwright/pages/Dashboard/common/Cell/index.ts index 2462ad8184..ad3daaedb6 100644 --- a/tests/playwright/pages/Dashboard/common/Cell/index.ts +++ b/tests/playwright/pages/Dashboard/common/Cell/index.ts @@ -288,8 +288,9 @@ export class CellPageObject extends BasePage { for (let i = 0; i < value.length; ++i) { await 
chips.nth(i).locator('.name').waitFor({ state: 'visible' }); await chips.nth(i).locator('.name').scrollIntoViewIfNeeded(); + await chips.nth(i).locator('.name').waitFor({ state: 'visible' }); const chipText = await chips.nth(i).locator('.name').textContent(); - expect(value.includes(chipText)).toBe(true); + expect(value).toContain(chipText); } if (verifyChildList) { From f0f9d8839586b8a92f31d8d31d365502ee68c45e Mon Sep 17 00:00:00 2001 From: mertmit Date: Sat, 24 Jun 2023 22:32:14 +0300 Subject: [PATCH 14/15] feat: optimize bulkUpdate Signed-off-by: mertmit --- packages/nocodb/src/db/BaseModelSqlv2.ts | 149 ++++++++++++++++-- .../at-import/helpers/readAndProcessData.ts | 4 + .../src/services/bulk-data-alias.service.ts | 2 + 3 files changed, 138 insertions(+), 17 deletions(-) diff --git a/packages/nocodb/src/db/BaseModelSqlv2.ts b/packages/nocodb/src/db/BaseModelSqlv2.ts index b15aa4b355..c67a2ea1de 100644 --- a/packages/nocodb/src/db/BaseModelSqlv2.ts +++ b/packages/nocodb/src/db/BaseModelSqlv2.ts @@ -2244,38 +2244,152 @@ class BaseModelSqlv2 { chunkSize: _chunkSize = 100, cookie, foreign_key_checks = true, + skip_hooks = false, raw = false, }: { chunkSize?: number; cookie?: any; foreign_key_checks?: boolean; + skip_hooks?: boolean; raw?: boolean; } = {}, ) { let trx; try { // TODO: ag column handling for raw bulk insert - const insertDatas = raw - ? datas - : await Promise.all( - datas.map(async (d) => { - await populatePk(this.model, d); - return this.model.mapAliasToColumn( - d, - this.clientMeta, - this.dbDriver, - ); - }), - ); - - // await this.beforeInsertb(insertDatas, null); + const insertDatas = raw ? 
datas : []; if (!raw) { - for (const data of datas) { - await this.validate(data); + await this.model.getColumns(); + + for (const d of datas) { + const insertObj = {}; + + // populate pk, map alias to column, validate data + for (let i = 0; i < this.model.columns.length; ++i) { + const col = this.model.columns[i]; + + // populate pk columns + if (col.pk) { + if (col.meta?.ag && !d[col.title]) { + d[col.title] = + col.meta?.ag === 'nc' ? `rc_${nanoidv2()}` : uuidv4(); + } + } + + // map alias to column + if (!isVirtualCol(col)) { + let val = + d?.[col.column_name] !== undefined + ? d?.[col.column_name] + : d?.[col.title]; + if (val !== undefined) { + if ( + col.uidt === UITypes.Attachment && + typeof val !== 'string' + ) { + val = JSON.stringify(val); + } + if (col.uidt === UITypes.DateTime && dayjs(val).isValid()) { + const { isMySQL, isSqlite, isMssql, isPg } = this.clientMeta; + if ( + val.indexOf('-') < 0 && + val.indexOf('+') < 0 && + val.slice(-1) !== 'Z' + ) { + // if no timezone is given, + // then append +00:00 to make it as UTC + val += '+00:00'; + } + if (isMySQL) { + // first convert the value to utc + // from UI + // e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00 + // from API + // e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00 + // if timezone info is not found - considered as utc + // e.g. 2022-01-01 20:00:00 -> 2022-01-01 20:00:00 + // if timezone info is found + // e.g. 2022-01-01 20:00:00Z -> 2022-01-01 20:00:00 + // e.g. 2022-01-01 20:00:00+00:00 -> 2022-01-01 20:00:00 + // e.g. 2022-01-01 20:00:00+08:00 -> 2022-01-01 12:00:00 + // then we use CONVERT_TZ to convert that in the db timezone + val = this.dbDriver.raw( + `CONVERT_TZ(?, '+00:00', @@GLOBAL.time_zone)`, + [dayjs(val).utc().format('YYYY-MM-DD HH:mm:ss')], + ); + } else if (isSqlite) { + // convert to UTC + // e.g. 2022-01-01T10:00:00.000Z -> 2022-01-01 04:30:00+00:00 + val = dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ'); + } else if (isPg) { + // convert to UTC + // e.g. 
2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00 + // then convert to db timezone + val = this.dbDriver.raw( + `? AT TIME ZONE CURRENT_SETTING('timezone')`, + [dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')], + ); + } else if (isMssql) { + // convert to UTC + // e.g. 2023-05-10T08:49:32.000Z -> 2023-05-10 08:49:32-08:00 + // then convert to db timezone + val = this.dbDriver.raw( + `SWITCHOFFSET(CONVERT(datetimeoffset, ?), DATENAME(TzOffset, SYSDATETIMEOFFSET()))`, + [dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ')], + ); + } else { + // e.g. 2023-01-01T12:00:00.000Z -> 2023-01-01 12:00:00+00:00 + val = dayjs(val).utc().format('YYYY-MM-DD HH:mm:ssZ'); + } + } + insertObj[sanitize(col.column_name)] = val; + } + } + + // validate data + if (col?.meta?.validate && col?.validate) { + const validate = col.getValidators(); + const cn = col.column_name; + const columnTitle = col.title; + if (validate) { + const { func, msg } = validate; + for (let j = 0; j < func.length; ++j) { + const fn = + typeof func[j] === 'string' + ? customValidators[func[j]] + ? customValidators[func[j]] + : Validator[func[j]] + : func[j]; + const columnValue = + insertObj?.[cn] || insertObj?.[columnTitle]; + const arg = + typeof func[j] === 'string' + ? columnValue + '' + : columnValue; + if ( + ![null, undefined, ''].includes(columnValue) && + !(fn.constructor.name === 'AsyncFunction' + ? 
await fn(arg) + : fn(arg)) + ) { + NcError.badRequest( + msg[j] + .replace(/\{VALUE}/g, columnValue) + .replace(/\{cn}/g, columnTitle), + ); + } + } + } + } + } + + insertDatas.push(insertObj); } } + // await this.beforeInsertb(insertDatas, null); + // fallbacks to `10` if database client is sqlite // to avoid `too many SQL variables` error // refer : https://www.sqlite.org/limits.html @@ -2308,7 +2422,8 @@ class BaseModelSqlv2 { await trx.commit(); - if (!raw) await this.afterBulkInsert(insertDatas, this.dbDriver, cookie); + if (!raw && !skip_hooks) + await this.afterBulkInsert(insertDatas, this.dbDriver, cookie); return response; } catch (e) { diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts index 4186a4d2e3..5f3cd98efb 100644 --- a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts +++ b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/readAndProcessData.ts @@ -148,6 +148,7 @@ export async function importData({ tableName: table.title, body: insertArray, cookie: {}, + skip_hooks: true, }); logBasic( @@ -183,6 +184,7 @@ export async function importData({ tableName: table.title, body: tempData, cookie: {}, + skip_hooks: true, }); logBasic( @@ -349,6 +351,7 @@ export async function importLTARData({ tableName: assocMeta.modelMeta.title, body: insertArray, cookie: {}, + skip_hooks: true, }); importedCount += insertArray.length; @@ -382,6 +385,7 @@ export async function importLTARData({ tableName: assocMeta.modelMeta.title, body: assocTableData, cookie: {}, + skip_hooks: true, }); importedCount += assocTableData.length; diff --git a/packages/nocodb/src/services/bulk-data-alias.service.ts b/packages/nocodb/src/services/bulk-data-alias.service.ts index bf65ef44a6..ed0a457466 100644 --- a/packages/nocodb/src/services/bulk-data-alias.service.ts +++ b/packages/nocodb/src/services/bulk-data-alias.service.ts @@ -43,6 +43,7 
@@ export class BulkDataAliasService { cookie: any; chunkSize?: number; foreign_key_checks?: boolean; + skip_hooks?: boolean; raw?: boolean; }, ) { @@ -54,6 +55,7 @@ export class BulkDataAliasService { { cookie: param.cookie, foreign_key_checks: param.foreign_key_checks, + skip_hooks: param.skip_hooks, raw: param.raw, }, ], From d5c20a06d7c28e6f832fd38451c2c1d72bccce11 Mon Sep 17 00:00:00 2001 From: Raju Udava <86527202+dstala@users.noreply.github.com> Date: Mon, 26 Jun 2023 12:48:22 +0530 Subject: [PATCH 15/15] test: mysql as root DB for mysql suite Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com> --- .github/workflows/playwright-test-workflow.yml | 8 +++++++- packages/nocodb/package.json | 1 + 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/playwright-test-workflow.yml b/.github/workflows/playwright-test-workflow.yml index fef81cebcf..eb2ec03eca 100644 --- a/.github/workflows/playwright-test-workflow.yml +++ b/.github/workflows/playwright-test-workflow.yml @@ -69,11 +69,17 @@ jobs: working-directory: ./packages/nc-gui run: npm run ci:run - name: Run backend - if: ${{ inputs.db != 'pg' }} + if: ${{ inputs.db == 'sqlite' }} working-directory: ./packages/nocodb run: | npm install npm run watch:run:playwright > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log & + - name: Run backend:mysql + if: ${{ inputs.db == 'mysql' }} + working-directory: ./packages/nocodb + run: | + npm install + npm run watch:run:playwright:mysql > ${{ inputs.db }}_${{ inputs.shard }}_test_backend.log & - name: Run backend:pg if: ${{ inputs.db == 'pg' }} working-directory: ./packages/nocodb diff --git a/packages/nocodb/package.json b/packages/nocodb/package.json index d0a2078fbc..e6d92ed68b 100644 --- a/packages/nocodb/package.json +++ b/packages/nocodb/package.json @@ -34,6 +34,7 @@ "watch:run": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"", 
"watch:run:mysql": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunMysql --log-error --project tsconfig.json\"", "watch:run:pg": "cross-env NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/dockerRunPG --log-error --project tsconfig.json\"", + "watch:run:playwright:mysql": "rm -f ./test_noco.db; cross-env NC_DB=\"mysql2://localhost:3306?u=root&p=password&d=pw_ncdb\" PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright:pg": "rm -f ./test_noco.db; cross-env NC_DB=\"pg://localhost:5432?u=postgres&p=password&d=pw_ncdb\" PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright": "rm -f ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"", "watch:run:playwright:quick": "rm -f ./test_noco.db; cp ../../tests/playwright/fixtures/noco_0_91_7.db ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/docker --log-error --project tsconfig.json\"",