
Merge pull request #3358 from nocodb/refactor/api-tests

API unit tests
navi authored 2 years ago, committed by GitHub
commit ad37d284aa
Changed files:
  1. .github/workflows/ci-cd.yml (39)
  2. packages/nocodb-sdk/src/lib/globals.ts (5)
  3. packages/nocodb/.gitignore (2)
  4. packages/nocodb/package.json (5)
  5. packages/nocodb/src/__tests__/tsconfig.json (70)
  6. packages/nocodb/src/lib/db/sql-data-mapper/lib/sql/BaseModelSqlv2.ts (140)
  7. packages/nocodb/src/lib/meta/api/columnApis.ts (6)
  8. packages/nocodb/src/lib/meta/api/dataApis/bulkDataAliasApis.ts (10)
  9. packages/nocodb/src/lib/meta/api/dataApis/dataAliasApis.ts (4)
  10. packages/nocodb/src/lib/meta/api/dataApis/dataAliasNestedApis.ts (11)
  11. packages/nocodb/src/lib/meta/api/dataApis/dataApis.ts (2)
  12. packages/nocodb/src/lib/migrations/v2/nc_011.ts (37)
  13. packages/nocodb/src/lib/models/Model.ts (2)
  14. packages/nocodb/src/lib/models/Project.ts (2)
  15. packages/nocodb/src/lib/utils/common/NcConnectionMgrv2.ts (9)
  16. packages/nocodb/src/lib/utils/globals.ts (35)
  17. packages/nocodb/tests/mysql-sakila-db/03-test-sakila-schema.sql (658)
  18. packages/nocodb/tests/mysql-sakila-db/04-test-sakila-data.sql (46449)
  19. packages/nocodb/tests/unit/TestDbMngr.ts (243)
  20. packages/nocodb/tests/unit/factory/column.ts (203)
  21. packages/nocodb/tests/unit/factory/project.ts (64)
  22. packages/nocodb/tests/unit/factory/row.ts (181)
  23. packages/nocodb/tests/unit/factory/table.ts (42)
  24. packages/nocodb/tests/unit/factory/user.ts (18)
  25. packages/nocodb/tests/unit/factory/view.ts (35)
  26. packages/nocodb/tests/unit/index.test.ts (20)
  27. packages/nocodb/tests/unit/init/cleanupMeta.ts (56)
  28. packages/nocodb/tests/unit/init/cleanupSakila.ts (81)
  29. packages/nocodb/tests/unit/init/db.ts (12)
  30. packages/nocodb/tests/unit/init/index.ts (42)
  31. packages/nocodb/tests/unit/model/index.test.ts (10)
  32. packages/nocodb/tests/unit/model/tests/baseModelSql.test.ts (500)
  33. packages/nocodb/tests/unit/rest/index.test.ts (18)
  34. packages/nocodb/tests/unit/rest/tests/auth.test.ts (169)
  35. packages/nocodb/tests/unit/rest/tests/project.test.ts (268)
  36. packages/nocodb/tests/unit/rest/tests/table.test.ts (253)
  37. packages/nocodb/tests/unit/rest/tests/tableRow.test.ts (2031)
  38. packages/nocodb/tests/unit/rest/tests/viewRow.test.ts (1232)
  39. packages/nocodb/tests/unit/tsconfig.json (72)

39
.github/workflows/ci-cd.yml

@@ -635,3 +635,42 @@ jobs:
name: cy-quick-pg-snapshots
path: scripts/cypress/screenshots
retention-days: 2
+unit-tests:
+runs-on: ubuntu-20.04
+steps:
+- name: Setup Node
+uses: actions/setup-node@v1
+with:
+node-version: 16.15.0
+- name: Checkout
+uses: actions/checkout@v2
+with:
+fetch-depth: 0
+- name: Cache node modules
+uses: actions/cache@v2
+env:
+cache-name: cache-node-modules
+with:
+# npm cache files are stored in `~/.npm` on Linux/macOS
+path: ~/.npm
+key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
+restore-keys: |
+${{ runner.os }}-build-${{ env.cache-name }}-
+${{ runner.os }}-build-
+${{ runner.os }}-
+- name: install dependencies nocodb-sdk
+working-directory: ./packages/nocodb-sdk
+run: npm install
+- name: build nocodb-sdk
+working-directory: ./packages/nocodb-sdk
+run: npm run build:main
+- name: Install dependencies
+working-directory: ./packages/nocodb
+run: npm install
+- name: setup mysql
+working-directory: ./
+run: docker-compose -f ./scripts/docker-compose-cypress.yml up -d
+- name: run unit tests
+working-directory: ./packages/nocodb
+run: npm run test:unit

5
packages/nocodb-sdk/src/lib/globals.ts

@@ -39,6 +39,11 @@ export enum AuditOperationTypes {
export enum AuditOperationSubTypes {
UPDATE = 'UPDATE',
INSERT = 'INSERT',
+BULK_INSERT = 'BULK_INSERT',
+BULK_UPDATE = 'BULK_UPDATE',
+BULK_DELETE = 'BULK_DELETE',
+LINK_RECORD = 'LINK_RECORD',
+UNLINK_RECORD = 'UNLINK_RECORD',
DELETE = 'DELETE',
CREATED = 'CREATED',
DELETED = 'DELETED',

2
packages/nocodb/.gitignore

@@ -16,3 +16,5 @@ xc.db*
noco.db*
/nc/
/docker/main.js
+test_meta.db
+test_sakila.db

5
packages/nocodb/package.json

@@ -21,11 +21,10 @@
"local:test:graphql": "cross-env DATABASE_URL=mysql://root:password@localhost:3306/sakila TS_NODE_PROJECT=tsconfig.json mocha -r ts-node/register src/__tests__/graphql.test.ts --recursive --timeout 10000 --exit",
"test:graphql": "cross-env TS_NODE_PROJECT=tsconfig.json mocha -r ts-node/register src/__tests__/graphql.test.ts --recursive --timeout 10000 --exit",
"test:grpc": "cross-env TS_NODE_PROJECT=tsconfig.json mocha -r ts-node/register src/__tests__/grpc.test.ts --recursive --timeout 10000 --exit",
-"local:test:rest": "cross-env DATABASE_URL=mysql://root:password@localhost:3306/sakila TS_NODE_PROJECT=tsconfig.json mocha -r ts-node/register src/__tests__/rest.test.ts --recursive --timeout 10000 --exit",
-"test:rest": "cross-env TS_NODE_PROJECT=tsconfig.json mocha -r ts-node/register src/__tests__/rest.test.ts --recursive --timeout 10000 --exit",
+"local:test:unit": "cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay",
+"test:unit": "cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay",
"test1": "run-s build test:*",
"test:lint": "tslint --project . && prettier \"src/**/*.ts\" --list-different",
-"test:unit": "nyc --silent ava",
"watch": "run-s clean build:main && run-p \"build:main -- -w\" \"test:unit -- --watch\"",
"cov": "run-s build test:unit cov:html && open-cli coverage/index.html",
"cov:html": "nyc report --reporter=html",

70
packages/nocodb/src/__tests__/tsconfig.json

@@ -0,0 +1,70 @@
{
"compilerOptions": {
"skipLibCheck": true,
"composite": true,
"target": "es2017",
"outDir": "build/main",
"rootDir": "src",
"moduleResolution": "node",
"module": "commonjs",
"declaration": true,
"inlineSourceMap": true,
"esModuleInterop": true
/* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
"allowJs": false,
// "strict": true /* Enable all strict type-checking options. */,
/* Strict Type-Checking Options */
// "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
// "strictNullChecks": true /* Enable strict null checks. */,
// "strictFunctionTypes": true /* Enable strict checking of function types. */,
// "strictPropertyInitialization": true /* Enable strict checking of property initialization in classes. */,
// "noImplicitThis": true /* Raise error on 'this' expressions with an implied 'any' type. */,
// "alwaysStrict": true /* Parse in strict mode and emit "use strict" for each source file. */,
"resolveJsonModule": true,
/* Additional Checks */
"noUnusedLocals": false
/* Report errors on unused locals. */,
"noUnusedParameters": false
/* Report errors on unused parameters. */,
"noImplicitReturns": false
/* Report error when not all code paths in function return a value. */,
"noFallthroughCasesInSwitch": false
/* Report errors for fallthrough cases in switch statement. */,
/* Debugging Options */
"traceResolution": false
/* Report module resolution log messages. */,
"listEmittedFiles": false
/* Print names of generated files part of the compilation. */,
"listFiles": false
/* Print names of files part of the compilation. */,
"pretty": true
/* Stylize errors and messages using color and context. */,
/* Experimental Options */
// "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */,
// "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */,
"lib": [
"es2017"
],
"types": [
"mocha", "node"
],
"typeRoots": [
"node_modules/@types",
"src/types"
]
},
"include": [
"src/**/*.ts",
// "src/lib/xgene/migrations/*.js",
"src/**/*.json"
],
"exclude": [
"node_modules/**",
"node_modules",
"../../../xc-lib-private/**",
"../../../xc-lib-private"
],
"compileOnSave": false
}

140
packages/nocodb/src/lib/db/sql-data-mapper/lib/sql/BaseModelSqlv2.ts

@@ -116,6 +116,7 @@ class BaseModelSqlv2 {
return !!(await qb.where(_wherePk(pks, id)).first());
}
+// todo: add support for sortArrJson
public async findOne(
args: {
where?: string;
@@ -143,7 +144,6 @@
is_group: true,
logical_op: 'and',
}),
-...(args.filterArr || []),
],
qb,
this.dbDriver
@@ -205,7 +205,6 @@
is_group: true,
logical_op: 'and',
}),
-...(args.filterArr || []),
],
qb,
this.dbDriver
@@ -230,7 +229,6 @@
is_group: true,
logical_op: 'and',
}),
-...(args.filterArr || []),
],
qb,
this.dbDriver
@@ -322,6 +320,7 @@
return (this.isPg ? res.rows[0] : res[0][0] ?? res[0]).count;
}
+// todo: add support for sortArrJson and filterArrJson
async groupBy(
args: {
where?: string;
@@ -1667,8 +1666,10 @@
datas: any[],
{
chunkSize: _chunkSize = 100,
+cookie,
}: {
chunkSize?: number;
+cookie?: any;
} = {}
) {
try {
@@ -1699,7 +1700,7 @@
.batchInsert(this.model.table_name, insertDatas, chunkSize)
.returning(this.model.primaryKey?.column_name);
-// await this.afterInsertb(insertDatas, null);
+await this.afterBulkInsert(insertDatas, this.dbDriver, cookie);
return response;
} catch (e) {
@@ -1708,7 +1709,7 @@
}
}
-async bulkUpdate(datas: any[]) {
+async bulkUpdate(datas: any[], { cookie }: { cookie?: any } = {}) {
let transaction;
try {
const updateDatas = await Promise.all(
@@ -1733,7 +1734,7 @@
res.push(response);
}
-// await this.afterUpdateb(res, transaction);
+await this.afterBulkUpdate(updateDatas.length, this.dbDriver, cookie);
transaction.commit();
return res;
@@ -1747,13 +1748,14 @@
async bulkUpdateAll(
args: { where?: string; filterArr?: Filter[] } = {},
-data
+data,
+{ cookie }: { cookie?: any } = {}
) {
+let queryResponse;
try {
const updateData = await this.model.mapAliasToColumn(data);
await this.validate(updateData);
const pkValues = await this._extractPksValues(updateData);
-let res = null;
if (pkValues) {
// pk is specified - by pass
} else {
@@ -1775,21 +1777,25 @@
is_group: true,
logical_op: 'and',
}),
+...(args.filterArr || []),
],
qb,
this.dbDriver
);
qb.update(updateData);
-res = ((await qb) as any).count;
+queryResponse = (await qb) as any;
}
-return res;
+const count = queryResponse ?? 0;
+await this.afterBulkUpdate(count, this.dbDriver, cookie);
+return count;
} catch (e) {
throw e;
}
}
-async bulkDelete(ids: any[]) {
+async bulkDelete(ids: any[], { cookie }: { cookie?: any } = {}) {
let transaction;
try {
transaction = await this.dbDriver.transaction();
@@ -1808,6 +1814,8 @@
transaction.commit();
+await this.afterBulkDelete(ids.length, this.dbDriver, cookie);
return res;
} catch (e) {
if (transaction) transaction.rollback();
@@ -1817,7 +1825,7 @@
}
}
-async bulkDeleteAll(args: { where?: string; filterArr?: Filter[] } = {}) {
+async bulkDeleteAll(
+args: { where?: string; filterArr?: Filter[] } = {},
+{ cookie }: { cookie?: any } = {}
+) {
try {
await this.model.getColumns();
const { where } = this._getListArgs(args);
@@ -1837,13 +1848,16 @@
is_group: true,
logical_op: 'and',
}),
+...(args.filterArr || []),
],
qb,
this.dbDriver
);
qb.del();
-return ((await qb) as any).count;
+const count = (await qb) as any;
+await this.afterBulkDelete(count, this.dbDriver, cookie);
+return count;
} catch (e) {
throw e;
}
@@ -1861,7 +1875,7 @@
await this.handleHooks('After.insert', data, req);
// if (req?.headers?.['xc-gui']) {
const id = this._extractPksValues(data);
-Audit.insert({
+await Audit.insert({
fk_model_id: this.model.id,
row_id: id,
op_type: AuditOperationTypes.DATA,
@@ -1876,6 +1890,48 @@
// }
}
+public async afterBulkUpdate(count: number, _trx: any, req): Promise<void> {
+await Audit.insert({
+fk_model_id: this.model.id,
+op_type: AuditOperationTypes.DATA,
+op_sub_type: AuditOperationSubTypes.BULK_UPDATE,
+description: DOMPurify.sanitize(
+`${count} records bulk updated in ${this.model.title}`
+),
+// details: JSON.stringify(data),
+ip: req?.clientIp,
+user: req?.user?.email,
+});
+}
+public async afterBulkDelete(count: number, _trx: any, req): Promise<void> {
+await Audit.insert({
+fk_model_id: this.model.id,
+op_type: AuditOperationTypes.DATA,
+op_sub_type: AuditOperationSubTypes.BULK_DELETE,
+description: DOMPurify.sanitize(
+`${count} records bulk deleted in ${this.model.title}`
+),
+// details: JSON.stringify(data),
+ip: req?.clientIp,
+user: req?.user?.email,
+});
+}
+public async afterBulkInsert(data: any[], _trx: any, req): Promise<void> {
+await Audit.insert({
+fk_model_id: this.model.id,
+op_type: AuditOperationTypes.DATA,
+op_sub_type: AuditOperationSubTypes.BULK_INSERT,
+description: DOMPurify.sanitize(
+`${data.length} records bulk inserted into ${this.model.title}`
+),
+// details: JSON.stringify(data),
+ip: req?.clientIp,
+user: req?.user?.email,
+});
+}
public async beforeUpdate(data: any, _trx: any, req): Promise<void> {
const ignoreWebhook = req.query?.ignoreWebhook;
if (ignoreWebhook) {
@@ -1889,6 +1945,18 @@
}
public async afterUpdate(data: any, _trx: any, req): Promise<void> {
+const id = this._extractPksValues(data);
+Audit.insert({
+fk_model_id: this.model.id,
+row_id: id,
+op_type: AuditOperationTypes.DATA,
+op_sub_type: AuditOperationSubTypes.UPDATE,
+description: DOMPurify.sanitize(`${id} updated in ${this.model.title}`),
+// details: JSON.stringify(data),
+ip: req?.clientIp,
+user: req?.user?.email,
+});
const ignoreWebhook = req.query?.ignoreWebhook;
if (ignoreWebhook) {
if (ignoreWebhook != 'true' && ignoreWebhook != 'false') {
@@ -1907,7 +1975,7 @@
public async afterDelete(data: any, _trx: any, req): Promise<void> {
// if (req?.headers?.['xc-gui']) {
const id = req?.params?.id;
-Audit.insert({
+await Audit.insert({
fk_model_id: this.model.id,
row_id: id,
op_type: AuditOperationTypes.DATA,
@@ -2070,10 +2138,12 @@
colId,
rowId,
childId,
+cookie,
}: {
colId: string;
rowId: string;
childId: string;
+cookie?: any;
}) {
const columns = await this.model.getColumns();
const column = columns.find((c) => c.id === colId);
@@ -2140,16 +2210,35 @@
}
break;
}
+await this.afterAddChild(rowId, childId, cookie);
+}
+public async afterAddChild(rowId, childId, req): Promise<void> {
+await Audit.insert({
+fk_model_id: this.model.id,
+op_type: AuditOperationTypes.DATA,
+op_sub_type: AuditOperationSubTypes.LINK_RECORD,
+row_id: rowId,
+description: DOMPurify.sanitize(
+`Record [id:${childId}] record linked with record [id:${rowId}] record in ${this.model.title}`
+),
+// details: JSON.stringify(data),
+ip: req?.clientIp,
+user: req?.user?.email,
+});
}
async removeChild({
colId,
rowId,
childId,
+cookie,
}: {
colId: string;
rowId: string;
childId: string;
+cookie?: any;
}) {
const columns = await this.model.getColumns();
const column = columns.find((c) => c.id === colId);
@@ -2214,6 +2303,23 @@
}
break;
}
+await this.afterRemoveChild(rowId, childId, cookie);
+}
+public async afterRemoveChild(rowId, childId, req): Promise<void> {
+await Audit.insert({
+fk_model_id: this.model.id,
+op_type: AuditOperationTypes.DATA,
+op_sub_type: AuditOperationSubTypes.UNLINK_RECORD,
+row_id: rowId,
+description: DOMPurify.sanitize(
+`Record [id:${childId}] record unlinked with record [id:${rowId}] record in ${this.model.title}`
+),
+// details: JSON.stringify(data),
+ip: req?.clientIp,
+user: req?.user?.email,
+});
}
private async extractRawQueryAndExec(qb: QueryBuilder) {

6
packages/nocodb/src/lib/meta/api/columnApis.ts

@@ -835,7 +835,7 @@ export async function columnUpdate(req: Request, res: Response<TableType>) {
if (driverType === 'mssql') {
await dbDriver.raw(`UPDATE ?? SET ?? = NULL WHERE ?? LIKE ?`, [table.table_name, column.column_name, column.column_name, option.title]);
} else {
-await baseModel.bulkUpdateAll({ where: `(${column.column_name},eq,${option.title})` }, { [column.column_name]: null });
+await baseModel.bulkUpdateAll({ where: `(${column.column_name},eq,${option.title})` }, { [column.column_name]: null }, { cookie: req});
}
} else if (column.uidt === UITypes.MultiSelect) {
if (driverType === 'mysql' || driverType === 'mysql2') {
@@ -933,7 +933,7 @@ export async function columnUpdate(req: Request, res: Response<TableType>) {
if (driverType === 'mssql') {
await dbDriver.raw(`UPDATE ?? SET ?? = ? WHERE ?? LIKE ?`, [table.table_name, column.column_name, newOp.title, column.column_name, option.title]);
} else {
-await baseModel.bulkUpdateAll({ where: `(${column.column_name},eq,${option.title})` }, { [column.column_name]: newOp.title });
+await baseModel.bulkUpdateAll({ where: `(${column.column_name},eq,${option.title})` }, { [column.column_name]: newOp.title }, { cookie: req});
}
} else if (column.uidt === UITypes.MultiSelect) {
if (driverType === 'mysql' || driverType === 'mysql2') {
@@ -954,7 +954,7 @@ export async function columnUpdate(req: Request, res: Response<TableType>) {
if (driverType === 'mssql') {
await dbDriver.raw(`UPDATE ?? SET ?? = ? WHERE ?? LIKE ?`, [table.table_name, column.column_name, newOp.title, column.column_name, ch.temp_title]);
} else {
-await baseModel.bulkUpdateAll({ where: `(${column.column_name},eq,${ch.temp_title})` }, { [column.column_name]: newOp.title });
+await baseModel.bulkUpdateAll({ where: `(${column.column_name},eq,${ch.temp_title})` }, { [column.column_name]: newOp.title }, { cookie: req});
}
} else if (column.uidt === UITypes.MultiSelect) {
if (driverType === 'mysql' || driverType === 'mysql2') {

10
packages/nocodb/src/lib/meta/api/dataApis/bulkDataAliasApis.ts

@@ -17,7 +17,7 @@ async function bulkDataInsert(req: Request, res: Response) {
dbDriver: NcConnectionMgrv2.get(base),
});
-res.json(await baseModel.bulkInsert(req.body));
+res.json(await baseModel.bulkInsert(req.body, { cookie: req }));
}
async function bulkDataUpdate(req: Request, res: Response) {
@@ -30,9 +30,10 @@ async function bulkDataUpdate(req: Request, res: Response) {
dbDriver: NcConnectionMgrv2.get(base),
});
-res.json(await baseModel.bulkUpdate(req.body));
+res.json(await baseModel.bulkUpdate(req.body, { cookie: req }));
}
+// todo: Integrate with filterArrJson bulkDataUpdateAll
async function bulkDataUpdateAll(req: Request, res: Response) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
const base = await Base.get(model.base_id);
@@ -43,7 +44,7 @@ async function bulkDataUpdateAll(req: Request, res: Response) {
dbDriver: NcConnectionMgrv2.get(base),
});
-res.json(await baseModel.bulkUpdateAll(req.query, req.body));
+res.json(await baseModel.bulkUpdateAll(req.query, req.body, { cookie: req }));
}
async function bulkDataDelete(req: Request, res: Response) {
@@ -55,9 +56,10 @@ async function bulkDataDelete(req: Request, res: Response) {
dbDriver: NcConnectionMgrv2.get(base),
});
-res.json(await baseModel.bulkDelete(req.body));
+res.json(await baseModel.bulkDelete(req.body, { cookie: req }));
}
+// todo: Integrate with filterArrJson bulkDataDeleteAll
async function bulkDataDeleteAll(req: Request, res: Response) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
const base = await Base.get(model.base_id);

4
packages/nocodb/src/lib/meta/api/dataApis/dataAliasApis.ts

@@ -10,6 +10,7 @@ import { getViewAndModelFromRequestByAliasOrId } from './helpers';
import apiMetrics from '../../helpers/apiMetrics';
import getAst from '../../../db/sql-data-mapper/lib/sql/helpers/getAst';
+// todo: Handle the error case where view doesnt belong to model
async function dataList(req: Request, res: Response) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
res.json(await getDataList(model, view, req));
@@ -46,6 +47,7 @@ async function dataCount(req: Request, res: Response) {
res.json({ count });
}
+// todo: Handle the error case where view doesnt belong to model
async function dataInsert(req: Request, res: Response) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
@@ -81,6 +83,8 @@ async function dataDelete(req: Request, res: Response) {
viewId: view?.id,
dbDriver: NcConnectionMgrv2.get(base),
});
+// todo: Should have error http status code
const message = await baseModel.hasLTARData(req.params.rowId, model);
if (message.length) {
res.json({ message });

11
packages/nocodb/src/lib/meta/api/dataApis/dataAliasNestedApis.ts

@@ -4,10 +4,14 @@ import Base from '../../../models/Base';
import NcConnectionMgrv2 from '../../../utils/common/NcConnectionMgrv2';
import { PagedResponseImpl } from '../../helpers/PagedResponse';
import ncMetaAclMw from '../../helpers/ncMetaAclMw';
-import { getColumnByIdOrName, getViewAndModelFromRequestByAliasOrId } from './helpers'
+import {
+getColumnByIdOrName,
+getViewAndModelFromRequestByAliasOrId,
+} from './helpers';
import { NcError } from '../../helpers/catchError';
import apiMetrics from '../../helpers/apiMetrics';
+// todo: handle case where the given column is not ltar
export async function mmList(req: Request, res: Response, next) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
@@ -157,6 +161,7 @@ export async function btExcludedList(req: Request, res: Response, next) {
);
}
+// todo: handle case where the given column is not ltar
export async function hmList(req: Request, res: Response, next) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
if (!model) return next(new Error('Table not found'));
@@ -212,12 +217,14 @@ async function relationDataRemove(req, res) {
colId: column.id,
childId: req.params.refRowId,
rowId: req.params.rowId,
+cookie: req,
});
res.json({ msg: 'success' });
}
//@ts-ignore
+// todo: Give proper error message when reference row is already related and handle duplicate ref row id in hm
async function relationDataAdd(req, res) {
const { model, view } = await getViewAndModelFromRequestByAliasOrId(req);
if (!model) NcError.notFound('Table not found');
@@ -235,12 +242,12 @@ async function relationDataAdd(req, res) {
colId: column.id,
childId: req.params.refRowId,
rowId: req.params.rowId,
+cookie: req,
});
res.json({ msg: 'success' });
}
const router = Router({ mergeParams: true });
router.get(

2
packages/nocodb/src/lib/meta/api/dataApis/dataApis.ts

@@ -494,6 +494,7 @@ async function relationDataDelete(req, res) {
colId: req.params.colId,
childId: req.params.childId,
rowId: req.params.rowId,
+cookie: req,
});
res.json({ msg: 'success' });
@@ -521,6 +522,7 @@ async function relationDataAdd(req, res) {
colId: req.params.colId,
childId: req.params.childId,
rowId: req.params.rowId,
+cookie: req,
});
res.json({ msg: 'success' });

37
packages/nocodb/src/lib/migrations/v2/nc_011.ts

@@ -1,4 +1,4 @@
-import { MetaTable } from '../../utils/globals';
+import { MetaTable, orderedMetaTables } from '../../utils/globals';
// import googleAuth from '../plugins/googleAuth';
// import ses from '../plugins/ses';
// import cache from '../plugins/cache';
@@ -806,38 +806,9 @@ const up = async (knex) => {
};
const down = async (knex) => {
-await knex.schema.dropTable(MetaTable.MODEL_ROLE_VISIBILITY);
-await knex.schema.dropTable(MetaTable.PLUGIN);
-await knex.schema.dropTable(MetaTable.AUDIT);
-await knex.schema.dropTable(MetaTable.TEAM_USERS);
-await knex.schema.dropTable(MetaTable.TEAMS);
-await knex.schema.dropTable(MetaTable.ORGS);
-await knex.schema.dropTable(MetaTable.PROJECT_USERS);
-await knex.schema.dropTable(MetaTable.USERS);
-await knex.schema.dropTable(MetaTable.KANBAN_VIEW_COLUMNS);
-await knex.schema.dropTable(MetaTable.KANBAN_VIEW);
-await knex.schema.dropTable(MetaTable.GRID_VIEW_COLUMNS);
-await knex.schema.dropTable(MetaTable.GRID_VIEW);
-await knex.schema.dropTable(MetaTable.GALLERY_VIEW_COLUMNS);
-await knex.schema.dropTable(MetaTable.GALLERY_VIEW);
-await knex.schema.dropTable(MetaTable.FORM_VIEW_COLUMNS);
-await knex.schema.dropTable(MetaTable.FORM_VIEW);
-await knex.schema.dropTable(MetaTable.SHARED_VIEWS);
-await knex.schema.dropTable(MetaTable.SORT);
-await knex.schema.dropTable(MetaTable.FILTER_EXP);
-await knex.schema.dropTable(MetaTable.HOOK_LOGS);
-await knex.schema.dropTable(MetaTable.HOOKS);
-await knex.schema.dropTable(MetaTable.VIEWS);
-await knex.schema.dropTable(MetaTable.COL_FORMULA);
-await knex.schema.dropTable(MetaTable.COL_ROLLUP);
-await knex.schema.dropTable(MetaTable.COL_LOOKUP);
-await knex.schema.dropTable(MetaTable.COL_SELECT_OPTIONS);
-await knex.schema.dropTable(MetaTable.COL_RELATIONS);
-await knex.schema.dropTable(MetaTable.COLUMN_VALIDATIONS);
-await knex.schema.dropTable(MetaTable.COLUMNS);
-await knex.schema.dropTable(MetaTable.MODELS);
-await knex.schema.dropTable(MetaTable.BASES);
-await knex.schema.dropTable(MetaTable.PROJECT);
+for (const tableName of orderedMetaTables) {
+await knex.schema.dropTable(tableName);
+}
};
export { up, down };

2
packages/nocodb/src/lib/models/Model.ts

@@ -628,11 +628,13 @@ export default class Model implements TableType {
],
}
);
+if (model) {
await NocoCache.set(
`${CacheScope.MODEL}:${project_id}:${aliasOrId}`,
model.id
);
await NocoCache.set(`${CacheScope.MODEL}:${model.id}`, model);
+}
return model && new Model(model);
}
return modelId && this.get(modelId);

2
packages/nocodb/src/lib/models/Project.ts

@@ -173,6 +173,7 @@ export default class Project implements ProjectType {
return null;
}
+// Todo: Remove the project entry from the connection pool in NcConnectionMgrv2
// @ts-ignore
static async softDelete(
projectId: string,
@@ -273,6 +274,7 @@
);
}
+// Todo: Remove the project entry from the connection pool in NcConnectionMgrv2
static async delete(projectId, ncMeta = Noco.ncMeta): Promise<any> {
const bases = await Base.list({ projectId });
for (const base of bases) {

9
packages/nocodb/src/lib/utils/common/NcConnectionMgrv2.ts

@@ -22,6 +22,15 @@ export default class NcConnectionMgrv2 {
// this.metaKnex = ncMeta;
// }
+public static async destroyAll() {
+for (const projectId in this.connectionRefs) {
+for (const baseId in this.connectionRefs[projectId]) {
+await this.connectionRefs[projectId][baseId].destroy();
+}
+}
+}
+// Todo: Should await on connection destroy
public static delete(base: Base) {
// todo: ignore meta projects
if (this.connectionRefs?.[base.project_id]?.[base.id]) {

35
packages/nocodb/src/lib/utils/globals.ts

@@ -39,6 +39,41 @@ export enum MetaTable {
SYNC_LOGS = 'nc_sync_logs_v2',
}
+export const orderedMetaTables = [
+MetaTable.MODEL_ROLE_VISIBILITY,
+MetaTable.PLUGIN,
+MetaTable.AUDIT,
+MetaTable.TEAM_USERS,
+MetaTable.TEAMS,
+MetaTable.ORGS,
+MetaTable.PROJECT_USERS,
+MetaTable.USERS,
+MetaTable.KANBAN_VIEW_COLUMNS,
+MetaTable.KANBAN_VIEW,
+MetaTable.GRID_VIEW_COLUMNS,
+MetaTable.GRID_VIEW,
+MetaTable.GALLERY_VIEW_COLUMNS,
+MetaTable.GALLERY_VIEW,
+MetaTable.FORM_VIEW_COLUMNS,
+MetaTable.FORM_VIEW,
+MetaTable.SHARED_VIEWS,
+MetaTable.SORT,
+MetaTable.FILTER_EXP,
+MetaTable.HOOK_LOGS,
+MetaTable.HOOKS,
+MetaTable.VIEWS,
+MetaTable.COL_FORMULA,
+MetaTable.COL_ROLLUP,
+MetaTable.COL_LOOKUP,
+MetaTable.COL_SELECT_OPTIONS,
+MetaTable.COL_RELATIONS,
+MetaTable.COLUMN_VALIDATIONS,
+MetaTable.COLUMNS,
+MetaTable.MODELS,
+MetaTable.BASES,
+MetaTable.PROJECT,
+];
export enum CacheScope {
PROJECT = 'project',
BASE = 'base',

658
packages/nocodb/tests/mysql-sakila-db/03-test-sakila-schema.sql

@@ -0,0 +1,658 @@
-- Sakila Sample Database Schema
-- Version 1.2
-- Copyright (c) 2006, 2019, Oracle and/or its affiliates.
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of Oracle nor the names of its contributors may be used
-- to endorse or promote products derived from this software without
-- specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
-- IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-- THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-- PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-- CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
SET NAMES utf8mb4;
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL';
DROP SCHEMA IF EXISTS test_sakila;
CREATE SCHEMA test_sakila;
USE test_sakila;
--
-- Table structure for table `actor`
--
CREATE TABLE actor (
actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id),
KEY idx_actor_last_name (last_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `address`
--
CREATE TABLE address (
address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id SMALLINT UNSIGNED NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
-- Add GEOMETRY column for MySQL 5.7.5 and higher
-- Also include SRID attribute for MySQL 8.0.3 and higher
/*!50705 location GEOMETRY */ /*!80003 SRID 0 */ /*!50705 NOT NULL,*/
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (address_id),
KEY idx_fk_city_id (city_id),
/*!50705 SPATIAL KEY `idx_location` (location),*/
CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `category`
--
CREATE TABLE category (
category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name VARCHAR(25) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (category_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `city`
--
CREATE TABLE city (
city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
city VARCHAR(50) NOT NULL,
country_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (city_id),
KEY idx_fk_country_id (country_id),
CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `country`
--
CREATE TABLE country (
country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
country VARCHAR(50) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (country_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `customer`
--
CREATE TABLE customer (
customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
store_id TINYINT UNSIGNED NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
create_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (customer_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
KEY idx_last_name (last_name),
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film`
--
CREATE TABLE film (
film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
title VARCHAR(128) NOT NULL,
description TEXT DEFAULT NULL,
release_year YEAR DEFAULT NULL,
language_id TINYINT UNSIGNED NOT NULL,
original_language_id TINYINT UNSIGNED DEFAULT NULL,
rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3,
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
length SMALLINT UNSIGNED DEFAULT NULL,
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G',
special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id),
KEY idx_title (title),
KEY idx_fk_language_id (language_id),
KEY idx_fk_original_language_id (original_language_id),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film_actor`
--
CREATE TABLE film_actor (
actor_id SMALLINT UNSIGNED NOT NULL,
film_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id,film_id),
KEY idx_fk_film_id (`film_id`),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film_category`
--
CREATE TABLE film_category (
film_id SMALLINT UNSIGNED NOT NULL,
category_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film_text`
--
-- InnoDB added FULLTEXT support in 5.6.10. If you use an
-- earlier version, then consider upgrading (recommended) or
-- changing InnoDB to MyISAM as the film_text engine
--
-- Use InnoDB for film_text as of 5.6.10, MyISAM prior to 5.6.10.
SET @old_default_storage_engine = @@default_storage_engine;
SET @@default_storage_engine = 'MyISAM';
/*!50610 SET @@default_storage_engine = 'InnoDB'*/;
CREATE TABLE film_text (
film_id SMALLINT NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (film_id),
FULLTEXT KEY idx_title_description (title,description)
) DEFAULT CHARSET=utf8mb4;
SET @@default_storage_engine = @old_default_storage_engine;
--
-- Triggers for loading film_text from film
--
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN
INSERT INTO film_text (film_id, title, description)
VALUES (new.film_id, new.title, new.description);
END;
CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN
IF (old.title != new.title) OR (old.description != new.description) OR (old.film_id != new.film_id)
THEN
UPDATE film_text
SET title=new.title,
description=new.description,
film_id=new.film_id
WHERE film_id=old.film_id;
END IF;
END;
CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN
DELETE FROM film_text WHERE film_id = old.film_id;
END;
--
-- Table structure for table `inventory`
--
CREATE TABLE inventory (
inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT,
film_id SMALLINT UNSIGNED NOT NULL,
store_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (inventory_id),
KEY idx_fk_film_id (film_id),
KEY idx_store_id_film_id (store_id,film_id),
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `language`
--
CREATE TABLE language (
language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name CHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (language_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `payment`
--
CREATE TABLE payment (
payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
customer_id SMALLINT UNSIGNED NOT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (payment_id),
KEY idx_fk_staff_id (staff_id),
KEY idx_fk_customer_id (customer_id),
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `rental`
--
CREATE TABLE rental (
rental_id INT NOT NULL AUTO_INCREMENT,
rental_date DATETIME NOT NULL,
inventory_id MEDIUMINT UNSIGNED NOT NULL,
customer_id SMALLINT UNSIGNED NOT NULL,
return_date DATETIME DEFAULT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rental_id),
UNIQUE KEY (rental_date,inventory_id,customer_id),
KEY idx_fk_inventory_id (inventory_id),
KEY idx_fk_customer_id (customer_id),
KEY idx_fk_staff_id (staff_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `staff`
--
CREATE TABLE staff (
staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
picture BLOB DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id TINYINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (staff_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `store`
--
CREATE TABLE store (
store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
manager_staff_id TINYINT UNSIGNED NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (store_id),
UNIQUE KEY idx_unique_manager (manager_staff_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- View structure for view `customer_list`
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8mb4' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`,
a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8mb4'active',_utf8mb4'') AS notes, cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `film_list`
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8mb4' ', actor.last_name) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id, category.name;
--
-- View structure for view `nicer_but_slower_film_list`
--
CREATE VIEW nicer_but_slower_film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)),
LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8mb4' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)),
LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id, category.name;
--
-- View structure for view `staff_list`
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8mb4' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone,
city.city AS city, country.country AS country, s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `sales_by_store`
--
CREATE VIEW sales_by_store
AS
SELECT
CONCAT(c.city, _utf8mb4',', cy.country) AS store
, CONCAT(m.first_name, _utf8mb4' ', m.last_name) AS manager
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY s.store_id
ORDER BY cy.country, c.city;
--
-- View structure for view `sales_by_film_category`
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
ORDER BY total_sales DESC;
--
-- View structure for view `actor_info`
--
CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM test_sakila.film f
INNER JOIN test_sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN test_sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM test_sakila.actor a
LEFT JOIN test_sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN test_sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN test_sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
--
-- Procedure structure for procedure `rewards_report`
--
CREATE PROCEDURE rewards_report (
IN min_monthly_purchases TINYINT UNSIGNED
, IN min_dollar_amount_purchased DECIMAL(10,2)
, OUT count_rewardees INT
)
LANGUAGE SQL
NOT DETERMINISTIC
READS SQL DATA
SQL SECURITY DEFINER
COMMENT 'Provides a customizable report on best customers'
proc: BEGIN
DECLARE last_month_start DATE;
DECLARE last_month_end DATE;
/* Some sanity checks... */
IF min_monthly_purchases = 0 THEN
SELECT 'Minimum monthly purchases parameter must be > 0';
LEAVE proc;
END IF;
IF min_dollar_amount_purchased = 0.00 THEN
SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00';
LEAVE proc;
END IF;
/* Determine start and end time periods */
SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH);
SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d');
SET last_month_end = LAST_DAY(last_month_start);
/*
Create a temporary storage area for
Customer IDs.
*/
CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY);
/*
Find all customers meeting the
monthly purchase requirements
*/
INSERT INTO tmpCustomer (customer_id)
SELECT p.customer_id
FROM payment AS p
WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end
GROUP BY customer_id
HAVING SUM(p.amount) > min_dollar_amount_purchased
AND COUNT(customer_id) > min_monthly_purchases;
/* Populate OUT parameter with count of found customers */
SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees;
/*
Output ALL customer information of matching rewardees.
Customize output as needed.
*/
SELECT c.*
FROM tmpCustomer AS t
INNER JOIN customer AS c ON t.customer_id = c.customer_id;
/* Clean up */
DROP TABLE tmpCustomer;
END;
CREATE FUNCTION IF NOT EXISTS get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
DETERMINISTIC
READS SQL DATA
BEGIN
#OK, WE NEED TO CALCULATE THE CURRENT BALANCE GIVEN A CUSTOMER_ID AND A DATE
#THAT WE WANT THE BALANCE TO BE EFFECTIVE FOR. THE BALANCE IS:
# 1) RENTAL FEES FOR ALL PREVIOUS RENTALS
# 2) ONE DOLLAR FOR EVERY DAY THE PREVIOUS RENTALS ARE OVERDUE
# 3) IF A FILM IS MORE THAN RENTAL_DURATION * 2 OVERDUE, CHARGE THE REPLACEMENT_COST
# 4) SUBTRACT ALL PAYMENTS MADE BEFORE THE DATE SPECIFIED
DECLARE v_rentfees DECIMAL(5,2); #FEES PAID TO RENT THE VIDEOS INITIALLY
DECLARE v_overfees INTEGER; #LATE FEES FOR PRIOR RENTALS
DECLARE v_payments DECIMAL(5,2); #SUM OF PAYMENTS MADE PREVIOUSLY
SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees
FROM film, inventory, rental
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees
FROM rental, inventory, film
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(payment.amount),0) INTO v_payments
FROM payment
WHERE payment.payment_date <= p_effective_date
AND payment.customer_id = p_customer_id;
RETURN v_rentfees + v_overfees - v_payments;
END;
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND inventory_in_stock(inventory_id);
SELECT COUNT(*)
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND inventory_in_stock(inventory_id)
INTO p_film_count;
END;
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND NOT inventory_in_stock(inventory_id);
SELECT COUNT(*)
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND NOT inventory_in_stock(inventory_id)
INTO p_film_count;
END;
CREATE FUNCTION IF NOT EXISTS inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
DECLARE v_customer_id INT;
DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;
SELECT customer_id INTO v_customer_id
FROM rental
WHERE return_date IS NULL
AND inventory_id = p_inventory_id;
RETURN v_customer_id;
END;
CREATE FUNCTION IF NOT EXISTS inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
DECLARE v_rentals INT;
DECLARE v_out INT;
#AN ITEM IS IN-STOCK IF THERE ARE EITHER NO ROWS IN THE rental TABLE
#FOR THE ITEM OR ALL ROWS HAVE return_date POPULATED
SELECT COUNT(*) INTO v_rentals
FROM rental
WHERE inventory_id = p_inventory_id;
IF v_rentals = 0 THEN
RETURN TRUE;
END IF;
SELECT COUNT(rental_id) INTO v_out
FROM inventory LEFT JOIN rental USING(inventory_id)
WHERE inventory.inventory_id = p_inventory_id
AND rental.return_date IS NULL;
IF v_out > 0 THEN
RETURN FALSE;
ELSE
RETURN TRUE;
END IF;
END;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
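These stored routines come from the stock Sakila schema and are loaded into the test database exactly as shown. They only exist on the MySQL path (the SQLite fallback ships a prebuilt sakila.db instead). A rough sketch of how a script could exercise them through the seeded knex handle from TestDbMngr is shown below; it is illustrative and not part of this diff, and the import path, the [rows, fields] shape of the mysql2 driver response, and the sample ids are assumptions.
import TestDbMngr from '../unit/TestDbMngr'; // assumed location: a scratch file next to tests/unit

async function smokeTestSakilaRoutines() {
  await TestDbMngr.init();       // connect (MySQL assumed here)
  await TestDbMngr.seedSakila(); // loads 03-test-sakila-schema.sql + 04-test-sakila-data.sql

  // Scalar function: knex.raw() with mysql2 resolves to [rows, fields].
  const [rows] = await TestDbMngr.sakilaKnex.raw(
    'SELECT get_customer_balance(1, NOW()) AS balance'
  );
  console.log('customer 1 balance:', rows[0].balance);

  // Procedure with an OUT parameter, read back through a session variable.
  await TestDbMngr.sakilaKnex.raw('CALL film_in_stock(1, 1, @film_count)');
  const [[{ film_count }]] = await TestDbMngr.sakilaKnex.raw(
    'SELECT @film_count AS film_count'
  );
  console.log('copies of film 1 in store 1:', film_count);
}

smokeTestSakilaRoutines().catch(console.error);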

46449
packages/nocodb/tests/mysql-sakila-db/04-test-sakila-data.sql

File diff suppressed because one or more lines are too long

243
packages/nocodb/tests/unit/TestDbMngr.ts

@@ -0,0 +1,243 @@

import { DbConfig } from "../../src/interface/config";
import { NcConfigFactory } from "../../src/lib";
import SqlMgrv2 from "../../src/lib/db/sql-mgr/v2/SqlMgrv2";
import fs from 'fs';
import knex from "knex";
import process from "process";
export default class TestDbMngr {
public static readonly dbName = 'test_meta';
public static readonly sakilaDbName = 'test_sakila';
public static metaKnex: knex;
public static sakilaKnex: knex;
public static defaultConnection = {
user: process.env['DB_USER'] || 'root',
password: process.env['DB_PASSWORD'] || 'password',
host: process.env['DB_HOST'] || 'localhost',
port: Number(process.env['DB_PORT']) || 3306,
client: 'mysql2',
}
public static dbConfig: DbConfig;
static async testConnection(config: DbConfig) {
try {
return await SqlMgrv2.testConnection(config);
} catch (e) {
console.log(e);
return { code: -1, message: 'Connection invalid' };
}
}
static async init() {
if(await TestDbMngr.isMysqlConfigured()){
await TestDbMngr.connectMysql();
} else {
await TestDbMngr.switchToSqlite();
}
}
static async isMysqlConfigured() {
const { user, password, host, port, client } = TestDbMngr.defaultConnection;
const config = NcConfigFactory.urlToDbConfig(`${client}://${user}:${password}@${host}:${port}`);
config.connection = {
user,
password,
host,
port,
}
const result = await TestDbMngr.testConnection(config);
return result.code !== -1;
}
static async connectMysql() {
const { user, password, host, port, client } = TestDbMngr.defaultConnection;
if(!process.env[`DATABASE_URL`]){
process.env[`DATABASE_URL`] = `${client}://${user}:${password}@${host}:${port}/${TestDbMngr.dbName}`;
}
TestDbMngr.dbConfig = NcConfigFactory.urlToDbConfig(
NcConfigFactory.extractXcUrlFromJdbc(process.env[`DATABASE_URL`])
);
this.dbConfig.meta = {
tn: 'nc_evolutions',
dbAlias: 'db',
api: {
type: 'rest',
prefix: '',
graphqlDepthLimit: 10,
},
inflection: {
tn: 'camelize',
cn: 'camelize',
},
}
await TestDbMngr.setupMeta();
await TestDbMngr.setupSakila();
}
static async setupMeta() {
if(TestDbMngr.metaKnex){
await TestDbMngr.metaKnex.destroy();
}
if(TestDbMngr.isSqlite()){
await TestDbMngr.resetMetaSqlite();
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
return
}
TestDbMngr.metaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
await TestDbMngr.resetDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
await TestDbMngr.metaKnex.destroy();
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
await TestDbMngr.useDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
}
static async setupSakila () {
if(TestDbMngr.sakilaKnex) {
await TestDbMngr.sakilaKnex.destroy();
}
if(TestDbMngr.isSqlite()){
await TestDbMngr.seedSakila();
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
return
}
TestDbMngr.sakilaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
await TestDbMngr.resetDatabase(TestDbMngr.sakilaKnex, TestDbMngr.sakilaDbName);
await TestDbMngr.sakilaKnex.destroy();
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
await TestDbMngr.useDatabase(TestDbMngr.sakilaKnex, TestDbMngr.sakilaDbName);
}
static async switchToSqlite() {
// process.env[`DATABASE_URL`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.sqlite`;
TestDbMngr.dbConfig = {
client: 'sqlite3',
connection: {
filename: `${__dirname}/${TestDbMngr.dbName}.db`,
database: TestDbMngr.dbName,
},
useNullAsDefault: true,
meta: {
tn: 'nc_evolutions',
dbAlias: 'db',
api: {
type: 'rest',
prefix: '',
graphqlDepthLimit: 10,
},
inflection: {
tn: 'camelize',
cn: 'camelize',
},
},
}
process.env[`NC_DB`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.db`;
await TestDbMngr.setupMeta();
await TestDbMngr.setupSakila();
}
private static async resetDatabase(knexClient, dbName) {
if(TestDbMngr.isSqlite()){
// return knexClient.raw(`DELETE FROM sqlite_sequence`);
} else {
try {
await knexClient.raw(`DROP DATABASE ${dbName}`);
} catch(e) {}
await knexClient.raw(`CREATE DATABASE ${dbName}`);
console.log(`Database ${dbName} created`);
await knexClient.raw(`USE ${dbName}`);
}
}
static isSqlite() {
return TestDbMngr.dbConfig.client === 'sqlite3';
}
private static async useDatabase(knexClient, dbName) {
if(!TestDbMngr.isSqlite()){
await knexClient.raw(`USE ${dbName}`);
}
}
static getDbConfigWithNoDb() {
const dbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
delete dbConfig.connection.database;
return dbConfig;
}
static getMetaDbConfig() {
return TestDbMngr.dbConfig;
}
private static resetMetaSqlite() {
if(fs.existsSync(`${__dirname}/test_meta.db`)){
fs.unlinkSync(`${__dirname}/test_meta.db`);
}
}
static getSakilaDbConfig() {
const sakilaDbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
sakilaDbConfig.connection.database = TestDbMngr.sakilaDbName;
sakilaDbConfig.connection.multipleStatements = true
if(TestDbMngr.isSqlite()){
sakilaDbConfig.connection.filename = `${__dirname}/test_sakila.db`;
}
return sakilaDbConfig;
}
static async seedSakila() {
const testsDir = __dirname.replace('tests/unit', 'tests');
if(TestDbMngr.isSqlite()){
if(fs.existsSync(`${__dirname}/test_sakila.db`)){
fs.unlinkSync(`${__dirname}/test_sakila.db`);
}
fs.copyFileSync(`${testsDir}/sqlite-sakila-db/sakila.db`, `${__dirname}/test_sakila.db`);
} else {
const schemaFile = fs.readFileSync(`${testsDir}/mysql-sakila-db/03-test-sakila-schema.sql`).toString();
const dataFile = fs.readFileSync(`${testsDir}/mysql-sakila-db/04-test-sakila-data.sql`).toString();
await TestDbMngr.sakilaKnex.raw(schemaFile);
await TestDbMngr.sakilaKnex.raw(dataFile);
}
}
static async disableForeignKeyChecks(knexClient) {
if(TestDbMngr.isSqlite()){
await knexClient.raw("PRAGMA foreign_keys = OFF");
}
else {
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 0`);
}
}
static async enableForeignKeyChecks(knexClient) {
if(TestDbMngr.isSqlite()){
await knexClient.raw(`PRAGMA foreign_keys = ON;`);
}
else {
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 1`);
}
}
static async showAllTables(knexClient) {
if(TestDbMngr.isSqlite()){
const tables = await knexClient.raw(`SELECT name FROM sqlite_master WHERE type='table'`);
return tables.filter(t => t.name !== 'sqlite_sequence' && t.name !== '_evolutions').map(t => t.name);
}
else {
const response = await knexClient.raw(`SHOW TABLES`);
return response[0].map(
(table) => Object.values(table)[0]
);
}
}
}
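A minimal usage sketch of this helper, assuming it is run as a scratch file inside tests/unit; the relative import path and the console output are illustrative and not part of the diff.
import TestDbMngr from './TestDbMngr';

(async () => {
  // Picks MySQL when DB_USER/DB_PASSWORD/DB_HOST/DB_PORT point at a reachable
  // server; otherwise it silently switches to the bundled SQLite files.
  await TestDbMngr.init();
  console.log('client:', TestDbMngr.dbConfig.client);
  console.log('sakila tables:', await TestDbMngr.showAllTables(TestDbMngr.sakilaKnex));
  // Destroy the knex pools so the node process can exit.
  await TestDbMngr.metaKnex.destroy();
  await TestDbMngr.sakilaKnex.destroy();
})();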

203
packages/nocodb/tests/unit/factory/column.ts

@@ -0,0 +1,203 @@
import { UITypes } from 'nocodb-sdk';
import request from 'supertest';
import Column from '../../../src/lib/models/Column';
import FormViewColumn from '../../../src/lib/models/FormViewColumn';
import GalleryViewColumn from '../../../src/lib/models/GalleryViewColumn';
import GridViewColumn from '../../../src/lib/models/GridViewColumn';
import Model from '../../../src/lib/models/Model';
import Project from '../../../src/lib/models/Project';
import View from '../../../src/lib/models/View';
import { isSqlite } from '../init/db';
const defaultColumns = function(context) {
return [
{
column_name: 'id',
title: 'Id',
uidt: 'ID',
},
{
column_name: 'title',
title: 'Title',
uidt: 'SingleLineText',
},
{
cdf: 'CURRENT_TIMESTAMP',
column_name: 'created_at',
title: 'CreatedAt',
dtxp: '',
dtxs: '',
uidt: 'DateTime',
},
{
cdf: isSqlite(context) ? 'CURRENT_TIMESTAMP': 'CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP',
column_name: 'updated_at',
title: 'UpdatedAt',
dtxp: '',
dtxs: '',
uidt: 'DateTime',
},
]
};
const createColumn = async (context, table, columnAttr) => {
await request(context.app)
.post(`/api/v1/db/meta/tables/${table.id}/columns`)
.set('xc-auth', context.token)
.send({
...columnAttr,
});
const column: Column = (await table.getColumns()).find(
(column) => column.title === columnAttr.title
);
return column;
};
const createRollupColumn = async (
context,
{
project,
title,
rollupFunction,
table,
relatedTableName,
relatedTableColumnTitle,
}: {
project: Project;
title: string;
rollupFunction: string;
table: Model;
relatedTableName: string;
relatedTableColumnTitle: string;
}
) => {
const childBases = await project.getBases();
const childTable = await Model.getByIdOrName({
project_id: project.id,
base_id: childBases[0].id!,
table_name: relatedTableName,
});
const childTableColumns = await childTable.getColumns();
const childTableColumn = await childTableColumns.find(
(column) => column.title === relatedTableColumnTitle
);
const ltarColumn = (await table.getColumns()).find(
(column) =>
column.uidt === UITypes.LinkToAnotherRecord &&
column.colOptions?.fk_related_model_id === childTable.id
);
const rollupColumn = await createColumn(context, table, {
title: title,
uidt: UITypes.Rollup,
fk_relation_column_id: ltarColumn?.id,
fk_rollup_column_id: childTableColumn?.id,
rollup_function: rollupFunction,
table_name: table.table_name,
column_name: title,
});
return rollupColumn;
};
const createLookupColumn = async (
context,
{
project,
title,
table,
relatedTableName,
relatedTableColumnTitle,
}: {
project: Project;
title: string;
table: Model;
relatedTableName: string;
relatedTableColumnTitle: string;
}
) => {
const childBases = await project.getBases();
const childTable = await Model.getByIdOrName({
project_id: project.id,
base_id: childBases[0].id!,
table_name: relatedTableName,
});
const childTableColumns = await childTable.getColumns();
const childTableColumn = await childTableColumns.find(
(column) => column.title === relatedTableColumnTitle
);
if (!childTableColumn) {
throw new Error(
`Could not find column ${relatedTableColumnTitle} in ${relatedTableName}`
);
}
const ltarColumn = (await table.getColumns()).find(
(column) =>
column.uidt === UITypes.LinkToAnotherRecord &&
column.colOptions?.fk_related_model_id === childTable.id
);
const lookupColumn = await createColumn(context, table, {
title: title,
uidt: UITypes.Lookup,
fk_relation_column_id: ltarColumn?.id,
fk_lookup_column_id: childTableColumn?.id,
table_name: table.table_name,
column_name: title,
});
return lookupColumn;
};
const createLtarColumn = async (
context,
{
title,
parentTable,
childTable,
type,
}: {
title: string;
parentTable: Model;
childTable: Model;
type: string;
}
) => {
const ltarColumn = await createColumn(context, parentTable, {
title: title,
column_name: title,
uidt: UITypes.LinkToAnotherRecord,
parentId: parentTable.id,
childId: childTable.id,
type: type,
});
return ltarColumn;
};
const updateViewColumn = async (context, {view, column, attr}: {column: Column, view: View, attr: any}) => {
const res = await request(context.app)
.patch(`/api/v1/db/meta/views/${view.id}/columns/${column.id}`)
.set('xc-auth', context.token)
.send({
...attr,
});
const updatedColumn: FormViewColumn | GridViewColumn | GalleryViewColumn = (await view.getColumns()).find(
(viewColumn) => viewColumn.id === column.id
)!;
return updatedColumn;
}
export {
defaultColumns,
createColumn,
createRollupColumn,
createLookupColumn,
createLtarColumn,
updateViewColumn
};
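Illustrative only, not part of the diff: how this column factory is meant to be combined with the other factories inside a mocha spec. Paths assume a spec under tests/unit/rest/tests, and the table/column names are placeholders.
import init from '../../init';
import { createProject } from '../../factory/project';
import { createTable } from '../../factory/table';
import { createLtarColumn, createRollupColumn } from '../../factory/column';

it('creates a rollup column over an LTAR relation', async () => {
  const context = await init();
  const project = await createProject(context);
  const parentTable = await createTable(context, project, { title: 'Parent', table_name: 'parent' });
  const childTable = await createTable(context, project, { title: 'Child', table_name: 'child' });

  // createRollupColumn resolves the LTAR column by its related model id,
  // so the link has to exist before the rollup is requested.
  await createLtarColumn(context, { title: 'ChildList', parentTable, childTable, type: 'hm' });

  const rollupColumn = await createRollupColumn(context, {
    project,
    title: 'Child Count',
    rollupFunction: 'count',
    table: parentTable,
    relatedTableName: childTable.table_name,
    relatedTableColumnTitle: 'Title',
  });
  console.log(rollupColumn.title, rollupColumn.uidt); // 'Child Count', Rollup
});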

64
packages/nocodb/tests/unit/factory/project.ts

@@ -0,0 +1,64 @@
import request from 'supertest';
import Project from '../../../src/lib/models/Project';
import TestDbMngr from '../TestDbMngr';
const externalProjectConfig = {
title: 'sakila',
bases: [
{
type: 'mysql2',
config: {
client: 'mysql2',
connection: {
host: 'localhost',
port: '3306',
user: 'root',
password: 'password',
database: TestDbMngr.sakilaDbName,
},
},
inflection_column: 'camelize',
inflection_table: 'camelize',
},
],
external: true,
};
const defaultProjectValue = {
title: 'Title',
};
const defaultSharedBaseValue = {
roles: 'viewer',
password: 'test',
};
const createSharedBase = async (app, token, project, sharedBaseArgs = {}) => {
await request(app)
.post(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', token)
.send({
...defaultSharedBaseValue,
...sharedBaseArgs,
});
};
const createSakilaProject = async (context) => {
const response = await request(context.app)
.post('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send(externalProjectConfig);
return (await Project.getByTitleOrId(response.body.id)) as Project;
};
const createProject = async (context, projectArgs = defaultProjectValue) => {
const response = await request(context.app)
.post('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send(projectArgs);
return (await Project.getByTitleOrId(response.body.id)) as Project;
};
export { createProject, createSharedBase, createSakilaProject };

181
packages/nocodb/tests/unit/factory/row.ts

@@ -0,0 +1,181 @@
import { ColumnType, UITypes } from 'nocodb-sdk';
import request from 'supertest';
import Column from '../../../src/lib/models/Column';
import Filter from '../../../src/lib/models/Filter';
import Model from '../../../src/lib/models/Model';
import Project from '../../../src/lib/models/Project';
import Sort from '../../../src/lib/models/Sort';
import NcConnectionMgrv2 from '../../../src/lib/utils/common/NcConnectionMgrv2';
const rowValue = (column: ColumnType, index: number) => {
switch (column.uidt) {
case UITypes.Number:
return index;
case UITypes.SingleLineText:
return `test-${index}`;
case UITypes.Date:
return '2020-01-01';
case UITypes.DateTime:
return '2020-01-01 00:00:00';
case UITypes.Email:
return `test-${index}@example.com`;
default:
return `test-${index}`;
}
};
const getRow = async (context, {project, table, id}) => {
const response = await request(context.app)
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/${id}`)
.set('xc-auth', context.token);
return response.body;
};
const listRow = async ({
project,
table,
options,
}: {
project: Project;
table: Model;
options?: {
limit?: any;
offset?: any;
filterArr?: Filter[];
sortArr?: Sort[];
};
}) => {
const bases = await project.getBases();
const baseModel = await Model.getBaseModelSQL({
id: table.id,
dbDriver: NcConnectionMgrv2.get(bases[0]!),
});
const ignorePagination = !options;
return await baseModel.list(options, ignorePagination);
};
const getOneRow = async (
context,
{ project, table }: { project: Project; table: Model }
) => {
const response = await request(context.app)
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/find-one`)
.set('xc-auth', context.token);
return response.body;
};
const generateDefaultRowAttributes = ({
columns,
index = 0,
}: {
columns: ColumnType[];
index?: number;
}) =>
columns.reduce((acc, column) => {
if (
column.uidt === UITypes.LinkToAnotherRecord ||
column.uidt === UITypes.ForeignKey ||
column.uidt === UITypes.ID
) {
return acc;
}
acc[column.title!] = rowValue(column, index);
return acc;
}, {});
const createRow = async (
context,
{
project,
table,
index = 0,
}: {
project: Project;
table: Model;
index?: number;
}
) => {
const columns = await table.getColumns();
const rowData = generateDefaultRowAttributes({ columns, index });
const response = await request(context.app)
.post(`/api/v1/db/data/noco/${project.id}/${table.id}`)
.set('xc-auth', context.token)
.send(rowData);
return response.body;
};
const createBulkRows = async (
context,
{
project,
table,
values
}: {
project: Project;
table: Model;
values: any[];
}) => {
await request(context.app)
.post(`/api/v1/db/data/bulk/noco/${project.id}/${table.id}`)
.set('xc-auth', context.token)
.send(values)
.expect(200);
}
// Links 2 table rows together. Will create rows if ids are not provided
const createChildRow = async (
context,
{
project,
table,
childTable,
column,
rowId,
childRowId,
type,
}: {
project: Project;
table: Model;
childTable: Model;
column: Column;
rowId?: string;
childRowId?: string;
type: string;
}
) => {
if (!rowId) {
const row = await createRow(context, { project, table });
rowId = row['Id'];
}
if (!childRowId) {
const row = await createRow(context, { table: childTable, project });
childRowId = row['Id'];
}
await request(context.app)
.post(
`/api/v1/db/data/noco/${project.id}/${table.id}/${rowId}/${type}/${column.title}/${childRowId}`
)
.set('xc-auth', context.token);
const row = await getRow(context, { project, table, id: rowId });
return row;
};
export {
createRow,
getRow,
createChildRow,
getOneRow,
listRow,
generateDefaultRowAttributes,
createBulkRows
};
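Illustrative only, not part of the diff: seeding a table with this row factory, the same way the (suppressed) tableRow spec does. Paths assume a spec under tests/unit/rest/tests; the row count is arbitrary.
import init from '../../init';
import { createProject } from '../../factory/project';
import { createTable } from '../../factory/table';
import { createBulkRows, generateDefaultRowAttributes, listRow } from '../../factory/row';

it('bulk inserts generated rows', async () => {
  const context = await init();
  const project = await createProject(context);
  const table = await createTable(context, project);
  const columns = await table.getColumns();

  // One attribute object per row; the helper skips ID/LTAR/FK columns.
  const values = Array.from({ length: 25 }, (_, index) =>
    generateDefaultRowAttributes({ columns, index })
  );
  await createBulkRows(context, { project, table, values });

  // listRow reads straight through BaseModelSqlv2, bypassing the REST layer.
  const rows = await listRow({ project, table });
  console.log(rows.length); // 25
});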

42
packages/nocodb/tests/unit/factory/table.ts

@@ -0,0 +1,42 @@
import request from 'supertest';
import Model from '../../../src/lib/models/Model';
import Project from '../../../src/lib/models/Project';
import { defaultColumns } from './column';
const defaultTableValue = (context) => ({
table_name: 'Table1',
title: 'Table1_Title',
columns: defaultColumns(context),
});
const createTable = async (context, project, args = {}) => {
const defaultValue = defaultTableValue(context);
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({ ...defaultValue, ...args });
const table: Model = await Model.get(response.body.id);
return table;
};
const getTable = async ({project, name}: {project: Project, name: string}) => {
const bases = await project.getBases();
return await Model.getByIdOrName({
project_id: project.id,
base_id: bases[0].id!,
table_name: name,
});
}
const getAllTables = async ({project}: {project: Project}) => {
const bases = await project.getBases();
const tables = await Model.list({
project_id: project.id,
base_id: bases[0].id!,
});
return tables;
}
export { createTable, getTable, getAllTables };

18
packages/nocodb/tests/unit/factory/user.ts

@@ -0,0 +1,18 @@
import request from 'supertest';
import User from '../../../src/lib/models/User';
const defaultUserArgs = {
email: 'test@example.com',
password: 'A1234abh2@dsad',
};
const createUser = async (context, userArgs = {}) => {
const args = { ...defaultUserArgs, ...userArgs };
const response = await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args);
const user = await User.getByEmail(args.email);
return { token: response.body.token, user };
};
export { createUser, defaultUserArgs };

35
packages/nocodb/tests/unit/factory/view.ts

@@ -0,0 +1,35 @@
import { ViewTypes } from 'nocodb-sdk';
import request from 'supertest';
import Model from '../../../src/lib/models/Model';
import View from '../../../src/lib/models/View';
const createView = async (context, {title, table, type}: {title: string, table: Model, type: ViewTypes}) => {
const viewTypeStr = (type) => {
switch (type) {
case ViewTypes.GALLERY:
return 'galleries';
case ViewTypes.FORM:
return 'forms';
case ViewTypes.GRID:
return 'grids';
case ViewTypes.KANBAN:
return 'kanbans';
default:
throw new Error('Invalid view type');
}
};
await request(context.app)
.post(`/api/v1/db/meta/tables/${table.id}/${viewTypeStr(type)}`)
.set('xc-auth', context.token)
.send({
title,
type,
});
const view = await View.getByTitleOrId({fk_model_id: table.id, titleOrId:title}) as View;
return view
}
export {createView}
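Illustrative only, not part of the diff: creating a grid view with this factory and reading its rows over the view data API. Paths assume a spec under tests/unit/rest/tests, and the route shape is an assumption inferred from the data-alias APIs touched elsewhere in this PR, since the viewRow spec itself is suppressed below.
import { ViewTypes } from 'nocodb-sdk';
import request from 'supertest';
import init from '../../init';
import { createProject } from '../../factory/project';
import { createTable } from '../../factory/table';
import { createView } from '../../factory/view';

it('lists rows through a grid view', async () => {
  const context = await init();
  const project = await createProject(context);
  const table = await createTable(context, project);
  const view = await createView(context, { title: 'Grid View 1', table, type: ViewTypes.GRID });

  // Assumed route: /api/v1/db/data/:orgs/:projectName/:tableName/views/:viewName
  const response = await request(context.app)
    .get(`/api/v1/db/data/noco/${project.id}/${table.id}/views/${view.id}`)
    .set('xc-auth', context.token)
    .expect(200);
  console.log(response.body.pageInfo, response.body.list.length);
});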

20
packages/nocodb/tests/unit/index.test.ts

@@ -0,0 +1,20 @@
import 'mocha';
import restTests from './rest/index.test';
import modelTests from './model/index.test';
import TestDbMngr from './TestDbMngr'
process.env.NODE_ENV = 'test';
process.env.TEST = 'test';
process.env.NC_DISABLE_CACHE = 'true';
process.env.NC_DISABLE_TELE = 'true';
(async function() {
await TestDbMngr.init();
modelTests();
restTests();
run();
})();

56
packages/nocodb/tests/unit/init/cleanupMeta.ts

@@ -0,0 +1,56 @@
import Model from "../../../src/lib/models/Model";
import Project from "../../../src/lib/models/Project";
import NcConnectionMgrv2 from "../../../src/lib/utils/common/NcConnectionMgrv2";
import { orderedMetaTables } from "../../../src/lib/utils/globals";
import TestDbMngr from "../TestDbMngr";
const dropTablesAllNonExternalProjects = async () => {
const projects = await Project.list({});
const userCreatedTableNames: string[] = [];
await Promise.all(
projects
.filter((project) => project.is_meta)
.map(async (project) => {
await project.getBases();
const base = project.bases && project.bases[0];
if (!base) return;
const models = await Model.list({
project_id: project.id,
base_id: base.id!,
});
models.forEach((model) => {
userCreatedTableNames.push(model.table_name);
});
})
);
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex);
for (const tableName of userCreatedTableNames) {
await TestDbMngr.metaKnex.raw(`DROP TABLE ${tableName}`);
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex);
};
const cleanupMetaTables = async () => {
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex);
for (const tableName of orderedMetaTables) {
try {
await TestDbMngr.metaKnex.raw(`DELETE FROM ${tableName}`);
} catch (e) {}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex);
};
export default async function () {
try {
await NcConnectionMgrv2.destroyAll();
await dropTablesAllNonExternalProjects();
await cleanupMetaTables();
} catch (e) {
console.error('cleanupMeta', e);
}
}

81
packages/nocodb/tests/unit/init/cleanupSakila.ts

@@ -0,0 +1,81 @@
import Audit from '../../../src/lib/models/Audit';
import Project from '../../../src/lib/models/Project';
import TestDbMngr from '../TestDbMngr';
const dropTablesOfSakila = async () => {
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.sakilaKnex);
for(const tableName of sakilaTableNames){
try {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
} catch(e){}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.sakilaKnex);
}
const resetAndSeedSakila = async () => {
try {
await dropTablesOfSakila();
await TestDbMngr.seedSakila();
} catch (e) {
console.error('resetSakila', e);
throw e
}
}
const cleanUpSakila = async () => {
try {
const sakilaProject = await Project.getByTitle('sakila');
const audits = sakilaProject && await Audit.projectAuditList(sakilaProject.id, {});
if(audits?.length > 0) {
return await resetAndSeedSakila();
}
const tablesInSakila = await TestDbMngr.showAllTables(TestDbMngr.sakilaKnex);
await Promise.all(
tablesInSakila
.filter((tableName) => !sakilaTableNames.includes(tableName))
.map(async (tableName) => {
try {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
} catch (e) {
console.error(e);
}
})
);
} catch (e) {
console.error('cleanUpSakila', e);
}
};
const sakilaTableNames = [
'actor',
'address',
'category',
'city',
'country',
'customer',
'film',
'film_actor',
'film_category',
'film_text',
'inventory',
'language',
'payment',
'rental',
'staff',
'store',
'actor_info',
'customer_list',
'film_list',
'nicer_but_slower_film_list',
'sales_by_film_category',
'sales_by_store',
'staff_list',
];
export { cleanUpSakila, resetAndSeedSakila };

12
packages/nocodb/tests/unit/init/db.ts

@@ -0,0 +1,12 @@
import { DbConfig } from "../../../src/interface/config";
const isSqlite = (context) =>{
return (context.dbConfig as DbConfig).client === 'sqlite' || (context.dbConfig as DbConfig).client === 'sqlite3';
}
const isMysql = (context) =>
(context.dbConfig as DbConfig).client === 'mysql' ||
(context.dbConfig as DbConfig).client === 'mysql2';
export { isSqlite, isMysql };

42
packages/nocodb/tests/unit/init/index.ts

@@ -0,0 +1,42 @@
import express from 'express';
import { Noco } from '../../../src/lib';
import cleanupMeta from './cleanupMeta';
import {cleanUpSakila, resetAndSeedSakila} from './cleanupSakila';
import { createUser } from '../factory/user';
let server;
const serverInit = async () => {
const serverInstance = express();
serverInstance.enable('trust proxy');
serverInstance.use(await Noco.init());
serverInstance.use(function(req, res, next){
// 500 sec timeout
req.setTimeout(500000, function(){
console.log('Request has timed out.');
res.sendStatus(408);
});
next();
});
return serverInstance;
};
const isFirstTimeRun = () => !server
export default async function () {
const {default: TestDbMngr} = await import('../TestDbMngr');
if (isFirstTimeRun()) {
await resetAndSeedSakila();
server = await serverInit();
}
await cleanUpSakila();
await cleanupMeta();
const { token } = await createUser({ app: server }, { roles: 'editor' });
return { app: server, token, dbConfig: TestDbMngr.dbConfig };
}

10
packages/nocodb/tests/unit/model/index.test.ts

@@ -0,0 +1,10 @@
import 'mocha';
import baseModelSqlTest from './tests/baseModelSql.test';
function modelTests() {
baseModelSqlTest();
}
export default function () {
describe('Model', modelTests);
}

500
packages/nocodb/tests/unit/model/tests/baseModelSql.test.ts

@@ -0,0 +1,500 @@
import 'mocha';
import init from '../../init';
import { BaseModelSqlv2 } from '../../../../src/lib/db/sql-data-mapper/lib/sql/BaseModelSqlv2';
import { createProject } from '../../factory/project';
import { createTable } from '../../factory/table';
import NcConnectionMgrv2 from '../../../../src/lib/utils/common/NcConnectionMgrv2';
import Base from '../../../../src/lib/models/Base';
import Model from '../../../../src/lib/models/Model';
import Project from '../../../../src/lib/models/Project';
import View from '../../../../src/lib/models/View';
import { createRow, generateDefaultRowAttributes } from '../../factory/row';
import Audit from '../../../../src/lib/models/Audit';
import { expect } from 'chai';
import Filter from '../../../../src/lib/models/Filter';
import { createLtarColumn } from '../../factory/column';
import LinkToAnotherRecordColumn from '../../../../src/lib/models/LinkToAnotherRecordColumn';
function baseModelSqlTests() {
let context;
let project: Project;
let table: Model;
let view: View;
let baseModelSql: BaseModelSqlv2;
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project);
view = (await table.getViews())[0];
const base = await Base.get(table.base_id);
baseModelSql = new BaseModelSqlv2({
dbDriver: NcConnectionMgrv2.get(base),
model: table,
view
})
});
it('Insert record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const columns = await table.getColumns();
const inputData = generateDefaultRowAttributes({columns})
const response = await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request);
const insertedRow = (await baseModelSql.list())[0];
expect(insertedRow).to.include(inputData);
expect(insertedRow).to.include(response);
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'INSERT');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'INSERT',
description: '1 inserted into Table1_Title',
});
});
it('Bulk insert record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const insertedRows = await baseModelSql.list();
bulkData.forEach((inputData, index) => {
expect(insertedRows[index]).to.include(inputData);
});
const rowBulkInsertedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_INSERT');
expect(rowBulkInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_INSERT',
status: null,
description: '10 records bulk inserted into Table1_Title',
details: null,
});
});
it('Update record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const columns = await table.getColumns();
await baseModelSql.insert(generateDefaultRowAttributes({columns}));
const rowId = 1;
await baseModelSql.updateByPk(rowId, {Title: 'test'},undefined, request);
const updatedRow = await baseModelSql.readByPk(1);
expect(updatedRow).to.include({Id: rowId, Title: 'test'});
const rowUpdatedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'UPDATE');
expect(rowUpdatedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'UPDATE',
description: '1 updated in Table1_Title',
});
});
it('Bulk update record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const insertedRows: any[] = await baseModelSql.list();
await baseModelSql.bulkUpdate(insertedRows.map((row)=> ({...row, Title: `new-${row['Title']}`})), { cookie: request });
const updatedRows = await baseModelSql.list();
updatedRows.forEach((row, index) => {
expect(row['Title']).to.equal(`new-test-${index}`);
})
const rowBulkUpdateAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
expect(rowBulkUpdateAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_UPDATE',
status: null,
description: '10 records bulk updated in Table1_Title',
details: null,
});
});
it('Bulk update all record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const idColumn = columns.find((column) => column.title === 'Id')!;
await baseModelSql.bulkUpdateAll({filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
})
]}, ({Title: 'new-1'}), { cookie: request });
const updatedRows = await baseModelSql.list();
updatedRows.forEach((row) => {
if (row['Id'] < 5) expect(row['Title']).to.equal('new-1');
})
const rowBulkUpdateAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
expect(rowBulkUpdateAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_UPDATE',
status: null,
description: '4 records bulk updated in Table1_Title',
details: null,
});
});
it('Delete record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'},
params: {id: 1}
}
const columns = await table.getColumns();
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const rowIdToDeleted = 1;
await baseModelSql.delByPk(rowIdToDeleted,undefined ,request);
const deletedRow = await baseModelSql.readByPk(rowIdToDeleted);
expect(deletedRow).to.be.undefined;
console.log('Delete record', await Audit.projectAuditList(project.id, {}));
const rowDeletedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'DELETE');
expect(rowDeletedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'DELETE',
description: '1 deleted from Table1_Title',
});
});
it('Bulk delete records', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const insertedRows: any[] = await baseModelSql.list();
await baseModelSql.bulkDelete(
insertedRows
.filter((row) => row['Id'] < 5)
.map((row)=> ({'id': row['Id']})),
{ cookie: request }
);
const remainingRows = await baseModelSql.list();
expect(remainingRows).to.length(6);
const rowBulkDeleteAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_DELETE');
expect(rowBulkDeleteAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_DELETE',
status: null,
description: '4 records bulk deleted in Table1_Title',
details: null,
});
});
it('Bulk delete all record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const idColumn = columns.find((column) => column.title === 'Id')!;
await baseModelSql.bulkDeleteAll({filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
})
]}, { cookie: request });
const remainingRows = await baseModelSql.list();
expect(remainingRows).to.length(6);
const rowBulkDeleteAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_DELETE');
expect(rowBulkDeleteAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_DELETE',
status: null,
description: '4 records bulk deleted in Table1_Title',
details: null,
});
});
it('Nested insert', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
})
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: "hm"
})
const childRow = await createRow(context, {
project,
table: childTable,
})
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
await baseModelSql.nestedInsert(
{...generateDefaultRowAttributes({columns}), [ltarColumn.title]: [{'Id': childRow['Id']}]},
undefined,
request
);
const childBaseModel = new BaseModelSqlv2({
dbDriver: NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view
})
const insertedChildRow = await childBaseModel.readByPk(childRow['Id']);
expect(insertedChildRow[childCol.column_name]).to.equal(childRow['Id']);
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {}))
.filter((audit) => audit.fk_model_id === table.id)
.find((audit) => audit.op_sub_type === 'INSERT');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'INSERT',
description: '1 inserted into Table1_Title',
});
})
it('Link child', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
})
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: "hm"
})
const insertedChildRow = await createRow(context, {
project,
table: childTable,
})
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request);
const insertedRow = await baseModelSql.readByPk(1);
await baseModelSql.addChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request
});
const childBaseModel = new BaseModelSqlv2({
dbDriver: NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view
})
const updatedChildRow = await childBaseModel.readByPk(insertedChildRow['Id']);
expect(updatedChildRow[childCol.column_name]).to.equal(insertedRow['Id']);
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {}))
.filter((audit) => audit.fk_model_id === table.id)
.find((audit) => audit.op_sub_type === 'LINK_RECORD');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'LINK_RECORD',
description: 'Record [id:1] record linked with record [id:1] record in Table1_Title',
});
})
it('Unlink child', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
})
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: "hm"
})
const insertedChildRow = await createRow(context, {
project,
table: childTable,
})
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request);
const insertedRow = await baseModelSql.readByPk(1);
await baseModelSql.addChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request
});
await baseModelSql.removeChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request
});
const childBaseModel = new BaseModelSqlv2({
dbDriver: NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view
})
const updatedChildRow = await childBaseModel.readByPk(insertedChildRow['Id']);
expect(updatedChildRow[childCol.column_name]).to.be.null;
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {}))
.filter((audit) => audit.fk_model_id === table.id)
.find((audit) => audit.op_sub_type === 'UNLINK_RECORD');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'UNLINK_RECORD',
description: 'Record [id:1] record unlinked with record [id:1] record in Table1_Title',
});
})
}
export default function () {
describe('BaseModelSql', baseModelSqlTests);
}

18
packages/nocodb/tests/unit/rest/index.test.ts

@@ -0,0 +1,18 @@
import 'mocha';
import authTests from './tests/auth.test';
import projectTests from './tests/project.test';
import tableTests from './tests/table.test';
import tableRowTests from './tests/tableRow.test';
import viewRowTests from './tests/viewRow.test';
function restTests() {
authTests();
projectTests();
tableTests();
tableRowTests();
viewRowTests();
}
export default function () {
describe('Rest', restTests);
}

169
packages/nocodb/tests/unit/rest/tests/auth.test.ts

@@ -0,0 +1,169 @@
import { expect } from 'chai';
import 'mocha';
import request from 'supertest';
import init from '../../init';
import { defaultUserArgs } from '../../factory/user';
function authTests() {
let context;
beforeEach(async function () {
context = await init();
});
it('Signup with valid email', async () => {
const response = await request(context.app)
.post('/api/v1/auth/user/signup')
.send({ email: 'new@example.com', password: defaultUserArgs.password })
.expect(200)
const token = response.body.token;
expect(token).to.be.a('string');
});
it('Signup with invalid email', async () => {
await request(context.app)
.post('/api/v1/auth/user/signup')
.send({ email: 'test', password: defaultUserArgs.password })
.expect(400);
});
it('Signup with invalid password', async () => {
await request(context.app)
.post('/api/v1/auth/user/signup')
.send({ email: defaultUserArgs.email, password: 'weakpass' })
.expect(400);
});
it('Signin with valid credentials', async () => {
const response = await request(context.app)
.post('/api/v1/auth/user/signin')
.send({
email: defaultUserArgs.email,
password: defaultUserArgs.password,
})
.expect(200);
const token = response.body.token;
expect(token).to.be.a('string');
});
it('Signin without email and password', async () => {
await request(context.app)
.post('/api/v1/auth/user/signin')
// pass empty data in the request
.send({})
.expect(400);
});
it('Signin with invalid credentials', async () => {
await request(context.app)
.post('/api/v1/auth/user/signin')
.send({ email: 'abc@abc.com', password: defaultUserArgs.password })
.expect(400);
});
it('Signin with invalid password', async () => {
await request(context.app)
.post('/api/v1/auth/user/signin')
.send({ email: defaultUserArgs.email, password: 'wrongPassword' })
.expect(400);
});
it('me without token', async () => {
const response = await request(context.app)
.get('/api/v1/auth/user/me')
.unset('xc-auth')
.expect(200);
if (!response.body?.roles?.guest) {
throw new Error('User should be guest');
}
});
it('me with token', async () => {
const response = await request(context.app)
.get('/api/v1/auth/user/me')
.set('xc-auth', context.token)
.expect(200);
const email = response.body.email;
expect(email).to.equal(defaultUserArgs.email);
});
it('Forgot password with a non-existing email id', async () => {
await request(context.app)
.post('/api/v1/auth/password/forgot')
.send({ email: 'nonexisting@email.com' })
.expect(400);
});
// todo: fix mailer issues
// it('Forgot password with an existing email id', function () {});
it('Change password', async () => {
await request(context.app)
.post('/api/v1/auth/password/change')
.set('xc-auth', context.token)
.send({
currentPassword: defaultUserArgs.password,
newPassword: 'NEW' + defaultUserArgs.password,
})
.expect(200);
});
it('Change password - after logout', async () => {
await request(context.app)
.post('/api/v1/auth/password/change')
.unset('xc-auth')
.send({
currentPassword: defaultUserArgs.password,
newPassword: 'NEW' + defaultUserArgs.password,
})
.expect(401);
});
// todo:
it('Reset Password with an invalid token', async () => {
await request(context.app)
.post('/api/v1/auth/password/reset/someRandomValue')
.send({ email: defaultUserArgs.email })
.expect(400);
});
it('Email validate with an invalid token', async () => {
await request(context.app)
.post('/api/v1/auth/email/validate/someRandomValue')
.send({ email: defaultUserArgs.email })
.expect(400);
});
// todo:
// it('Email validate with a valid token', async () => {
// // await request(context.app)
// // .post('/auth/email/validate/someRandomValue')
// // .send({email: EMAIL_ID})
// // .expect(500, done);
// });
// todo:
// it('Forgot password validate with a valid token', async () => {
// // await request(context.app)
// // .post('/auth/token/validate/someRandomValue')
// // .send({email: EMAIL_ID})
// // .expect(500, done);
// });
// todo:
// it('Reset Password with an valid token', async () => {
// // await request(context.app)
// // .post('/auth/password/reset/someRandomValue')
// // .send({password: 'anewpassword'})
// // .expect(500, done);
// });
// todo: refresh token api
}
export default function () {
describe('Auth', authTests);
}

268
packages/nocodb/tests/unit/rest/tests/project.test.ts

@@ -0,0 +1,268 @@
import 'mocha';
import request from 'supertest';
import init from '../../init/index';
import { createProject, createSharedBase } from '../../factory/project';
import { beforeEach } from 'mocha';
import { Exception } from 'handlebars';
import Project from '../../../../src/lib/models/Project';
function projectTest() {
let context;
let project;
beforeEach(async function () {
context = await init();
project = await createProject(context);
});
it('Get project info', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/info`)
.set('xc-auth', context.token)
.send({})
.expect(200);
});
// todo: Test by creating models under project and check if the UI ACL is working
it('UI ACL', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/visibility-rules`)
.set('xc-auth', context.token)
.send({})
.expect(200);
});
// todo: Test creating visibility set
it('List projects', async () => {
const response = await request(context.app)
.get('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send({})
.expect(200);
if (response.body.list.length !== 1) throw new Error('Should list only 1 project');
if (!response.body.pageInfo) throw new Error('Should have pagination info');
});
it('Create project', async () => {
const response = await request(context.app)
.post('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send({
title: 'Title1',
})
.expect(200);
const newProject = await Project.getByTitleOrId(response.body.id);
if (!newProject) throw new Error('Project not created');
});
it('Create projects with existing title', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/`)
.set('xc-auth', context.token)
.send({
title: project.title,
})
.expect(400);
});
// todo: fix passport user role population bug
// it('Delete project', async async () => {
// const toBeDeletedProject = await createProject(app, token, {
// title: 'deletedTitle',
// });
// await request(app)
// .delete('/api/v1/db/meta/projects/${toBeDeletedProject.id}')
// .set('xc-auth', token)
// .send({
// title: 'Title1',
// })
// .expect(200, async (err) => {
// // console.log(res);
//
// const deletedProject = await Project.getByTitleOrId(
// toBeDeletedProject.id
// );
// if (deletedProject) return new Error('Project not delete');
// new Error();
// });
// });
it('Read project', async () => {
const response = await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}`)
.set('xc-auth', context.token)
.send()
.expect(200);
if (response.body.id !== project.id) throw new Error('Got the wrong project');
});
it('Update projects', async () => {
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}`)
.set('xc-auth', context.token)
.send({
title: 'NewTitle',
})
.expect(200);
const newProject = await Project.getByTitleOrId(project.id);
if (newProject.title !== 'NewTitle') {
throw new Error('Project not updated');
}
});
it('Update projects with existing title', async function () {
const newProject = await createProject(context, {
title: 'NewTitle1',
});
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}`)
.set('xc-auth', context.token)
.send({
title: newProject.title,
})
.expect(400);
});
it('Create project shared base', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'viewer',
password: 'test',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (
!updatedProject.uuid ||
updatedProject.roles !== 'viewer' ||
updatedProject.password !== 'test'
) {
throw new Error('Shared base not configured properly');
}
});
it('Created project shared base should have only editor or viewer role', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'commenter',
password: 'test',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.roles === 'commenter') {
throw new Error('Shared base not configured properly');
}
});
it('Updated project shared base should have only editor or viewer role', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'commenter',
password: 'test',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.roles === 'commenter') {
throw new Exception('Shared base not updated properly');
}
});
it('Updated project shared base', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'editor',
password: 'test',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.roles !== 'editor') {
throw new Exception('Shared base not updated properly');
}
});
it('Get project shared base', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send()
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (!updatedProject.uuid) {
throw new Exception('Shared base not created');
}
});
it('Delete project shared base', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.delete(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send()
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.uuid) {
throw new Exception('Shared base not deleted');
}
});
// todo: Do compare api test
it('Get meta diff', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/meta-diff`)
.set('xc-auth', context.token)
.send()
.expect(200);
});
it('Meta diff sync', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/meta-diff`)
.set('xc-auth', context.token)
.send()
.expect(200);
});
// todo: improve test. Check whether all the actions are present in the response and are correct as well
it('Get project audits', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/audits`)
.set('xc-auth', context.token)
.send()
.expect(200);
});
}
export default function () {
describe('Project', projectTest);
}

253
packages/nocodb/tests/unit/rest/tests/table.test.ts

@@ -0,0 +1,253 @@
// import { expect } from 'chai';
import 'mocha';
import request from 'supertest';
import init from '../../init';
import { createTable, getAllTables } from '../../factory/table';
import { createProject } from '../../factory/project';
import { defaultColumns } from '../../factory/column';
import Model from '../../../../src/lib/models/Model';
function tableTest() {
let context;
let project;
let table;
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project);
});
it('Get table list', async function () {
const response = await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({})
.expect(200);
if (response.body.list.length !== 1) throw new Error('Wrong number of tables');
});
it('Create table', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'table2',
title: 'new_title_2',
columns: defaultColumns(context),
})
.expect(200);
const tables = await getAllTables({ project });
if (tables.length !== 2) {
throw new Error('Table was not created');
}
if (response.body.columns.length !== defaultColumns(context).length) {
throw new Error('Columns not saved properly');
}
if (
!(
response.body.table_name.startsWith(project.prefix) &&
response.body.table_name.endsWith('table2')
)
) {
throw new Error('table name not configured properly');
}
});
it('Create table with no table name', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: undefined,
title: 'new_title',
columns: defaultColumns(context),
})
.expect(400);
if (
!response.text.includes(
'Missing table name `table_name` property in request body'
)
) {
console.error(response.text);
throw new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
console.log(tables);
throw new Error(
`Tables should not be created, tables.length:${tables.length}`
);
}
});
it('Create table with same table name', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: table.table_name,
title: 'New_title',
columns: defaultColumns(context),
})
.expect(400);
if (!response.text.includes('Duplicate table name')) {
console.error(response.text);
throw new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
throw new Error('Tables should not be created');
}
});
it('Create table with same title', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'New_table_name',
title: table.title,
columns: defaultColumns(context),
})
.expect(400);
if (!response.text.includes('Duplicate table alias')) {
console.error(response.text);
throw new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
throw new Error('Tables should not be created');
}
});
it('Create table with title length more than the limit', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'a'.repeat(256),
title: 'new_title',
columns: defaultColumns(context),
})
.expect(400);
if (!response.text.includes('Table name exceeds ')) {
console.error(response.text);
throw new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
throw new Error('Tables should not be created');
}
});
it('Create table with title having leading white space', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'table_name_with_whitespace ',
title: 'new_title',
columns: defaultColumns(context),
})
.expect(400);
if (
!response.text.includes(
'Leading or trailing whitespace not allowed in table names'
)
) {
console.error(response.text);
throw new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
throw new Error('Tables should not be created');
}
});
it('Update table', async function () {
const response = await request(context.app)
.patch(`/api/v1/db/meta/tables/${table.id}`)
.set('xc-auth', context.token)
.send({
project_id: project.id,
table_name: 'new_title',
})
.expect(200);
const updatedTable = await Model.get(table.id);
if (!updatedTable.table_name.endsWith('new_title')) {
throw new Error('Table was not updated');
}
});
it('Delete table', async function () {
const response = await request(context.app)
.delete(`/api/v1/db/meta/tables/${table.id}`)
.set('xc-auth', context.token)
.send({})
.expect(200);
const tables = await getAllTables({ project });
if (tables.length !== 0) {
throw new Error('Table is not deleted');
}
});
// todo: Check the condition where the table being deleted is referred to by multiple tables
// todo: Check if views are also deleted
it('Get table', async function () {
const response = await request(context.app)
.get(`/api/v1/db/meta/tables/${table.id}`)
.set('xc-auth', context.token)
.send({})
.expect(200);
if (response.body.id !== table.id) throw new Error('Wrong table');
});
// todo: flaky test, order condition is sometimes not met
it('Reorder table', async function () {
const newOrder = table.order === 0 ? 1 : 0;
const response = await request(context.app)
.post(`/api/v1/db/meta/tables/${table.id}/reorder`)
.set('xc-auth', context.token)
.send({
order: newOrder,
})
.expect(200);
// .expect(200, async (err) => {
// if (err) return new Error(err);
// const updatedTable = await Model.get(table.id);
// console.log(Number(updatedTable.order), newOrder);
// if (Number(updatedTable.order) !== newOrder) {
// return new Error('Reordering failed');
// }
// new Error();
// });
});
}
export default async function () {
describe('Table', tableTest);
}

2031
packages/nocodb/tests/unit/rest/tests/tableRow.test.ts

File diff suppressed because it is too large

1232
packages/nocodb/tests/unit/rest/tests/viewRow.test.ts

File diff suppressed because it is too large

72
packages/nocodb/tests/unit/tsconfig.json

@@ -0,0 +1,72 @@
{
"compilerOptions": {
"skipLibCheck": true,
"composite": true,
"target": "es2017",
"outDir": "build/main",
"rootDir": "src",
"moduleResolution": "node",
"module": "commonjs",
"declaration": true,
"inlineSourceMap": true,
"esModuleInterop": true
/* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
"allowJs": false,
// "strict": true /* Enable all strict type-checking options. */,
/* Strict Type-Checking Options */
// "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
// "strictNullChecks": true /* Enable strict null checks. */,
// "strictFunctionTypes": true /* Enable strict checking of function types. */,
// "strictPropertyInitialization": true /* Enable strict checking of property initialization in classes. */,
// "noImplicitThis": true /* Raise error on 'this' expressions with an implied 'any' type. */,
// "alwaysStrict": true /* Parse in strict mode and emit "use strict" for each source file. */,
"resolveJsonModule": true,
/* Additional Checks */
"noUnusedLocals": false
/* Report errors on unused locals. */,
"noUnusedParameters": false
/* Report errors on unused parameters. */,
"noImplicitReturns": false
/* Report error when not all code paths in function return a value. */,
"noFallthroughCasesInSwitch": false
/* Report errors for fallthrough cases in switch statement. */,
/* Debugging Options */
"traceResolution": false
/* Report module resolution log messages. */,
"listEmittedFiles": false
/* Print names of generated files part of the compilation. */,
"listFiles": false
/* Print names of files part of the compilation. */,
"pretty": true
/* Stylize errors and messages using color and context. */,
/* Experimental Options */
// "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */,
// "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */,
"lib": [
"es2017"
],
"types": [
"mocha", "node"
],
"typeRoots": [
"node_modules/@types",
"src/types"
]
},
"parserOptions": {
"sourceType": "module",
"tsconfigRootDir": "./",
"project": "./tsconfig.json",
},
"include": [
"./tests/**/**/**.ts",
"./tests/**/**.ts"
// "**/*.ts",
// "**/*.json"
],
"exclude": [
],
"compileOnSave": false
}