
Merge pull request #5289 from nocodb/test/unit-on-pg

test: ut on pg
pull/5301/head
Raju Udava, 2 years ago, committed by GitHub
commit 49354ca4d4
12 changed files (changed-line counts in parentheses):

  1. .github/workflows/ci-cd.yml (41)
  2. packages/nocodb/package.json (1)
  3. packages/nocodb/tests/unit/.pg.env (5)
  4. packages/nocodb/tests/unit/TestDbMngr.ts (173)
  5. packages/nocodb/tests/unit/factory/column.ts (8)
  6. packages/nocodb/tests/unit/factory/project.ts (5)
  7. packages/nocodb/tests/unit/init/cleanupMeta.ts (17)
  8. packages/nocodb/tests/unit/init/cleanupSakila.ts (58)
  9. packages/nocodb/tests/unit/init/db.ts (16)
  10. packages/nocodb/tests/unit/model/tests/baseModelSql.test.ts (315)
  11. packages/nocodb/tests/unit/rest/tests/tableRow.test.ts (52)
  12. packages/nocodb/tests/unit/rest/tests/viewRow.test.ts (20)

.github/workflows/ci-cd.yml (41 lines changed)

@@ -63,6 +63,47 @@ jobs:
- name: run unit tests
working-directory: ./packages/nocodb
run: npm run test:unit
unit-tests-pg:
runs-on: ubuntu-20.04
timeout-minutes: 40
if: ${{ github.event_name == 'push' || contains(github.event.pull_request.labels.*.name, 'trigger-CI') || !github.event.pull_request.draft }}
steps:
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 16.15.0
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Cache node modules
uses: actions/cache@v3
env:
cache-name: cache-node-modules
with:
# npm cache files are stored in `~/.npm` on Linux/macOS
path: ~/.npm
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
- name: setup pg
working-directory: ./
run: docker-compose -f ./tests/playwright/scripts/docker-compose-playwright-pg.yml up -d &
- name: install dependencies nocodb-sdk
working-directory: ./packages/nocodb-sdk
run: npm install
- name: build nocodb-sdk
working-directory: ./packages/nocodb-sdk
run: npm run build:main
- name: Install dependencies
working-directory: ./packages/nocodb
run: npm install
- name: run unit tests
working-directory: ./packages/nocodb
run: npm run test:unit:pg
playwright-mysql-1:
if: ${{ github.event_name == 'push' || contains(github.event.pull_request.labels.*.name, 'trigger-CI') || !github.event.pull_request.draft }}
uses: ./.github/workflows/playwright-test-workflow.yml

packages/nocodb/package.json (1 line changed)

@@ -30,6 +30,7 @@
"unit-test": "cross-env TS_NODE_PROJECT=tsconfig.json mocha --require ts-node/register 'src/__tests__/unit/**/*.test.ts' --recursive --check-leaks --exit",
"local:test:unit": "cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay",
"test:unit": "cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay",
"test:unit:pg": "cp tests/unit/.pg.env tests/unit/.env; cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay",
"test:lint": "tslint --project . && prettier \"src/**/*.ts\" --list-different",
"watch": "run-s clean build:main && run-p \"build:main -- -w\" \"test:unit -- --watch\"",
"clean": "trash build src/test",

packages/nocodb/tests/unit/.pg.env (5 lines changed)

@@ -0,0 +1,5 @@
DB_USER=postgres
DB_PASSWORD=password
DB_PORT=5432
DB_HOST=localhost
DB_CLIENT=pg
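
These variables are read by TestDbMngr.populateConnectionConfig (see TestDbMngr.ts below) once the test:unit:pg script has copied this file to tests/unit/.env. A minimal sketch of that mapping, with illustrative fallbacks rather than the exact defaults the class uses:

// sketch: how the .pg.env variables become the test connection config
const connection = {
  user: process.env['DB_USER'] || 'postgres',         // illustrative fallback
  password: process.env['DB_PASSWORD'] || 'password', // illustrative fallback
  host: process.env['DB_HOST'] || 'localhost',
  port: Number(process.env['DB_PORT']) || 5432,
  client: process.env['DB_CLIENT'] || 'mysql2',       // 'pg' once .pg.env is loaded
};
console.log(connection);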

packages/nocodb/tests/unit/TestDbMngr.ts (173 lines changed)

@@ -1,9 +1,9 @@
import { DbConfig } from "../../src/interface/config";
import { NcConfigFactory } from "../../src/lib";
import SqlMgrv2 from "../../src/lib/db/sql-mgr/v2/SqlMgrv2";
import { DbConfig } from '../../src/interface/config';
import { NcConfigFactory } from '../../src/lib';
import SqlMgrv2 from '../../src/lib/db/sql-mgr/v2/SqlMgrv2';
import fs from 'fs';
import { Knex, knex } from "knex";
import process from "process";
import { Knex, knex } from 'knex';
import process from 'process';
export default class TestDbMngr {
public static readonly dbName = 'test_meta';
@@ -17,7 +17,15 @@ export default class TestDbMngr {
host: 'localhost',
port: 3306,
client: 'mysql2',
}
};
public static pgConnection = {
user: 'postgres',
password: 'password',
host: 'localhost',
port: 5432,
client: 'pg',
};
public static connection: {
user: string;
@@ -36,8 +44,10 @@ export default class TestDbMngr {
password: process.env['DB_PASSWORD'] || password,
host: process.env['DB_HOST'] || host,
port: Number(process.env['DB_PORT']) || port,
client
}
client: process.env['DB_CLIENT'] || client,
};
console.log(TestDbMngr.connection);
}
static async testConnection(config: DbConfig) {
@@ -51,33 +61,37 @@ export default class TestDbMngr {
}
static async init() {
TestDbMngr.populateConnectionConfig()
TestDbMngr.populateConnectionConfig();
if(await TestDbMngr.isMysqlConfigured()){
await TestDbMngr.connectMysql();
// common for both pg and mysql
if (await TestDbMngr.isDbConfigured()) {
await TestDbMngr.connectDb();
} else {
console.log('Mysql is not configured. Switching to sqlite');
await TestDbMngr.switchToSqlite();
}
}
static async isMysqlConfigured() {
private static async isDbConfigured() {
const { user, password, host, port, client } = TestDbMngr.connection;
const config = NcConfigFactory.urlToDbConfig(`${client}://${user}:${password}@${host}:${port}`);
const config = NcConfigFactory.urlToDbConfig(
`${client}://${user}:${password}@${host}:${port}`
);
config.connection = {
user,
password,
host,
port,
}
};
const result = await TestDbMngr.testConnection(config);
return result.code !== -1;
}
static async connectMysql() {
static async connectDb() {
const { user, password, host, port, client } = TestDbMngr.connection;
if(!process.env[`DATABASE_URL`]){
process.env[`DATABASE_URL`] = `${client}://${user}:${password}@${host}:${port}/${TestDbMngr.dbName}`;
if (!process.env[`DATABASE_URL`]) {
process.env[
`DATABASE_URL`
] = `${client}://${user}:${password}@${host}:${port}/${TestDbMngr.dbName}`;
}
TestDbMngr.dbConfig = NcConfigFactory.urlToDbConfig(
@@ -95,21 +109,21 @@ export default class TestDbMngr {
tn: 'camelize',
cn: 'camelize',
},
}
};
await TestDbMngr.setupMeta();
await TestDbMngr.setupSakila();
}
static async setupMeta() {
if(TestDbMngr.metaKnex){
if (TestDbMngr.metaKnex) {
await TestDbMngr.metaKnex.destroy();
}
if(TestDbMngr.isSqlite()){
if (TestDbMngr.isSqlite()) {
await TestDbMngr.resetMetaSqlite();
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
return
return;
}
TestDbMngr.metaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
@@ -120,23 +134,29 @@ export default class TestDbMngr {
await TestDbMngr.useDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
}
static async setupSakila () {
if(TestDbMngr.sakilaKnex) {
static async setupSakila() {
if (TestDbMngr.sakilaKnex) {
await TestDbMngr.sakilaKnex.destroy();
}
if(TestDbMngr.isSqlite()){
if (TestDbMngr.isSqlite()) {
await TestDbMngr.seedSakila();
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
return
return;
}
TestDbMngr.sakilaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
await TestDbMngr.resetDatabase(TestDbMngr.sakilaKnex, TestDbMngr.sakilaDbName);
await TestDbMngr.resetDatabase(
TestDbMngr.sakilaKnex,
TestDbMngr.sakilaDbName
);
await TestDbMngr.sakilaKnex.destroy();
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
await TestDbMngr.useDatabase(TestDbMngr.sakilaKnex, TestDbMngr.sakilaDbName);
await TestDbMngr.useDatabase(
TestDbMngr.sakilaKnex,
TestDbMngr.sakilaDbName
);
}
static async switchToSqlite() {
@@ -161,23 +181,28 @@ export default class TestDbMngr {
cn: 'camelize',
},
},
}
};
process.env[`NC_DB`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.db`;
process.env[
`NC_DB`
] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.db`;
await TestDbMngr.setupMeta();
await TestDbMngr.setupSakila();
}
private static async resetDatabase(knexClient, dbName) {
if(TestDbMngr.isSqlite()){
if (TestDbMngr.isSqlite()) {
// return knexClient.raw(`DELETE FROM sqlite_sequence`);
} else {
try {
await knexClient.raw(`DROP DATABASE ${dbName}`);
} catch(e) {}
} catch (e) {}
await knexClient.raw(`CREATE DATABASE ${dbName}`);
console.log(`Database ${dbName} created`);
await knexClient.raw(`USE ${dbName}`);
if (!TestDbMngr.isPg()) {
await knexClient.raw(`USE ${dbName}`);
}
}
}
@@ -185,14 +210,18 @@ export default class TestDbMngr {
return TestDbMngr.dbConfig.client === 'sqlite3';
}
static isPg() {
return TestDbMngr.dbConfig.client === 'pg';
}
private static async useDatabase(knexClient, dbName) {
if(!TestDbMngr.isSqlite()){
if (!TestDbMngr.isSqlite() && !TestDbMngr.isPg()) {
await knexClient.raw(`USE ${dbName}`);
}
}
static getDbConfigWithNoDb() {
const dbConfig =JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
const dbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
delete dbConfig.connection.database;
return dbConfig;
}
@@ -202,7 +231,7 @@ export default class TestDbMngr {
}
private static resetMetaSqlite() {
if(fs.existsSync(`${__dirname}/test_meta.db`)){
if (fs.existsSync(`${__dirname}/test_meta.db`)) {
fs.unlinkSync(`${__dirname}/test_meta.db`);
}
}
@@ -210,9 +239,9 @@ export default class TestDbMngr {
static getSakilaDbConfig() {
const sakilaDbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
sakilaDbConfig.connection.database = TestDbMngr.sakilaDbName;
sakilaDbConfig.connection.multipleStatements = true
if(TestDbMngr.isSqlite()){
sakilaDbConfig.connection.filename = `${__dirname}/test_sakila.db`;
sakilaDbConfig.connection.multipleStatements = true;
if (TestDbMngr.isSqlite()) {
sakilaDbConfig.connection.filename = `${__dirname}/test_sakila.db`;
}
return sakilaDbConfig;
}
@@ -220,47 +249,73 @@ export default class TestDbMngr {
static async seedSakila() {
const testsDir = __dirname.replace('tests/unit', 'tests');
if(TestDbMngr.isSqlite()){
if(fs.existsSync(`${__dirname}/test_sakila.db`)){
if (TestDbMngr.isSqlite()) {
if (fs.existsSync(`${__dirname}/test_sakila.db`)) {
fs.unlinkSync(`${__dirname}/test_sakila.db`);
}
fs.copyFileSync(`${testsDir}/sqlite-sakila-db/sakila.db`, `${__dirname}/test_sakila.db`);
fs.copyFileSync(
`${testsDir}/sqlite-sakila-db/sakila.db`,
`${__dirname}/test_sakila.db`
);
} else if (TestDbMngr.isPg()) {
const schemaFile = fs
.readFileSync(`${testsDir}/pg-sakila-db/01-postgres-sakila-schema.sql`)
.toString();
const dataFile = fs
.readFileSync(
`${testsDir}/pg-sakila-db/02-postgres-sakila-insert-data.sql`
)
.toString();
await TestDbMngr.sakilaKnex.raw(schemaFile);
await TestDbMngr.sakilaKnex.raw(dataFile);
} else {
const schemaFile = fs.readFileSync(`${testsDir}/mysql-sakila-db/03-test-sakila-schema.sql`).toString();
const dataFile = fs.readFileSync(`${testsDir}/mysql-sakila-db/04-test-sakila-data.sql`).toString();
const schemaFile = fs
.readFileSync(`${testsDir}/mysql-sakila-db/03-test-sakila-schema.sql`)
.toString();
const dataFile = fs
.readFileSync(`${testsDir}/mysql-sakila-db/04-test-sakila-data.sql`)
.toString();
await TestDbMngr.sakilaKnex.raw(schemaFile);
await TestDbMngr.sakilaKnex.raw(dataFile);
}
}
static async disableForeignKeyChecks(knexClient) {
if(TestDbMngr.isSqlite()){
await knexClient.raw("PRAGMA foreign_keys = OFF");
}
else {
if (TestDbMngr.isSqlite()) {
await knexClient.raw('PRAGMA foreign_keys = OFF');
} else if (TestDbMngr.isPg()) {
await knexClient.raw(`SET session_replication_role = 'replica'`);
} else {
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 0`);
}
}
static async enableForeignKeyChecks(knexClient) {
if(TestDbMngr.isSqlite()){
if (TestDbMngr.isSqlite()) {
await knexClient.raw(`PRAGMA foreign_keys = ON;`);
}
else {
} else if (TestDbMngr.isPg()) {
await knexClient.raw(`SET session_replication_role = 'origin'`);
} else {
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 1`);
}
}
static async showAllTables(knexClient) {
if(TestDbMngr.isSqlite()){
const tables = await knexClient.raw(`SELECT name FROM sqlite_master WHERE type='table'`);
return tables.filter(t => t.name !== 'sqlite_sequence' && t.name !== '_evolutions').map(t => t.name);
}
else {
const response = await knexClient.raw(`SHOW TABLES`);
return response[0].map(
(table) => Object.values(table)[0]
if (TestDbMngr.isSqlite()) {
const tables = await knexClient.raw(
`SELECT name FROM sqlite_master WHERE type='table'`
);
return tables
.filter((t) => t.name !== 'sqlite_sequence' && t.name !== '_evolutions')
.map((t) => t.name);
} else if (TestDbMngr.isPg()) {
const tables = await knexClient.raw(
`SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';`
);
return tables.rows.map((t) => t.tablename);
} else {
const response = await knexClient.raw(`SHOW TABLES`);
return response[0].map((table) => Object.values(table)[0]);
}
}
}
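
The Postgres branches added above come down to three dialect differences: there is no USE statement (the database is fixed per connection), foreign-key enforcement is toggled through session_replication_role rather than FOREIGN_KEY_CHECKS, and tables are listed from pg_catalog instead of SHOW TABLES. A standalone sketch of those calls, assuming a local Postgres reachable with the .pg.env credentials and an existing test_sakila database:

import { knex } from 'knex';

async function pgDialectSketch() {
  // connection details assumed from tests/unit/.pg.env
  const db = knex({
    client: 'pg',
    connection: {
      user: 'postgres',
      password: 'password',
      host: 'localhost',
      port: 5432,
      database: 'test_sakila',
    },
  });

  // disable FK enforcement for cleanup: triggers are skipped while the
  // session acts as a replica, then restored with 'origin'
  await db.raw(`SET session_replication_role = 'replica'`);
  // ... drop or truncate tables here ...
  await db.raw(`SET session_replication_role = 'origin'`);

  // list user tables from the catalog (SHOW TABLES is MySQL-only)
  const result = await db.raw(
    `SELECT tablename FROM pg_catalog.pg_tables
     WHERE schemaname NOT IN ('pg_catalog', 'information_schema')`
  );
  console.log(result.rows.map((r: { tablename: string }) => r.tablename));

  await db.destroy();
}

pgDialectSketch().catch(console.error);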

packages/nocodb/tests/unit/factory/column.ts (8 lines changed)

@@ -7,7 +7,7 @@ import GridViewColumn from '../../../src/lib/models/GridViewColumn';
import Model from '../../../src/lib/models/Model';
import Project from '../../../src/lib/models/Project';
import View from '../../../src/lib/models/View';
import { isSqlite } from '../init/db';
import { isSqlite, isPg } from '../init/db';
const defaultColumns = function (context) {
return [
@@ -22,22 +22,26 @@ const defaultColumns = function (context) {
uidt: 'SingleLineText',
},
{
cdf: 'CURRENT_TIMESTAMP',
cdf: isPg(context) ? 'now()' : 'CURRENT_TIMESTAMP',
column_name: 'created_at',
title: 'CreatedAt',
dtxp: '',
dtxs: '',
uidt: 'DateTime',
dt: isPg(context) ? 'timestamp without time zone' : undefined,
},
{
cdf: isSqlite(context)
? 'CURRENT_TIMESTAMP'
: isPg(context)
? 'now()'
: 'CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP',
column_name: 'updated_at',
title: 'UpdatedAt',
dtxp: '',
dtxs: '',
uidt: 'DateTime',
dt: isPg(context) ? 'timestamp without time zone' : undefined,
},
];
};
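
The ternaries above handle two dialect gaps: neither SQLite nor Postgres supports MySQL's ON UPDATE CURRENT_TIMESTAMP clause, and for Postgres the default expression is written as now() with the column type pinned to timestamp without time zone. A hedged restatement as small helpers (illustrative only; the factory inlines the ternaries as shown):

// illustrative helpers mirroring the ternaries above; `client` is the dialect
// string carried in the test context's dbConfig
type Ctx = { dbConfig: { client: string } };

const isPg = (ctx: Ctx) => ctx.dbConfig.client === 'pg';
const isSqlite = (ctx: Ctx) =>
  ctx.dbConfig.client === 'sqlite' || ctx.dbConfig.client === 'sqlite3';

const createdAtDefault = (ctx: Ctx) => (isPg(ctx) ? 'now()' : 'CURRENT_TIMESTAMP');

const updatedAtDefault = (ctx: Ctx) =>
  isSqlite(ctx) || isPg(ctx)
    ? createdAtDefault(ctx) // no ON UPDATE clause outside MySQL
    : 'CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP';

const dateTimeType = (ctx: Ctx) =>
  isPg(ctx) ? 'timestamp without time zone' : undefined;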

packages/nocodb/tests/unit/factory/project.ts (5 lines changed)

@@ -4,7 +4,10 @@ import Project from '../../../src/lib/models/Project';
const sakilaProjectConfig = (context) => {
let base;
if (context.sakilaDbConfig.client === 'mysql2') {
if (
context.sakilaDbConfig.client === 'mysql2' ||
context.sakilaDbConfig.client === 'pg'
) {
base = {
type: context.sakilaDbConfig.client,
config: {

packages/nocodb/tests/unit/init/cleanupMeta.ts (17 lines changed)

@@ -1,8 +1,9 @@
import Model from "../../../src/lib/models/Model";
import Project from "../../../src/lib/models/Project";
import NcConnectionMgrv2 from "../../../src/lib/utils/common/NcConnectionMgrv2";
import { orderedMetaTables } from "../../../src/lib/utils/globals";
import TestDbMngr from "../TestDbMngr";
import Model from '../../../src/lib/models/Model';
import Project from '../../../src/lib/models/Project';
import NcConnectionMgrv2 from '../../../src/lib/utils/common/NcConnectionMgrv2';
import { orderedMetaTables } from '../../../src/lib/utils/globals';
import TestDbMngr from '../TestDbMngr';
import { isPg } from './db';
const dropTablesAllNonExternalProjects = async () => {
const projects = await Project.list({});
@@ -28,7 +29,11 @@ const dropTablesAllNonExternalProjects = async () => {
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex);
for (const tableName of userCreatedTableNames) {
await TestDbMngr.metaKnex.raw(`DROP TABLE ${tableName}`);
if (TestDbMngr.isPg()) {
await TestDbMngr.metaKnex.raw(`DROP TABLE "${tableName}" CASCADE`);
} else {
await TestDbMngr.metaKnex.raw(`DROP TABLE ${tableName}`);
}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex);

packages/nocodb/tests/unit/init/cleanupSakila.ts (58 lines changed)

@@ -5,13 +5,30 @@ import TestDbMngr from '../TestDbMngr';
const dropTablesOfSakila = async () => {
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.sakilaKnex);
for(const tableName of sakilaTableNames){
for (const tableName of sakilaTableNames) {
try {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
} catch(e){}
if (TestDbMngr.isPg()) {
await TestDbMngr.sakilaKnex.raw(
`DROP TABLE IF EXISTS "${tableName}" CASCADE`
);
} else {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
}
} catch (e) {}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.sakilaKnex);
}
};
const dropSchemaAndSeedSakila = async () => {
try {
await TestDbMngr.sakilaKnex.raw(`DROP SCHEMA "public" CASCADE`);
await TestDbMngr.sakilaKnex.raw(`CREATE SCHEMA "public"`);
await TestDbMngr.seedSakila();
} catch (e) {
console.error('dropSchemaAndSeedSakila', e);
throw e;
}
};
const resetAndSeedSakila = async () => {
try {
@@ -19,28 +36,42 @@ const resetAndSeedSakila = async () => {
await TestDbMngr.seedSakila();
} catch (e) {
console.error('resetSakila', e);
throw e
throw e;
}
}
};
const cleanUpSakila = async () => {
try {
const sakilaProject = await Project.getByTitle('sakila');
const audits = sakilaProject && await Audit.projectAuditList(sakilaProject.id, {});
const audits =
sakilaProject && (await Audit.projectAuditList(sakilaProject.id, {}));
if(audits?.length > 0) {
if (audits?.length > 0) {
// if PG, drop schema
if (TestDbMngr.isPg()) {
return await dropSchemaAndSeedSakila();
}
// if mysql, drop tables
return await resetAndSeedSakila();
}
const tablesInSakila = await TestDbMngr.showAllTables(TestDbMngr.sakilaKnex);
const tablesInSakila = await TestDbMngr.showAllTables(
TestDbMngr.sakilaKnex
);
await Promise.all(
tablesInSakila
.filter((tableName) => !sakilaTableNames.includes(tableName))
.map(async (tableName) => {
try {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
if (TestDbMngr.isPg()) {
await TestDbMngr.sakilaKnex.raw(
`DROP TABLE "${tableName}" CASCADE`
);
} else {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
}
} catch (e) {
console.error(e);
}
@@ -61,10 +92,15 @@ const sakilaTableNames = [
'film',
'film_actor',
'film_category',
'film_text',
'inventory',
'language',
'payment',
'payment_p2007_01',
'payment_p2007_02',
'payment_p2007_03',
'payment_p2007_04',
'payment_p2007_05',
'payment_p2007_06',
'rental',
'staff',
'store',
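
For Postgres the suite resets Sakila by dropping and recreating the public schema and then replaying the schema and data dumps, instead of dropping tables one by one. A standalone sketch of that reset path, assuming the dump locations and credentials used by the test setup:

import fs from 'fs';
import { knex } from 'knex';

async function resetPgSakila() {
  // credentials assumed from tests/unit/.pg.env; database name assumed to be
  // TestDbMngr.sakilaDbName ('test_sakila')
  const db = knex({
    client: 'pg',
    connection: {
      user: 'postgres',
      password: 'password',
      host: 'localhost',
      port: 5432,
      database: 'test_sakila',
    },
  });

  // wipe everything in one shot, dependent objects included
  await db.raw(`DROP SCHEMA "public" CASCADE`);
  await db.raw(`CREATE SCHEMA "public"`);

  // replay the pg-sakila dumps shipped with the repo (paths relative to packages/nocodb)
  const schema = fs.readFileSync('tests/pg-sakila-db/01-postgres-sakila-schema.sql', 'utf8');
  const data = fs.readFileSync('tests/pg-sakila-db/02-postgres-sakila-insert-data.sql', 'utf8');
  await db.raw(schema);
  await db.raw(data);

  await db.destroy();
}

resetPgSakila().catch(console.error);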

packages/nocodb/tests/unit/init/db.ts (16 lines changed)

@@ -1,12 +1,18 @@
import { DbConfig } from "../../../src/interface/config";
import { DbConfig } from '../../../src/interface/config';
const isSqlite = (context) => {
return (
(context.dbConfig as DbConfig).client === 'sqlite' ||
(context.dbConfig as DbConfig).client === 'sqlite3'
);
};
const isSqlite = (context) =>{
return (context.dbConfig as DbConfig).client === 'sqlite' || (context.dbConfig as DbConfig).client === 'sqlite3';
}
const isPg = (context) => {
return (context.dbConfig as DbConfig).client === 'pg';
};
const isMysql = (context) =>
(context.dbConfig as DbConfig).client === 'mysql' ||
(context.dbConfig as DbConfig).client === 'mysql2';
export { isSqlite, isMysql };
export { isSqlite, isMysql, isPg };
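
These helpers gate dialect-specific behaviour in the suites below; a trivial usage sketch (the context object normally comes from the suite's init code):

import { isPg, isSqlite } from './db'; // the helpers exported above (tests/unit/init)

// hypothetical minimal context; real suites get it from their init() step
const context = { dbConfig: { client: 'pg' } };

if (isPg(context)) {
  // skip or adapt assertions that rely on MySQL-only behaviour, e.g. rollup
  // and count values that Postgres returns as strings (see the test changes below)
}
if (isSqlite(context)) {
  // SQLite runs a reduced set of cases (single connection, no bulk-update test)
}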

packages/nocodb/tests/unit/model/tests/baseModelSql.test.ts (315 lines changed)

@@ -14,7 +14,7 @@ import { expect } from 'chai';
import Filter from '../../../../src/lib/models/Filter';
import { createLtarColumn } from '../../factory/column';
import LinkToAnotherRecordColumn from '../../../../src/lib/models/LinkToAnotherRecordColumn';
import { isSqlite } from '../../init/db';
import { isPg, isSqlite } from '../../init/db';
function baseModelSqlTests() {
let context;
@@ -33,25 +33,42 @@ function baseModelSqlTests() {
baseModelSql = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(base),
model: table,
view
})
view,
});
});
it('Insert record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
user: { email: 'test@example.com' },
};
const columns = await table.getColumns();
const inputData = generateDefaultRowAttributes({columns})
const response = await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request);
let inputData: any = generateDefaultRowAttributes({ columns });
const response = await baseModelSql.insert(
generateDefaultRowAttributes({ columns }),
undefined,
request
);
const insertedRow = (await baseModelSql.list())[0];
if (isPg(context)) {
inputData.CreatedAt = new Date(inputData.CreatedAt).toISOString();
inputData.UpdatedAt = new Date(inputData.UpdatedAt).toISOString();
insertedRow.CreatedAt = new Date(insertedRow.CreatedAt).toISOString();
insertedRow.UpdatedAt = new Date(insertedRow.UpdatedAt).toISOString();
response.CreatedAt = new Date(response.CreatedAt).toISOString();
response.UpdatedAt = new Date(response.UpdatedAt).toISOString();
}
expect(insertedRow).to.include(inputData);
expect(insertedRow).to.include(response);
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'INSERT');
const rowInsertedAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'INSERT');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -69,18 +86,33 @@ function baseModelSqlTests() {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const insertedRows = await baseModelSql.list();
bulkData.forEach((inputData, index) => {
if (isPg(context)) {
insertedRows.forEach((row) => {
row.CreatedAt = new Date(row.CreatedAt).toISOString();
row.UpdatedAt = new Date(row.UpdatedAt).toISOString();
});
}
bulkData.forEach((inputData: any, index) => {
if (isPg(context)) {
inputData.CreatedAt = new Date(inputData.CreatedAt).toISOString();
inputData.UpdatedAt = new Date(inputData.UpdatedAt).toISOString();
}
expect(insertedRows[index]).to.include(inputData);
});
const rowBulkInsertedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_INSERT');;
const rowBulkInsertedAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_INSERT');
expect(rowBulkInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -99,20 +131,22 @@ function baseModelSqlTests() {
it('Update record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
user: { email: 'test@example.com' },
};
const columns = await table.getColumns();
await baseModelSql.insert(generateDefaultRowAttributes({columns}));
await baseModelSql.insert(generateDefaultRowAttributes({ columns }));
const rowId = 1;
await baseModelSql.updateByPk(rowId, {Title: 'test'},undefined, request);
await baseModelSql.updateByPk(rowId, { Title: 'test' }, undefined, request);
const updatedRow = await baseModelSql.readByPk(1);
expect(updatedRow).to.include({Id: rowId, Title: 'test'});
expect(updatedRow).to.include({ Id: rowId, Title: 'test' });
const rowUpdatedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'UPDATE');
const rowUpdatedAudit = (await Audit.projectAuditList(project.id, {})).find(
(audit) => audit.op_sub_type === 'UPDATE'
);
expect(rowUpdatedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -128,26 +162,33 @@ function baseModelSqlTests() {
it('Bulk update record', async () => {
// Since sqlite doesn't support multiple sql connections, we can't test bulk update in sqlite
if(isSqlite(context)) return
if (isSqlite(context)) return;
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const insertedRows: any[] = await baseModelSql.list();
await baseModelSql.bulkUpdate(insertedRows.map((row)=> ({...row, Title: `new-${row['Title']}`})), { cookie: request });
await baseModelSql.bulkUpdate(
insertedRows.map((row) => ({ ...row, Title: `new-${row['Title']}` })),
{ cookie: request }
);
const updatedRows = await baseModelSql.list();
updatedRows.forEach((row, index) => {
expect(row['Title']).to.equal(`new-test-${index}`);
})
const rowBulkUpdateAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
});
const rowBulkUpdateAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
expect(rowBulkUpdateAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -167,28 +208,38 @@ function baseModelSqlTests() {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const idColumn = columns.find((column) => column.title === 'Id')!;
await baseModelSql.bulkUpdateAll({filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
})
]}, ({Title: 'new-1'}), { cookie: request });
await baseModelSql.bulkUpdateAll(
{
filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
}),
],
},
{ Title: 'new-1' },
{ cookie: request }
);
const updatedRows = await baseModelSql.list();
updatedRows.forEach((row) => {
if(row.id < 5) expect(row['Title']).to.equal('new-1');
})
const rowBulkUpdateAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
if (row.id < 5) expect(row['Title']).to.equal('new-1');
});
const rowBulkUpdateAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
expect(rowBulkUpdateAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -207,23 +258,27 @@ function baseModelSqlTests() {
it('Delete record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'},
params: {id: 1}
}
user: { email: 'test@example.com' },
params: { id: 1 },
};
const columns = await table.getColumns();
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const rowIdToDeleted = 1;
await baseModelSql.delByPk(rowIdToDeleted,undefined ,request);
await baseModelSql.delByPk(rowIdToDeleted, undefined, request);
const deletedRow = await baseModelSql.readByPk(rowIdToDeleted);
expect(deletedRow).to.be.undefined;
console.log('Delete record', await Audit.projectAuditList(project.id, {}));
const rowDeletedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'DELETE');
const rowDeletedAudit = (await Audit.projectAuditList(project.id, {})).find(
(audit) => audit.op_sub_type === 'DELETE'
);
expect(rowDeletedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -241,17 +296,19 @@ function baseModelSqlTests() {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const insertedRows: any[] = await baseModelSql.list();
await baseModelSql.bulkDelete(
insertedRows
.filter((row) => row['Id'] < 5)
.map((row)=> ({'id': row['Id']})),
.map((row) => ({ id: row['Id'] })),
{ cookie: request }
);
@@ -259,7 +316,9 @@ function baseModelSqlTests() {
expect(remainingRows).to.length(6);
const rowBulkDeleteAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_DELETE');
const rowBulkDeleteAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_DELETE');
expect(rowBulkDeleteAudit).to.include({
user: 'test@example.com',
@@ -280,26 +339,35 @@ function baseModelSqlTests() {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index}))
await baseModelSql.bulkInsert(bulkData, {cookie:request});
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const idColumn = columns.find((column) => column.title === 'Id')!;
await baseModelSql.bulkDeleteAll({filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
})
]}, { cookie: request });
await baseModelSql.bulkDeleteAll(
{
filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
}),
],
},
{ cookie: request }
);
const remainingRows = await baseModelSql.list();
expect(remainingRows).to.length(6);
const rowBulkDeleteAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_DELETE');
const rowBulkDeleteAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_DELETE');
expect(rowBulkDeleteAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
@@ -319,38 +387,41 @@ function baseModelSqlTests() {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
})
});
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: "hm"
})
type: 'hm',
});
const childRow = await createRow(context, {
project,
table: childTable,
})
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
});
const ltarColOptions =
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
user: { email: 'test@example.com' },
};
await baseModelSql.nestedInsert(
{...generateDefaultRowAttributes({columns}), [ltarColumn.title]: [{'Id': childRow['Id']}]},
undefined,
{
...generateDefaultRowAttributes({ columns }),
[ltarColumn.title]: [{ Id: childRow['Id'] }],
},
undefined,
request
);
const childBaseModel = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view
})
view,
});
const insertedChildRow = await childBaseModel.readByPk(childRow['Id']);
expect(insertedChildRow[childCol.column_name]).to.equal(childRow['Id']);
@@ -369,48 +440,55 @@ function baseModelSqlTests() {
op_sub_type: 'INSERT',
description: '1 inserted into Table1_Title',
});
})
});
it('Link child', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
})
});
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: "hm"
})
type: 'hm',
});
const insertedChildRow = await createRow(context, {
project,
table: childTable,
})
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
});
const ltarColOptions =
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
user: { email: 'test@example.com' },
};
await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request);
await baseModelSql.insert(
generateDefaultRowAttributes({ columns }),
undefined,
request
);
const insertedRow = await baseModelSql.readByPk(1);
await baseModelSql.addChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request
cookie: request,
});
const childBaseModel = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view
})
const updatedChildRow = await childBaseModel.readByPk(insertedChildRow['Id']);
view,
});
const updatedChildRow = await childBaseModel.readByPk(
insertedChildRow['Id']
);
expect(updatedChildRow[childCol.column_name]).to.equal(insertedRow['Id']);
@@ -427,57 +505,65 @@ function baseModelSqlTests() {
row_id: '1',
op_type: 'DATA',
op_sub_type: 'LINK_RECORD',
description: 'Record [id:1] record linked with record [id:1] record in Table1_Title',
description:
'Record [id:1] record linked with record [id:1] record in Table1_Title',
});
})
});
it('Unlink child', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
})
});
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: "hm"
})
type: 'hm',
});
const insertedChildRow = await createRow(context, {
project,
table: childTable,
})
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
});
const ltarColOptions =
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: {email: 'test@example.com'}
}
user: { email: 'test@example.com' },
};
await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request);
await baseModelSql.insert(
generateDefaultRowAttributes({ columns }),
undefined,
request
);
const insertedRow = await baseModelSql.readByPk(1);
await baseModelSql.addChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request
cookie: request,
});
await baseModelSql.removeChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request
cookie: request,
});
const childBaseModel = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view
})
const updatedChildRow = await childBaseModel.readByPk(insertedChildRow['Id']);
view,
});
const updatedChildRow = await childBaseModel.readByPk(
insertedChildRow['Id']
);
expect(updatedChildRow[childCol.column_name]).to.be.null;
@@ -494,9 +580,10 @@ function baseModelSqlTests() {
row_id: '1',
op_type: 'DATA',
op_sub_type: 'UNLINK_RECORD',
description: 'Record [id:1] record unlinked with record [id:1] record in Table1_Title',
description:
'Record [id:1] record unlinked with record [id:1] record in Table1_Title',
});
})
});
}
export default function () {
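
The repeated CreatedAt/UpdatedAt conversions in the pg branches exist because the pg driver hands back timestamp columns as JavaScript Date objects (or differently formatted strings) while the generated input rows hold plain strings; converting both sides to ISO strings makes the deep-include assertions dialect-agnostic. A hedged helper that captures the same idea (the tests inline the conversions rather than using a helper):

// hypothetical helper, not part of the diff
const withIsoTimestamps = (row: Record<string, any>) => ({
  ...row,
  CreatedAt: row.CreatedAt ? new Date(row.CreatedAt).toISOString() : row.CreatedAt,
  UpdatedAt: row.UpdatedAt ? new Date(row.UpdatedAt).toISOString() : row.UpdatedAt,
});

// usage: expect(withIsoTimestamps(insertedRow)).to.include(withIsoTimestamps(inputData));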

packages/nocodb/tests/unit/rest/tests/tableRow.test.ts (52 lines changed)

@@ -19,7 +19,7 @@
listRow,
createBulkRows,
} from '../../factory/row';
import { isMysql, isSqlite } from '../../init/db';
import { isMysql, isPg, isSqlite } from '../../init/db';
import Model from '../../../../src/lib/models/Model';
import Project from '../../../../src/lib/models/Project';
import { expect } from 'chai';
@@ -446,7 +446,7 @@ function tableTest() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
],
};
@@ -536,7 +536,7 @@ function tableTest() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
{
is_group: true,
@@ -563,7 +563,7 @@ function tableTest() {
})
.expect(200);
if (response.body.pageInfo.totalRows !== 9133) {
if (parseInt(response.body.pageInfo.totalRows) !== 9133) {
console.log(response.body.pageInfo);
throw new Error('Wrong number of rows');
}
@@ -585,7 +585,7 @@ function tableTest() {
})
.expect(200);
if (ascResponse.body.pageInfo.totalRows !== 9133) {
if (parseInt(ascResponse.body.pageInfo.totalRows) !== 9133) {
console.log(ascResponse.body.pageInfo);
throw new Error('Wrong number of rows');
}
@@ -609,7 +609,7 @@ function tableTest() {
})
.expect(200);
if (descResponse.body.pageInfo.totalRows !== 9133) {
if (parseInt(descResponse.body.pageInfo.totalRows) !== 9133) {
console.log(descResponse.body.pageInfo);
throw new Error('Wrong number of rows');
}
@@ -619,6 +619,7 @@ function tableTest() {
});
it('Get nested sorted filtered table data list with a rollup column in customer table', async function () {
if (isPg(context)) return;
const rollupColumn = await createRollupColumn(context, {
project: sakilaProject,
title: 'Number of rentals',
@@ -664,7 +665,7 @@ function tableTest() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
{
is_group: true,
@@ -691,12 +692,12 @@ function tableTest() {
})
.expect(200);
if (response.body.pageInfo.totalRows !== 594) {
if (parseInt(response.body.pageInfo.totalRows) !== 594) {
console.log(response.body.pageInfo);
throw new Error('Wrong number of rows');
}
if (response.body.list[0][rollupColumn.title] !== 32) {
if (parseInt(response.body.list[0][rollupColumn.title]) !== 32) {
console.log(response.body.list[0]);
throw new Error('Wrong filter response 0');
}
@@ -719,12 +720,12 @@ function tableTest() {
})
.expect(200);
if (ascResponse.body.pageInfo.totalRows !== 594) {
if (parseInt(ascResponse.body.pageInfo.totalRows) !== 594) {
console.log(ascResponse.body.pageInfo);
throw new Error('Wrong number of rows');
}
if (ascResponse.body.list[0][rollupColumn.title] !== 12) {
if (parseInt(ascResponse.body.list[0][rollupColumn.title]) !== 12) {
console.log(ascResponse.body.list[0][rollupColumn.title]);
throw new Error('Wrong filter ascResponse 0');
}
@@ -755,12 +756,12 @@ function tableTest() {
})
.expect(200);
if (descResponse.body.pageInfo.totalRows !== 594) {
if (parseInt(descResponse.body.pageInfo.totalRows) !== 594) {
console.log(descResponse.body.pageInfo);
throw new Error('Wrong number of rows');
}
if (descResponse.body.list[0][rollupColumn.title] !== 46) {
if (parseInt(descResponse.body.list[0][rollupColumn.title]) !== 46) {
console.log(descResponse.body.list[0]);
throw new Error('Wrong filter descResponse 0');
}
@@ -820,7 +821,7 @@ function tableTest() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
{
is_group: true,
@@ -860,7 +861,7 @@ function tableTest() {
throw new Error('Wrong number of rows');
}
if (ascResponse.body.list[0][rollupColumn.title] !== 12) {
if (parseInt(ascResponse.body.list[0][rollupColumn.title]) !== 12) {
throw new Error('Wrong filter');
}
@@ -907,12 +908,13 @@ function tableTest() {
})
.expect(200);
if (response.body.list[0][formulaColumnTitle] !== 22)
if (parseInt(response.body.list[0][formulaColumnTitle]) !== 22)
throw new Error('Wrong sorting');
if (
(response.body.list as Array<any>).every(
(row) => row['Formula'] !== row[rollupColumnTitle] + 10
(row) =>
parseInt(row['Formula']) !== parseInt(row[rollupColumnTitle]) + 10
)
) {
throw new Error('Wrong formula');
@@ -1128,7 +1130,7 @@ function tableTest() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
{
is_group: true,
@@ -1160,7 +1162,7 @@ function tableTest() {
})
.expect(200);
if (ascResponse.body[rollupColumn.title] !== 12) {
if (parseInt(ascResponse.body[rollupColumn.title]) !== 12) {
console.log(ascResponse.body);
throw new Error('Wrong filter');
}
@@ -1206,7 +1208,7 @@ function tableTest() {
if (
response.body.list[4]['first_name'] !== 'WILLIE' ||
response.body.list[4]['count'] !== 2
parseInt(response.body.list[4]['count']) !== 2
)
throw new Error('Wrong groupby');
});
@@ -1243,7 +1245,7 @@ function tableTest() {
if (
response.body.list[0]['first_name'] !== 'WILLIE' ||
response.body.list[0]['count'] !== 2
parseInt(response.body.list[0]['count']) !== 2
)
throw new Error('Wrong groupby');
});
@@ -1422,7 +1424,7 @@ function tableTest() {
it('Exist should be false table row when it does not exists', async function () {
const response = await request(context.app)
.get(
`/api/v1/db/data/noco/${sakilaProject.id}/${customerTable.id}/invalid-id/exist`
`/api/v1/db/data/noco/${sakilaProject.id}/${customerTable.id}/998546/exist`
)
.set('xc-auth', context.token)
.expect(200);
@@ -2110,7 +2112,7 @@ function tableTest() {
it('Delete list hm with existing ref row id with non nullable clause', async () => {
// todo: Foreign key has non nullable clause in sqlite sakila
if (isSqlite(context)) return;
if (isSqlite(context) || isPg(context)) return;
const rowId = 1;
const rentalListColumn = (await customerTable.getColumns()).find(
@@ -2127,7 +2129,9 @@ function tableTest() {
// todo: only keep generic error message once updated in noco catchError middleware
if (
!response.body.message?.includes("The column 'customer_id' cannot be null") &&
!response.body.message?.includes(
"The column 'customer_id' cannot be null"
) &&
!response.body.message?.includes("Column 'customer_id' cannot be null") &&
!response.body.message?.includes('Cannot add or update a child row') &&
!response.body.msg?.includes("Column 'customer_id' cannot be null") &&
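
The parseInt wrappers introduced throughout this file account for a driver difference: node-postgres returns bigint and numeric aggregates (row counts, rollups, grouped counts) as strings, since they may not fit safely in a JavaScript number, so the assertions normalize before comparing. A minimal illustration:

// Postgres (via node-postgres) returns the aggregate as a string:
const totalRows: number | string = '9133';

// normalizing with parseInt makes the assertion driver-agnostic
if (parseInt(String(totalRows), 10) !== 9133) {
  throw new Error('Wrong number of rows');
}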

packages/nocodb/tests/unit/rest/tests/viewRow.test.ts (20 lines changed)

@@ -22,6 +22,8 @@ import {
getRow,
} from '../../factory/row';
import { expect } from 'chai';
import { isPg } from '../../init/db';
import { isString } from 'util';
// Test case list
// 1. Get view row list g
@@ -540,7 +542,7 @@ function viewRowTests() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
],
};
@@ -657,7 +659,7 @@ function viewRowTests() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
{
is_group: true,
@@ -696,7 +698,7 @@ function viewRowTests() {
expect(ascResponse.body.pageInfo.totalRows).equal(594);
if (ascResponse.body.list[0][rollupColumn.title] !== 12) {
if (parseInt(ascResponse.body.list[0][rollupColumn.title]) !== 12) {
throw new Error('Wrong filter');
}
@@ -926,7 +928,7 @@ function viewRowTests() {
fk_column_id: paymentListColumn?.id,
status: 'create',
logical_op: 'and',
comparison_op: 'notempty',
comparison_op: 'notblank',
},
{
is_group: true,
@@ -958,7 +960,7 @@ function viewRowTests() {
})
.expect(200);
if (ascResponse.body[rollupColumn.title] !== 12) {
if (parseInt(ascResponse.body[rollupColumn.title]) !== 12) {
console.log('response.body', ascResponse.body);
throw new Error('Wrong filter');
}
@@ -1022,7 +1024,7 @@ function viewRowTests() {
if (
response.body.list[4]['first_name'] !== 'WILLIE' ||
response.body.list[4]['count'] !== 2
parseInt(response.body.list[4]['count']) !== 2
)
throw new Error('Wrong groupby');
};
@@ -1077,7 +1079,7 @@ function viewRowTests() {
if (
response.body.list[0]['first_name'] !== 'WILLIE' ||
response.body.list[0]['count'] !== 2
parseInt(response.body.list[0]['count']) !== 2
)
throw new Error('Wrong groupby');
};
@@ -1108,7 +1110,7 @@ function viewRowTests() {
.set('xc-auth', context.token)
.expect(200);
if (response.body.count !== 599) {
if (parseInt(response.body.count) !== 599) {
throw new Error('Wrong count');
}
};
@@ -1443,7 +1445,7 @@ function viewRowTests() {
});
const response = await request(context.app)
.get(
`/api/v1/db/data/noco/${sakilaProject.id}/${customerTable.id}/views/${view.id}/invalid-id/exist`
`/api/v1/db/data/noco/${sakilaProject.id}/${customerTable.id}/views/${view.id}/999999/exist`
)
.set('xc-auth', context.token)
.expect(200);
