mirror of https://github.com/nocodb/nocodb
Wing-Kam Wong
2 years ago
81 changed files with 53537 additions and 1226 deletions
@ -0,0 +1,24 @@
|
||||
spec: |
||||
name: nocodb |
||||
services: |
||||
- name: nocodb |
||||
image: |
||||
registry_type: DOCKER_HUB |
||||
registry: nocodb |
||||
repository: nocodb |
||||
tag: latest |
||||
run_command: "./server/scripts/digitalocean-postbuild.sh" |
||||
instance_size_slug: "basic-s" |
||||
health_check: |
||||
initial_delay_seconds: 10 |
||||
http_path: /api/health |
||||
envs: |
||||
- key: NODE_ENV |
||||
value: "production" |
||||
- key: DATABASE_URL |
||||
scope: RUN_TIME |
||||
value: ${postgres.DATABASE_URL} |
||||
databases: |
||||
- name: postgres |
||||
engine: PG |
||||
production: false |
@ -1,5 +1,5 @@
|
||||
<component name="ProjectRunConfigurationManager"> |
||||
<configuration default="false" name="Drop metadb" type="NodeJSConfigurationType" path-to-js-file="$PROJECT_DIR$/packages/nocodb/src/run/deleteMetaDb.js" working-dir="$PROJECT_DIR$/packages/nocodb/src/run"> |
||||
<configuration default="false" name="Drop metadb" type="NodeJSConfigurationType" path-to-js-file="deleteMetaDb.js" working-dir="$PROJECT_DIR$/packages/nocodb/src/run"> |
||||
<method v="2" /> |
||||
</configuration> |
||||
</component> |
@ -1,12 +0,0 @@
|
||||
<component name="ProjectRunConfigurationManager"> |
||||
<configuration default="false" name="Run GUI" type="js.build_tools.npm"> |
||||
<package-json value="$PROJECT_DIR$/packages/nc-gui/package.json" /> |
||||
<command value="run" /> |
||||
<scripts> |
||||
<script value="dev" /> |
||||
</scripts> |
||||
<node-interpreter value="project" /> |
||||
<envs /> |
||||
<method v="2" /> |
||||
</configuration> |
||||
</component> |
@ -0,0 +1,7 @@
|
||||
<component name="ProjectRunConfigurationManager"> |
||||
<configuration default="false" name="Start::IDE" type="CompoundRunConfigurationType"> |
||||
<toRun name="Run::Backend" type="js.build_tools.npm" /> |
||||
<toRun name="Run::Frontend" type="js.build_tools.npm" /> |
||||
<method v="2" /> |
||||
</configuration> |
||||
</component> |
@ -0,0 +1,26 @@
|
||||
import { useClipboard } from '#imports' |
||||
|
||||
export const useCopy = () => { |
||||
/** fallback for copy if clipboard api is not supported */ |
||||
const copyFallback = (text: string) => { |
||||
const textAreaEl = document.createElement('textarea') |
||||
textAreaEl.innerHTML = text |
||||
document.body.appendChild(textAreaEl) |
||||
textAreaEl.select() |
||||
const result = document.execCommand('copy') |
||||
document.body.removeChild(textAreaEl) |
||||
return result |
||||
} |
||||
|
||||
const { copy: _copy, isSupported } = useClipboard() |
||||
|
||||
const copy = async (text: string) => { |
||||
if (isSupported) { |
||||
await _copy(text) |
||||
} else { |
||||
copyFallback(text) |
||||
} |
||||
} |
||||
|
||||
return { copy } |
||||
} |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,70 @@
|
||||
{ |
||||
"compilerOptions": { |
||||
"skipLibCheck": true, |
||||
"composite": true, |
||||
"target": "es2017", |
||||
"outDir": "build/main", |
||||
"rootDir": "src", |
||||
"moduleResolution": "node", |
||||
"module": "commonjs", |
||||
"declaration": true, |
||||
"inlineSourceMap": true, |
||||
"esModuleInterop": true |
||||
/* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, |
||||
"allowJs": false, |
||||
// "strict": true /* Enable all strict type-checking options. */, |
||||
|
||||
/* Strict Type-Checking Options */ |
||||
// "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */, |
||||
// "strictNullChecks": true /* Enable strict null checks. */, |
||||
// "strictFunctionTypes": true /* Enable strict checking of function types. */, |
||||
// "strictPropertyInitialization": true /* Enable strict checking of property initialization in classes. */, |
||||
// "noImplicitThis": true /* Raise error on 'this' expressions with an implied 'any' type. */, |
||||
// "alwaysStrict": true /* Parse in strict mode and emit "use strict" for each source file. */, |
||||
"resolveJsonModule": true, |
||||
/* Additional Checks */ |
||||
"noUnusedLocals": false |
||||
/* Report errors on unused locals. */, |
||||
"noUnusedParameters": false |
||||
/* Report errors on unused parameters. */, |
||||
"noImplicitReturns": false |
||||
/* Report error when not all code paths in function return a value. */, |
||||
"noFallthroughCasesInSwitch": false |
||||
/* Report errors for fallthrough cases in switch statement. */, |
||||
/* Debugging Options */ |
||||
"traceResolution": false |
||||
/* Report module resolution log messages. */, |
||||
"listEmittedFiles": false |
||||
/* Print names of generated files part of the compilation. */, |
||||
"listFiles": false |
||||
/* Print names of files part of the compilation. */, |
||||
"pretty": true |
||||
/* Stylize errors and messages using color and context. */, |
||||
/* Experimental Options */ |
||||
// "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */, |
||||
// "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */, |
||||
|
||||
"lib": [ |
||||
"es2017" |
||||
], |
||||
"types": [ |
||||
"mocha", "node" |
||||
], |
||||
"typeRoots": [ |
||||
"node_modules/@types", |
||||
"src/types" |
||||
] |
||||
}, |
||||
"include": [ |
||||
"src/**/*.ts", |
||||
// "src/lib/xgene/migrations/*.js", |
||||
"src/**/*.json" |
||||
], |
||||
"exclude": [ |
||||
"node_modules/**", |
||||
"node_modules", |
||||
"../../../xc-lib-private/**", |
||||
"../../../xc-lib-private" |
||||
], |
||||
"compileOnSave": false |
||||
} |
@ -0,0 +1,34 @@
|
||||
import Knex from 'knex'; |
||||
import { MetaTable } from '../../utils/globals'; |
||||
|
||||
const up = async (knex: Knex) => { |
||||
await knex.schema.alterTable(MetaTable.FORM_VIEW, (table) => { |
||||
table.text('meta'); |
||||
}); |
||||
await knex.schema.alterTable(MetaTable.FORM_VIEW_COLUMNS, (table) => { |
||||
table.text('meta'); |
||||
}); |
||||
await knex.schema.alterTable(MetaTable.GRID_VIEW, (table) => { |
||||
table.text('meta'); |
||||
}); |
||||
await knex.schema.alterTable(MetaTable.GALLERY_VIEW, (table) => { |
||||
table.text('meta'); |
||||
}); |
||||
}; |
||||
|
||||
const down = async (knex) => { |
||||
await knex.schema.alterTable(MetaTable.FORM_VIEW, (table) => { |
||||
table.dropColumns('meta'); |
||||
}); |
||||
await knex.schema.alterTable(MetaTable.FORM_VIEW_COLUMNS, (table) => { |
||||
table.dropColumns('meta'); |
||||
}); |
||||
await knex.schema.alterTable(MetaTable.GRID_VIEW, (table) => { |
||||
table.dropColumns('meta'); |
||||
}); |
||||
await knex.schema.alterTable(MetaTable.GALLERY_VIEW, (table) => { |
||||
table.dropColumns('meta'); |
||||
}); |
||||
}; |
||||
|
||||
export { up, down }; |
@ -0,0 +1,19 @@
|
||||
export const serializeJSON = (data: string | Record<string, any>) => { |
||||
// if already in string format ignore stringify
|
||||
if (typeof data === 'string') { |
||||
return data; |
||||
} |
||||
return JSON.stringify(data); |
||||
}; |
||||
|
||||
export const deserializeJSON = (data: string | Record<string, any>) => { |
||||
// if already in object format ignore parse
|
||||
if (typeof data === 'object') { |
||||
return data ?? {}; |
||||
} |
||||
try { |
||||
return JSON.parse(data) ?? {}; |
||||
} catch (e) { |
||||
return {}; |
||||
} |
||||
}; |
@ -0,0 +1,658 @@
|
||||
-- Sakila Sample Database Schema |
||||
-- Version 1.2 |
||||
|
||||
-- Copyright (c) 2006, 2019, Oracle and/or its affiliates. |
||||
|
||||
-- Redistribution and use in source and binary forms, with or without |
||||
-- modification, are permitted provided that the following conditions are |
||||
-- met: |
||||
|
||||
-- * Redistributions of source code must retain the above copyright notice, |
||||
-- this list of conditions and the following disclaimer. |
||||
-- * Redistributions in binary form must reproduce the above copyright |
||||
-- notice, this list of conditions and the following disclaimer in the |
||||
-- documentation and/or other materials provided with the distribution. |
||||
-- * Neither the name of Oracle nor the names of its contributors may be used |
||||
-- to endorse or promote products derived from this software without |
||||
-- specific prior written permission. |
||||
|
||||
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS |
||||
-- IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, |
||||
-- THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
||||
-- PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR |
||||
-- CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
||||
-- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
||||
-- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
||||
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF |
||||
-- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING |
||||
-- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS |
||||
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
SET NAMES utf8mb4; |
||||
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; |
||||
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; |
||||
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL'; |
||||
|
||||
DROP SCHEMA IF EXISTS test_sakila; |
||||
CREATE SCHEMA test_sakila; |
||||
USE test_sakila; |
||||
|
||||
-- |
||||
-- Table structure for table `actor` |
||||
-- |
||||
|
||||
CREATE TABLE actor ( |
||||
actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (actor_id), |
||||
KEY idx_actor_last_name (last_name) |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `address` |
||||
-- |
||||
|
||||
CREATE TABLE address ( |
||||
address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
address VARCHAR(50) NOT NULL, |
||||
address2 VARCHAR(50) DEFAULT NULL, |
||||
district VARCHAR(20) NOT NULL, |
||||
city_id SMALLINT UNSIGNED NOT NULL, |
||||
postal_code VARCHAR(10) DEFAULT NULL, |
||||
phone VARCHAR(20) NOT NULL, |
||||
-- Add GEOMETRY column for MySQL 5.7.5 and higher |
||||
-- Also include SRID attribute for MySQL 8.0.3 and higher |
||||
/*!50705 location GEOMETRY */ /*!80003 SRID 0 */ /*!50705 NOT NULL,*/ |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (address_id), |
||||
KEY idx_fk_city_id (city_id), |
||||
/*!50705 SPATIAL KEY `idx_location` (location),*/ |
||||
CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `category` |
||||
-- |
||||
|
||||
CREATE TABLE category ( |
||||
category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
name VARCHAR(25) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (category_id) |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `city` |
||||
-- |
||||
|
||||
CREATE TABLE city ( |
||||
city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
city VARCHAR(50) NOT NULL, |
||||
country_id SMALLINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (city_id), |
||||
KEY idx_fk_country_id (country_id), |
||||
CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `country` |
||||
-- |
||||
|
||||
CREATE TABLE country ( |
||||
country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
country VARCHAR(50) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (country_id) |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `customer` |
||||
-- |
||||
|
||||
CREATE TABLE customer ( |
||||
customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
store_id TINYINT UNSIGNED NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
address_id SMALLINT UNSIGNED NOT NULL, |
||||
active BOOLEAN NOT NULL DEFAULT TRUE, |
||||
create_date DATETIME NOT NULL, |
||||
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (customer_id), |
||||
KEY idx_fk_store_id (store_id), |
||||
KEY idx_fk_address_id (address_id), |
||||
KEY idx_last_name (last_name), |
||||
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `film` |
||||
-- |
||||
|
||||
CREATE TABLE film ( |
||||
film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
title VARCHAR(128) NOT NULL, |
||||
description TEXT DEFAULT NULL, |
||||
release_year YEAR DEFAULT NULL, |
||||
language_id TINYINT UNSIGNED NOT NULL, |
||||
original_language_id TINYINT UNSIGNED DEFAULT NULL, |
||||
rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3, |
||||
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99, |
||||
length SMALLINT UNSIGNED DEFAULT NULL, |
||||
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99, |
||||
rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G', |
||||
special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (film_id), |
||||
KEY idx_title (title), |
||||
KEY idx_fk_language_id (language_id), |
||||
KEY idx_fk_original_language_id (original_language_id), |
||||
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `film_actor` |
||||
-- |
||||
|
||||
CREATE TABLE film_actor ( |
||||
actor_id SMALLINT UNSIGNED NOT NULL, |
||||
film_id SMALLINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (actor_id,film_id), |
||||
KEY idx_fk_film_id (`film_id`), |
||||
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `film_category` |
||||
-- |
||||
|
||||
CREATE TABLE film_category ( |
||||
film_id SMALLINT UNSIGNED NOT NULL, |
||||
category_id TINYINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (film_id, category_id), |
||||
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `film_text` |
||||
-- |
||||
-- InnoDB added FULLTEXT support in 5.6.10. If you use an |
||||
-- earlier version, then consider upgrading (recommended) or |
||||
-- changing InnoDB to MyISAM as the film_text engine |
||||
-- |
||||
|
||||
-- Use InnoDB for film_text as of 5.6.10, MyISAM prior to 5.6.10. |
||||
SET @old_default_storage_engine = @@default_storage_engine; |
||||
SET @@default_storage_engine = 'MyISAM'; |
||||
/*!50610 SET @@default_storage_engine = 'InnoDB'*/; |
||||
|
||||
CREATE TABLE film_text ( |
||||
film_id SMALLINT NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description TEXT, |
||||
PRIMARY KEY (film_id), |
||||
FULLTEXT KEY idx_title_description (title,description) |
||||
) DEFAULT CHARSET=utf8mb4; |
||||
|
||||
SET @@default_storage_engine = @old_default_storage_engine; |
||||
|
||||
-- |
||||
-- Triggers for loading film_text from film |
||||
-- |
||||
|
||||
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN |
||||
INSERT INTO film_text (film_id, title, description) |
||||
VALUES (new.film_id, new.title, new.description); |
||||
END; |
||||
|
||||
|
||||
CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN |
||||
IF (old.title != new.title) OR (old.description != new.description) OR (old.film_id != new.film_id) |
||||
THEN |
||||
UPDATE film_text |
||||
SET title=new.title, |
||||
description=new.description, |
||||
film_id=new.film_id |
||||
WHERE film_id=old.film_id; |
||||
END IF; |
||||
END; |
||||
|
||||
|
||||
CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN |
||||
DELETE FROM film_text WHERE film_id = old.film_id; |
||||
END; |
||||
|
||||
-- |
||||
-- Table structure for table `inventory` |
||||
-- |
||||
|
||||
CREATE TABLE inventory ( |
||||
inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
film_id SMALLINT UNSIGNED NOT NULL, |
||||
store_id TINYINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (inventory_id), |
||||
KEY idx_fk_film_id (film_id), |
||||
KEY idx_store_id_film_id (store_id,film_id), |
||||
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `language` |
||||
-- |
||||
|
||||
CREATE TABLE language ( |
||||
language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
name CHAR(20) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (language_id) |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `payment` |
||||
-- |
||||
|
||||
CREATE TABLE payment ( |
||||
payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
customer_id SMALLINT UNSIGNED NOT NULL, |
||||
staff_id TINYINT UNSIGNED NOT NULL, |
||||
rental_id INT DEFAULT NULL, |
||||
amount DECIMAL(5,2) NOT NULL, |
||||
payment_date DATETIME NOT NULL, |
||||
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (payment_id), |
||||
KEY idx_fk_staff_id (staff_id), |
||||
KEY idx_fk_customer_id (customer_id), |
||||
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE, |
||||
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table `rental` |
||||
-- |
||||
|
||||
CREATE TABLE rental ( |
||||
rental_id INT NOT NULL AUTO_INCREMENT, |
||||
rental_date DATETIME NOT NULL, |
||||
inventory_id MEDIUMINT UNSIGNED NOT NULL, |
||||
customer_id SMALLINT UNSIGNED NOT NULL, |
||||
return_date DATETIME DEFAULT NULL, |
||||
staff_id TINYINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (rental_id), |
||||
UNIQUE KEY (rental_date,inventory_id,customer_id), |
||||
KEY idx_fk_inventory_id (inventory_id), |
||||
KEY idx_fk_customer_id (customer_id), |
||||
KEY idx_fk_staff_id (staff_id), |
||||
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `staff` |
||||
-- |
||||
|
||||
CREATE TABLE staff ( |
||||
staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
address_id SMALLINT UNSIGNED NOT NULL, |
||||
picture BLOB DEFAULT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
store_id TINYINT UNSIGNED NOT NULL, |
||||
active BOOLEAN NOT NULL DEFAULT TRUE, |
||||
username VARCHAR(16) NOT NULL, |
||||
password VARCHAR(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (staff_id), |
||||
KEY idx_fk_store_id (store_id), |
||||
KEY idx_fk_address_id (address_id), |
||||
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- Table structure for table `store` |
||||
-- |
||||
|
||||
CREATE TABLE store ( |
||||
store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
manager_staff_id TINYINT UNSIGNED NOT NULL, |
||||
address_id SMALLINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (store_id), |
||||
UNIQUE KEY idx_unique_manager (manager_staff_id), |
||||
KEY idx_fk_address_id (address_id), |
||||
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; |
||||
|
||||
-- |
||||
-- View structure for view `customer_list` |
||||
-- |
||||
|
||||
CREATE VIEW customer_list |
||||
AS |
||||
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8mb4' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, |
||||
a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8mb4'active',_utf8mb4'') AS notes, cu.store_id AS SID |
||||
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id; |
||||
|
||||
-- |
||||
-- View structure for view `film_list` |
||||
-- |
||||
|
||||
CREATE VIEW film_list |
||||
AS |
||||
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price, |
||||
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8mb4' ', actor.last_name) SEPARATOR ', ') AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
GROUP BY film.film_id, category.name; |
||||
|
||||
-- |
||||
-- View structure for view `nicer_but_slower_film_list` |
||||
-- |
||||
|
||||
CREATE VIEW nicer_but_slower_film_list |
||||
AS |
||||
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price, |
||||
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)), |
||||
LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8mb4' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)), |
||||
LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
GROUP BY film.film_id, category.name; |
||||
|
||||
-- |
||||
-- View structure for view `staff_list` |
||||
-- |
||||
|
||||
CREATE VIEW staff_list |
||||
AS |
||||
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8mb4' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone, |
||||
city.city AS city, country.country AS country, s.store_id AS SID |
||||
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id; |
||||
|
||||
-- |
||||
-- View structure for view `sales_by_store` |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_store |
||||
AS |
||||
SELECT |
||||
CONCAT(c.city, _utf8mb4',', cy.country) AS store |
||||
, CONCAT(m.first_name, _utf8mb4' ', m.last_name) AS manager |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN store AS s ON i.store_id = s.store_id |
||||
INNER JOIN address AS a ON s.address_id = a.address_id |
||||
INNER JOIN city AS c ON a.city_id = c.city_id |
||||
INNER JOIN country AS cy ON c.country_id = cy.country_id |
||||
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id |
||||
GROUP BY s.store_id |
||||
ORDER BY cy.country, c.city; |
||||
|
||||
-- |
||||
-- View structure for view `sales_by_film_category` |
||||
-- |
||||
-- Note that total sales will add up to >100% because |
||||
-- some titles belong to more than 1 category |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_film_category |
||||
AS |
||||
SELECT |
||||
c.name AS category |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN film AS f ON i.film_id = f.film_id |
||||
INNER JOIN film_category AS fc ON f.film_id = fc.film_id |
||||
INNER JOIN category AS c ON fc.category_id = c.category_id |
||||
GROUP BY c.name |
||||
ORDER BY total_sales DESC; |
||||
|
||||
-- |
||||
-- View structure for view `actor_info` |
||||
-- |
||||
|
||||
CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info |
||||
AS |
||||
SELECT |
||||
a.actor_id, |
||||
a.first_name, |
||||
a.last_name, |
||||
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ', |
||||
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ') |
||||
FROM test_sakila.film f |
||||
INNER JOIN test_sakila.film_category fc |
||||
ON f.film_id = fc.film_id |
||||
INNER JOIN test_sakila.film_actor fa |
||||
ON f.film_id = fa.film_id |
||||
WHERE fc.category_id = c.category_id |
||||
AND fa.actor_id = a.actor_id |
||||
) |
||||
) |
||||
ORDER BY c.name SEPARATOR '; ') |
||||
AS film_info |
||||
FROM test_sakila.actor a |
||||
LEFT JOIN test_sakila.film_actor fa |
||||
ON a.actor_id = fa.actor_id |
||||
LEFT JOIN test_sakila.film_category fc |
||||
ON fa.film_id = fc.film_id |
||||
LEFT JOIN test_sakila.category c |
||||
ON fc.category_id = c.category_id |
||||
GROUP BY a.actor_id, a.first_name, a.last_name; |
||||
|
||||
-- |
||||
-- Procedure structure for procedure `rewards_report` |
||||
-- |
||||
|
||||
CREATE PROCEDURE rewards_report ( |
||||
IN min_monthly_purchases TINYINT UNSIGNED |
||||
, IN min_dollar_amount_purchased DECIMAL(10,2) |
||||
, OUT count_rewardees INT |
||||
) |
||||
LANGUAGE SQL |
||||
NOT DETERMINISTIC |
||||
READS SQL DATA |
||||
SQL SECURITY DEFINER |
||||
COMMENT 'Provides a customizable report on best customers' |
||||
proc: BEGIN |
||||
|
||||
DECLARE last_month_start DATE; |
||||
DECLARE last_month_end DATE; |
||||
|
||||
/* Some sanity checks... */ |
||||
IF min_monthly_purchases = 0 THEN |
||||
SELECT 'Minimum monthly purchases parameter must be > 0'; |
||||
LEAVE proc; |
||||
END IF; |
||||
IF min_dollar_amount_purchased = 0.00 THEN |
||||
SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00'; |
||||
LEAVE proc; |
||||
END IF; |
||||
|
||||
/* Determine start and end time periods */ |
||||
SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH); |
||||
SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d'); |
||||
SET last_month_end = LAST_DAY(last_month_start); |
||||
|
||||
/* |
||||
Create a temporary storage area for |
||||
Customer IDs. |
||||
*/ |
||||
CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY); |
||||
|
||||
/* |
||||
Find all customers meeting the |
||||
monthly purchase requirements |
||||
*/ |
||||
INSERT INTO tmpCustomer (customer_id) |
||||
SELECT p.customer_id |
||||
FROM payment AS p |
||||
WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end |
||||
GROUP BY customer_id |
||||
HAVING SUM(p.amount) > min_dollar_amount_purchased |
||||
AND COUNT(customer_id) > min_monthly_purchases; |
||||
|
||||
/* Populate OUT parameter with count of found customers */ |
||||
SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees; |
||||
|
||||
/* |
||||
Output ALL customer information of matching rewardees. |
||||
Customize output as needed. |
||||
*/ |
||||
SELECT c.* |
||||
FROM tmpCustomer AS t |
||||
INNER JOIN customer AS c ON t.customer_id = c.customer_id; |
||||
|
||||
/* Clean up */ |
||||
DROP TABLE tmpCustomer; |
||||
END; |
||||
|
||||
CREATE FUNCTION IF NOT EXISTS get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
    DETERMINISTIC
    READS SQL DATA
BEGIN
  -- Balance for a customer as of p_effective_date:
  --   + rental fees for every rental started on or before the date
  --   + one dollar per day each of those rentals ran past its rental_duration
  --   - all payments made on or before the date
  DECLARE v_rentfees DECIMAL(5,2); -- fees paid to rent the videos initially
  DECLARE v_overfees INTEGER;      -- late fees for prior rentals
  DECLARE v_payments DECIMAL(5,2); -- sum of payments made previously

  SELECT IFNULL(SUM(film.rental_rate), 0)
    INTO v_rentfees
    FROM film
    INNER JOIN inventory ON film.film_id = inventory.film_id
    INNER JOIN rental    ON inventory.inventory_id = rental.inventory_id
   WHERE rental.rental_date <= p_effective_date
     AND rental.customer_id = p_customer_id;

  SELECT IFNULL(SUM(
           IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
              (TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration,
              0)), 0)
    INTO v_overfees
    FROM rental
    INNER JOIN inventory ON inventory.inventory_id = rental.inventory_id
    INNER JOIN film      ON film.film_id = inventory.film_id
   WHERE rental.rental_date <= p_effective_date
     AND rental.customer_id = p_customer_id;

  SELECT IFNULL(SUM(payment.amount), 0)
    INTO v_payments
    FROM payment
   WHERE payment.payment_date <= p_effective_date
     AND payment.customer_id = p_customer_id;

  RETURN v_rentfees + v_overfees - v_payments;
END;
||||
|
||||
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
  -- Result set: every in-stock inventory id for the film at the store.
  SELECT inventory_id
    FROM inventory
   WHERE film_id = p_film_id
     AND store_id = p_store_id
     AND inventory_in_stock(inventory_id);

  -- OUT parameter: how many of those copies are in stock.
  SELECT COUNT(*)
    INTO p_film_count
    FROM inventory
   WHERE film_id = p_film_id
     AND store_id = p_store_id
     AND inventory_in_stock(inventory_id);
END;
||||
|
||||
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
  -- Result set: every rented-out inventory id for the film at the store.
  SELECT inventory_id
    FROM inventory
   WHERE film_id = p_film_id
     AND store_id = p_store_id
     AND NOT inventory_in_stock(inventory_id);

  -- OUT parameter: how many of those copies are currently out.
  SELECT COUNT(*)
    INTO p_film_count
    FROM inventory
   WHERE film_id = p_film_id
     AND store_id = p_store_id
     AND NOT inventory_in_stock(inventory_id);
END;
||||
|
||||
|
||||
CREATE FUNCTION IF NOT EXISTS inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
  DECLARE v_customer_id INT;
  -- No open rental row for this copy means nobody holds it: return NULL.
  DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;

  SELECT customer_id
    INTO v_customer_id
    FROM rental
   WHERE return_date IS NULL
     AND inventory_id = p_inventory_id;

  RETURN v_customer_id;
END;
||||
|
||||
CREATE FUNCTION IF NOT EXISTS inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
  DECLARE v_rentals INT;
  DECLARE v_out INT;

  -- A copy is in stock when it has never been rented, or when every
  -- rental row for it has a populated return_date.
  SELECT COUNT(*)
    INTO v_rentals
    FROM rental
   WHERE inventory_id = p_inventory_id;

  IF v_rentals = 0 THEN
    RETURN TRUE;
  END IF;

  SELECT COUNT(rental_id)
    INTO v_out
    FROM inventory
    LEFT JOIN rental USING (inventory_id)
   WHERE inventory.inventory_id = p_inventory_id
     AND rental.return_date IS NULL;

  -- No outstanding (unreturned) rental row means the copy is on the shelf.
  RETURN v_out = 0;
END;
||||
|
||||
-- Restore the session settings saved at the top of the script
-- (they were presumably relaxed for the bulk load — TODO confirm against the file head).
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
||||
|
||||
|
File diff suppressed because one or more lines are too long
@ -0,0 +1,243 @@
|
||||
import { DbConfig } from "../../src/interface/config"; |
||||
import { NcConfigFactory } from "../../src/lib"; |
||||
import SqlMgrv2 from "../../src/lib/db/sql-mgr/v2/SqlMgrv2"; |
||||
import fs from 'fs'; |
||||
import knex from "knex"; |
||||
import process from "process"; |
||||
|
||||
/**
 * Owns the lifecycle of the two databases the unit tests run against:
 * the NocoDB meta DB (`test_meta`) and the sakila sample DB (`test_sakila`).
 * Uses MySQL when reachable, otherwise falls back to on-disk sqlite files.
 */
export default class TestDbMngr {
  /** Name of the meta database. */
  public static readonly dbName = 'test_meta';
  /** Name of the external sakila sample database. */
  public static readonly sakilaDbName = 'test_sakila';
  /** knex handle to the meta DB; (re)created by setupMeta(). */
  public static metaKnex: knex;
  /** knex handle to the sakila DB; (re)created by setupSakila(). */
  public static sakilaKnex: knex;

  /** MySQL connection defaults; each field can be overridden via DB_* env vars. */
  public static defaultConnection = {
    user: process.env['DB_USER'] || 'root',
    password: process.env['DB_PASSWORD'] || 'password',
    host: process.env['DB_HOST'] || 'localhost',
    port: Number(process.env['DB_PORT']) || 3306,
    client: 'mysql2',
  }

  /** Active DB config; populated by connectMysql() or switchToSqlite(). */
  public static dbConfig: DbConfig;

  /** Probe a config; returns `{ code: -1 }` instead of throwing on failure. */
  static async testConnection(config: DbConfig) {
    try {
      return await SqlMgrv2.testConnection(config);
    } catch (e) {
      console.log(e);
      return { code: -1, message: 'Connection invalid' };
    }
  }

  /** Entry point: use MySQL when it is reachable, otherwise fall back to sqlite. */
  static async init() {
    if(await TestDbMngr.isMysqlConfigured()){
      await TestDbMngr.connectMysql();
    } else {
      await TestDbMngr.switchToSqlite();
    }
  }

  /** True when a connection with the default MySQL credentials succeeds. */
  static async isMysqlConfigured() {
    const { user, password, host, port, client } = TestDbMngr.defaultConnection;
    const config = NcConfigFactory.urlToDbConfig(`${client}://${user}:${password}@${host}:${port}`);
    config.connection = {
      user,
      password,
      host,
      port,
    }
    const result = await TestDbMngr.testConnection(config);
    return result.code !== -1;
  }

  /** Point NC at MySQL (exporting DATABASE_URL if unset) and build both DBs. */
  static async connectMysql() {
    const { user, password, host, port, client } = TestDbMngr.defaultConnection;
    if(!process.env[`DATABASE_URL`]){
      process.env[`DATABASE_URL`] = `${client}://${user}:${password}@${host}:${port}/${TestDbMngr.dbName}`;
    }

    TestDbMngr.dbConfig = NcConfigFactory.urlToDbConfig(
      NcConfigFactory.extractXcUrlFromJdbc(process.env[`DATABASE_URL`])
    );
    // Meta-table settings NcConfigFactory does not derive from the URL.
    this.dbConfig.meta = {
      tn: 'nc_evolutions',
      dbAlias: 'db',
      api: {
        type: 'rest',
        prefix: '',
        graphqlDepthLimit: 10,
      },
      inflection: {
        tn: 'camelize',
        cn: 'camelize',
      },
    }

    await TestDbMngr.setupMeta();
    await TestDbMngr.setupSakila();
  }

  /** (Re)create the meta DB and open a fresh knex handle to it. */
  static async setupMeta() {
    // Close any previous pool before recreating the database.
    if(TestDbMngr.metaKnex){
      await TestDbMngr.metaKnex.destroy();
    }

    if(TestDbMngr.isSqlite()){
      // NOTE(review): resetMetaSqlite is synchronous; the await is a no-op.
      await TestDbMngr.resetMetaSqlite();
      TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
      return
    }

    // MySQL: drop/recreate via a database-less connection, then reconnect.
    TestDbMngr.metaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
    await TestDbMngr.resetDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
    await TestDbMngr.metaKnex.destroy();

    TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
    await TestDbMngr.useDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
  }

  /** (Re)create the sakila DB and open a fresh knex handle to it. */
  static async setupSakila () {
    if(TestDbMngr.sakilaKnex) {
      await TestDbMngr.sakilaKnex.destroy();
    }

    if(TestDbMngr.isSqlite()){
      // sqlite: seeding copies a pre-built sakila.db file into place.
      await TestDbMngr.seedSakila();
      TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
      return
    }

    TestDbMngr.sakilaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
    await TestDbMngr.resetDatabase(TestDbMngr.sakilaKnex, TestDbMngr.sakilaDbName);
    await TestDbMngr.sakilaKnex.destroy();

    TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
    await TestDbMngr.useDatabase(TestDbMngr.sakilaKnex, TestDbMngr.sakilaDbName);
  }

  /** Fall back to file-backed sqlite databases (MySQL unreachable). */
  static async switchToSqlite() {
    // process.env[`DATABASE_URL`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.sqlite`;
    TestDbMngr.dbConfig = {
      client: 'sqlite3',
      connection: {
        filename: `${__dirname}/${TestDbMngr.dbName}.db`,
        database: TestDbMngr.dbName,
      },
      useNullAsDefault: true,
      meta: {
        tn: 'nc_evolutions',
        dbAlias: 'db',
        api: {
          type: 'rest',
          prefix: '',
          graphqlDepthLimit: 10,
        },
        inflection: {
          tn: 'camelize',
          cn: 'camelize',
        },
      },
    }

    process.env[`NC_DB`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.db`;
    await TestDbMngr.setupMeta();
    await TestDbMngr.setupSakila();
  }

  /** Drop and recreate a MySQL database; no-op for sqlite (file reset elsewhere). */
  private static async resetDatabase(knexClient, dbName) {
    if(TestDbMngr.isSqlite()){
      // return knexClient.raw(`DELETE FROM sqlite_sequence`);
    } else {
      try {
        // Best effort: the database may not exist on a first run.
        await knexClient.raw(`DROP DATABASE ${dbName}`);
      } catch(e) {}
      await knexClient.raw(`CREATE DATABASE ${dbName}`);
      console.log(`Database ${dbName} created`);
      await knexClient.raw(`USE ${dbName}`);
    }
  }

  /** True when the active config targets sqlite. */
  static isSqlite() {
    return TestDbMngr.dbConfig.client === 'sqlite3';
  }

  /** Select the given schema on a MySQL connection; no-op for sqlite. */
  private static async useDatabase(knexClient, dbName) {
    if(!TestDbMngr.isSqlite()){
      await knexClient.raw(`USE ${dbName}`);
    }
  }

  /** Deep copy of the config with the database name removed (for CREATE/DROP). */
  static getDbConfigWithNoDb() {
    const dbConfig =JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
    delete dbConfig.connection.database;
    return dbConfig;
  }

  /**
   * Config for the meta DB.
   * NOTE(review): returns the live shared dbConfig (no clone), unlike
   * getSakilaDbConfig — callers mutating it would affect everything.
   */
  static getMetaDbConfig() {
    return TestDbMngr.dbConfig;
  }

  /** Delete the sqlite meta DB file if present. */
  private static resetMetaSqlite() {
    if(fs.existsSync(`${__dirname}/test_meta.db`)){
      fs.unlinkSync(`${__dirname}/test_meta.db`);
    }
  }

  /** Deep-copied config pointing at the sakila DB (multi-statement enabled for seeding). */
  static getSakilaDbConfig() {
    const sakilaDbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
    sakilaDbConfig.connection.database = TestDbMngr.sakilaDbName;
    sakilaDbConfig.connection.multipleStatements = true
    if(TestDbMngr.isSqlite()){
      sakilaDbConfig.connection.filename = `${__dirname}/test_sakila.db`;
    }
    return sakilaDbConfig;
  }

  /** Seed sakila: copy the prebuilt sqlite file, or run the MySQL schema + data scripts. */
  static async seedSakila() {
    const testsDir = __dirname.replace('tests/unit', 'tests');

    if(TestDbMngr.isSqlite()){
      if(fs.existsSync(`${__dirname}/test_sakila.db`)){
        fs.unlinkSync(`${__dirname}/test_sakila.db`);
      }
      fs.copyFileSync(`${testsDir}/sqlite-sakila-db/sakila.db`, `${__dirname}/test_sakila.db`);
    } else {
      const schemaFile = fs.readFileSync(`${testsDir}/mysql-sakila-db/03-test-sakila-schema.sql`).toString();
      const dataFile = fs.readFileSync(`${testsDir}/mysql-sakila-db/04-test-sakila-data.sql`).toString();
      await TestDbMngr.sakilaKnex.raw(schemaFile);
      await TestDbMngr.sakilaKnex.raw(dataFile);
    }
  }

  /** Turn foreign key enforcement off for the given connection. */
  static async disableForeignKeyChecks(knexClient) {
    if(TestDbMngr.isSqlite()){
      await knexClient.raw("PRAGMA foreign_keys = OFF");
    }
    else {
      await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 0`);
    }
  }

  /** Turn foreign key enforcement back on for the given connection. */
  static async enableForeignKeyChecks(knexClient) {
    if(TestDbMngr.isSqlite()){
      await knexClient.raw(`PRAGMA foreign_keys = ON;`);
    }
    else {
      await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 1`);
    }
  }

  /** List user-visible table names on the given connection. */
  static async showAllTables(knexClient) {
    if(TestDbMngr.isSqlite()){
      const tables = await knexClient.raw(`SELECT name FROM sqlite_master WHERE type='table'`);
      return tables.filter(t => t.name !== 'sqlite_sequence' && t.name !== '_evolutions').map(t => t.name);
    }
    else {
      const response = await knexClient.raw(`SHOW TABLES`);
      return response[0].map(
        (table) => Object.values(table)[0]
      );
    }
  }
}
@ -0,0 +1,203 @@
|
||||
import { UITypes } from 'nocodb-sdk'; |
||||
import request from 'supertest'; |
||||
import Column from '../../../src/lib/models/Column'; |
||||
import FormViewColumn from '../../../src/lib/models/FormViewColumn'; |
||||
import GalleryViewColumn from '../../../src/lib/models/GalleryViewColumn'; |
||||
import GridViewColumn from '../../../src/lib/models/GridViewColumn'; |
||||
import Model from '../../../src/lib/models/Model'; |
||||
import Project from '../../../src/lib/models/Project'; |
||||
import View from '../../../src/lib/models/View'; |
||||
import { isSqlite } from '../init/db'; |
||||
|
||||
const defaultColumns = function(context) { |
||||
return [ |
||||
{ |
||||
column_name: 'id', |
||||
title: 'Id', |
||||
uidt: 'ID', |
||||
}, |
||||
{ |
||||
column_name: 'title', |
||||
title: 'Title', |
||||
uidt: 'SingleLineText', |
||||
}, |
||||
{ |
||||
cdf: 'CURRENT_TIMESTAMP', |
||||
column_name: 'created_at', |
||||
title: 'CreatedAt', |
||||
dtxp: '', |
||||
dtxs: '', |
||||
uidt: 'DateTime', |
||||
}, |
||||
{ |
||||
cdf: isSqlite(context) ? 'CURRENT_TIMESTAMP': 'CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP', |
||||
column_name: 'updated_at', |
||||
title: 'UpdatedAt', |
||||
dtxp: '', |
||||
dtxs: '', |
||||
uidt: 'DateTime', |
||||
}, |
||||
] |
||||
}; |
||||
|
||||
const createColumn = async (context, table, columnAttr) => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/tables/${table.id}/columns`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
...columnAttr, |
||||
}); |
||||
|
||||
const column: Column = (await table.getColumns()).find( |
||||
(column) => column.title === columnAttr.title |
||||
); |
||||
return column; |
||||
}; |
||||
|
||||
const createRollupColumn = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
title, |
||||
rollupFunction, |
||||
table, |
||||
relatedTableName, |
||||
relatedTableColumnTitle, |
||||
}: { |
||||
project: Project; |
||||
title: string; |
||||
rollupFunction: string; |
||||
table: Model; |
||||
relatedTableName: string; |
||||
relatedTableColumnTitle: string; |
||||
} |
||||
) => { |
||||
const childBases = await project.getBases(); |
||||
const childTable = await Model.getByIdOrName({ |
||||
project_id: project.id, |
||||
base_id: childBases[0].id!, |
||||
table_name: relatedTableName, |
||||
}); |
||||
const childTableColumns = await childTable.getColumns(); |
||||
const childTableColumn = await childTableColumns.find( |
||||
(column) => column.title === relatedTableColumnTitle |
||||
); |
||||
|
||||
const ltarColumn = (await table.getColumns()).find( |
||||
(column) => |
||||
column.uidt === UITypes.LinkToAnotherRecord && |
||||
column.colOptions?.fk_related_model_id === childTable.id |
||||
); |
||||
|
||||
const rollupColumn = await createColumn(context, table, { |
||||
title: title, |
||||
uidt: UITypes.Rollup, |
||||
fk_relation_column_id: ltarColumn?.id, |
||||
fk_rollup_column_id: childTableColumn?.id, |
||||
rollup_function: rollupFunction, |
||||
table_name: table.table_name, |
||||
column_name: title, |
||||
}); |
||||
|
||||
return rollupColumn; |
||||
}; |
||||
|
||||
const createLookupColumn = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
title, |
||||
table, |
||||
relatedTableName, |
||||
relatedTableColumnTitle, |
||||
}: { |
||||
project: Project; |
||||
title: string; |
||||
table: Model; |
||||
relatedTableName: string; |
||||
relatedTableColumnTitle: string; |
||||
} |
||||
) => { |
||||
const childBases = await project.getBases(); |
||||
const childTable = await Model.getByIdOrName({ |
||||
project_id: project.id, |
||||
base_id: childBases[0].id!, |
||||
table_name: relatedTableName, |
||||
}); |
||||
const childTableColumns = await childTable.getColumns(); |
||||
const childTableColumn = await childTableColumns.find( |
||||
(column) => column.title === relatedTableColumnTitle |
||||
); |
||||
|
||||
if (!childTableColumn) { |
||||
throw new Error( |
||||
`Could not find column ${relatedTableColumnTitle} in ${relatedTableName}` |
||||
); |
||||
} |
||||
|
||||
const ltarColumn = (await table.getColumns()).find( |
||||
(column) => |
||||
column.uidt === UITypes.LinkToAnotherRecord && |
||||
column.colOptions?.fk_related_model_id === childTable.id |
||||
); |
||||
const lookupColumn = await createColumn(context, table, { |
||||
title: title, |
||||
uidt: UITypes.Lookup, |
||||
fk_relation_column_id: ltarColumn?.id, |
||||
fk_lookup_column_id: childTableColumn?.id, |
||||
table_name: table.table_name, |
||||
column_name: title, |
||||
}); |
||||
|
||||
return lookupColumn; |
||||
}; |
||||
|
||||
const createLtarColumn = async ( |
||||
context, |
||||
{ |
||||
title, |
||||
parentTable, |
||||
childTable, |
||||
type, |
||||
}: { |
||||
title: string; |
||||
parentTable: Model; |
||||
childTable: Model; |
||||
type: string; |
||||
} |
||||
) => { |
||||
const ltarColumn = await createColumn(context, parentTable, { |
||||
title: title, |
||||
column_name: title, |
||||
uidt: UITypes.LinkToAnotherRecord, |
||||
parentId: parentTable.id, |
||||
childId: childTable.id, |
||||
type: type, |
||||
}); |
||||
|
||||
return ltarColumn; |
||||
}; |
||||
|
||||
const updateViewColumn = async (context, {view, column, attr}: {column: Column, view: View, attr: any}) => { |
||||
const res = await request(context.app) |
||||
.patch(`/api/v1/db/meta/views/${view.id}/columns/${column.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
...attr, |
||||
}); |
||||
|
||||
const updatedColumn: FormViewColumn | GridViewColumn | GalleryViewColumn = (await view.getColumns()).find( |
||||
(column) => column.id === column.id |
||||
)!; |
||||
|
||||
return updatedColumn; |
||||
} |
||||
|
||||
export { |
||||
defaultColumns, |
||||
createColumn, |
||||
createRollupColumn, |
||||
createLookupColumn, |
||||
createLtarColumn, |
||||
updateViewColumn |
||||
}; |
@ -0,0 +1,64 @@
|
||||
import request from 'supertest'; |
||||
import Project from '../../../src/lib/models/Project'; |
||||
import TestDbMngr from '../TestDbMngr'; |
||||
|
||||
const externalProjectConfig = { |
||||
title: 'sakila', |
||||
bases: [ |
||||
{ |
||||
type: 'mysql2', |
||||
config: { |
||||
client: 'mysql2', |
||||
connection: { |
||||
host: 'localhost', |
||||
port: '3306', |
||||
user: 'root', |
||||
password: 'password', |
||||
database: TestDbMngr.sakilaDbName, |
||||
}, |
||||
}, |
||||
inflection_column: 'camelize', |
||||
inflection_table: 'camelize', |
||||
}, |
||||
], |
||||
external: true, |
||||
}; |
||||
|
||||
const defaultProjectValue = { |
||||
title: 'Title', |
||||
}; |
||||
|
||||
const defaultSharedBaseValue = { |
||||
roles: 'viewer', |
||||
password: 'test', |
||||
}; |
||||
|
||||
const createSharedBase = async (app, token, project, sharedBaseArgs = {}) => { |
||||
await request(app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', token) |
||||
.send({ |
||||
...defaultSharedBaseValue, |
||||
...sharedBaseArgs, |
||||
}); |
||||
}; |
||||
|
||||
const createSakilaProject = async (context) => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/meta/projects/') |
||||
.set('xc-auth', context.token) |
||||
.send(externalProjectConfig); |
||||
|
||||
return (await Project.getByTitleOrId(response.body.id)) as Project; |
||||
}; |
||||
|
||||
const createProject = async (context, projectArgs = defaultProjectValue) => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/meta/projects/') |
||||
.set('xc-auth', context.token) |
||||
.send(projectArgs); |
||||
|
||||
return (await Project.getByTitleOrId(response.body.id)) as Project; |
||||
}; |
||||
|
||||
export { createProject, createSharedBase, createSakilaProject }; |
@ -0,0 +1,181 @@
|
||||
import { ColumnType, UITypes } from 'nocodb-sdk'; |
||||
import request from 'supertest'; |
||||
import Column from '../../../src/lib/models/Column'; |
||||
import Filter from '../../../src/lib/models/Filter'; |
||||
import Model from '../../../src/lib/models/Model'; |
||||
import Project from '../../../src/lib/models/Project'; |
||||
import Sort from '../../../src/lib/models/Sort'; |
||||
import NcConnectionMgrv2 from '../../../src/lib/utils/common/NcConnectionMgrv2'; |
||||
|
||||
const rowValue = (column: ColumnType, index: number) => { |
||||
switch (column.uidt) { |
||||
case UITypes.Number: |
||||
return index; |
||||
case UITypes.SingleLineText: |
||||
return `test-${index}`; |
||||
case UITypes.Date: |
||||
return '2020-01-01'; |
||||
case UITypes.DateTime: |
||||
return '2020-01-01 00:00:00'; |
||||
case UITypes.Email: |
||||
return `test-${index}@example.com`; |
||||
default: |
||||
return `test-${index}`; |
||||
} |
||||
}; |
||||
|
||||
const getRow = async (context, {project, table, id}) => { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/${id}`) |
||||
.set('xc-auth', context.token); |
||||
|
||||
return response.body; |
||||
}; |
||||
|
||||
const listRow = async ({ |
||||
project, |
||||
table, |
||||
options, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
options?: { |
||||
limit?: any; |
||||
offset?: any; |
||||
filterArr?: Filter[]; |
||||
sortArr?: Sort[]; |
||||
}; |
||||
}) => { |
||||
const bases = await project.getBases(); |
||||
const baseModel = await Model.getBaseModelSQL({ |
||||
id: table.id, |
||||
dbDriver: NcConnectionMgrv2.get(bases[0]!), |
||||
}); |
||||
|
||||
const ignorePagination = !options; |
||||
|
||||
return await baseModel.list(options, ignorePagination); |
||||
}; |
||||
|
||||
const getOneRow = async ( |
||||
context, |
||||
{ project, table }: { project: Project; table: Model } |
||||
) => { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/find-one`) |
||||
.set('xc-auth', context.token); |
||||
|
||||
return response.body; |
||||
}; |
||||
|
||||
const generateDefaultRowAttributes = ({ |
||||
columns, |
||||
index = 0, |
||||
}: { |
||||
columns: ColumnType[]; |
||||
index?: number; |
||||
}) => |
||||
columns.reduce((acc, column) => { |
||||
if ( |
||||
column.uidt === UITypes.LinkToAnotherRecord || |
||||
column.uidt === UITypes.ForeignKey || |
||||
column.uidt === UITypes.ID |
||||
) { |
||||
return acc; |
||||
} |
||||
acc[column.title!] = rowValue(column, index); |
||||
return acc; |
||||
}, {}); |
||||
|
||||
const createRow = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
table, |
||||
index = 0, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
index?: number; |
||||
} |
||||
) => { |
||||
const columns = await table.getColumns(); |
||||
const rowData = generateDefaultRowAttributes({ columns, index }); |
||||
|
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/data/noco/${project.id}/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send(rowData); |
||||
|
||||
return response.body; |
||||
}; |
||||
|
||||
const createBulkRows = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
table, |
||||
values |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
values: any[]; |
||||
}) => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/data/bulk/noco/${project.id}/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send(values) |
||||
.expect(200); |
||||
} |
||||
|
||||
// Links 2 table rows together. Will create rows if ids are not provided
|
||||
const createChildRow = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
table, |
||||
childTable, |
||||
column, |
||||
rowId, |
||||
childRowId, |
||||
type, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
childTable: Model; |
||||
column: Column; |
||||
rowId?: string; |
||||
childRowId?: string; |
||||
type: string; |
||||
} |
||||
) => { |
||||
if (!rowId) { |
||||
const row = await createRow(context, { project, table }); |
||||
rowId = row['Id']; |
||||
} |
||||
|
||||
if (!childRowId) { |
||||
const row = await createRow(context, { table: childTable, project }); |
||||
childRowId = row['Id']; |
||||
} |
||||
|
||||
await request(context.app) |
||||
.post( |
||||
`/api/v1/db/data/noco/${project.id}/${table.id}/${rowId}/${type}/${column.title}/${childRowId}` |
||||
) |
||||
.set('xc-auth', context.token); |
||||
|
||||
const row = await getRow(context, { project, table, id: rowId }); |
||||
|
||||
return row; |
||||
}; |
||||
|
||||
export { |
||||
createRow, |
||||
getRow, |
||||
createChildRow, |
||||
getOneRow, |
||||
listRow, |
||||
generateDefaultRowAttributes, |
||||
createBulkRows |
||||
}; |
@ -0,0 +1,42 @@
|
||||
import request from 'supertest'; |
||||
import Model from '../../../src/lib/models/Model'; |
||||
import Project from '../../../src/lib/models/Project'; |
||||
import { defaultColumns } from './column'; |
||||
|
||||
const defaultTableValue = (context) => ({ |
||||
table_name: 'Table1', |
||||
title: 'Table1_Title', |
||||
columns: defaultColumns(context), |
||||
}); |
||||
|
||||
const createTable = async (context, project, args = {}) => { |
||||
const defaultValue = defaultTableValue(context); |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ ...defaultValue, ...args }); |
||||
|
||||
const table: Model = await Model.get(response.body.id); |
||||
return table; |
||||
}; |
||||
|
||||
const getTable = async ({project, name}: {project: Project, name: string}) => { |
||||
const bases = await project.getBases(); |
||||
return await Model.getByIdOrName({ |
||||
project_id: project.id, |
||||
base_id: bases[0].id!, |
||||
table_name: name, |
||||
}); |
||||
} |
||||
|
||||
const getAllTables = async ({project}: {project: Project}) => { |
||||
const bases = await project.getBases(); |
||||
const tables = await Model.list({ |
||||
project_id: project.id, |
||||
base_id: bases[0].id!, |
||||
}); |
||||
|
||||
return tables; |
||||
} |
||||
|
||||
export { createTable, getTable, getAllTables }; |
@ -0,0 +1,18 @@
|
||||
import request from 'supertest'; |
||||
import User from '../../../src/lib/models/User'; |
||||
|
||||
const defaultUserArgs = { |
||||
email: 'test@example.com', |
||||
password: 'A1234abh2@dsad', |
||||
}; |
||||
|
||||
const createUser = async (context, userArgs = {}) => { |
||||
const args = { ...defaultUserArgs, ...userArgs }; |
||||
const response = await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send(args); |
||||
const user = User.getByEmail(args.email); |
||||
return { token: response.body.token, user }; |
||||
}; |
||||
|
||||
export { createUser, defaultUserArgs }; |
@ -0,0 +1,35 @@
|
||||
import { ViewTypes } from 'nocodb-sdk'; |
||||
import request from 'supertest'; |
||||
import Model from '../../../src/lib/models/Model'; |
||||
import View from '../../../src/lib/models/View'; |
||||
|
||||
const createView = async (context, {title, table, type}: {title: string, table: Model, type: ViewTypes}) => { |
||||
const viewTypeStr = (type) => { |
||||
switch (type) { |
||||
case ViewTypes.GALLERY: |
||||
return 'galleries'; |
||||
case ViewTypes.FORM: |
||||
return 'forms'; |
||||
case ViewTypes.GRID: |
||||
return 'grids'; |
||||
case ViewTypes.KANBAN: |
||||
return 'kanbans'; |
||||
default: |
||||
throw new Error('Invalid view type'); |
||||
} |
||||
}; |
||||
|
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/tables/${table.id}/${viewTypeStr(type)}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
title, |
||||
type, |
||||
}); |
||||
|
||||
const view = await View.getByTitleOrId({fk_model_id: table.id, titleOrId:title}) as View; |
||||
|
||||
return view |
||||
} |
||||
|
||||
export {createView} |
@ -0,0 +1,20 @@
|
||||
import 'mocha'; |
||||
|
||||
import restTests from './rest/index.test'; |
||||
import modelTests from './model/index.test'; |
||||
import TestDbMngr from './TestDbMngr' |
||||
|
||||
// Force test mode and disable cache/telemetry before Noco boots.
process.env.NODE_ENV = 'test';
process.env.TEST = 'test';
process.env.NC_DISABLE_CACHE = 'true';
process.env.NC_DISABLE_TELE = 'true';


// Mocha runs with delayed root suite: initialize the test databases first,
// register the suites, then hand control back via the global run().
(async function() {
  await TestDbMngr.init();

  modelTests();
  restTests();

  run();
})();
@ -0,0 +1,56 @@
|
||||
import Model from "../../../src/lib/models/Model"; |
||||
import Project from "../../../src/lib/models/Project"; |
||||
import NcConnectionMgrv2 from "../../../src/lib/utils/common/NcConnectionMgrv2"; |
||||
import { orderedMetaTables } from "../../../src/lib/utils/globals"; |
||||
import TestDbMngr from "../TestDbMngr"; |
||||
|
||||
const dropTablesAllNonExternalProjects = async () => { |
||||
const projects = await Project.list({}); |
||||
const userCreatedTableNames: string[] = []; |
||||
await Promise.all( |
||||
projects |
||||
.filter((project) => project.is_meta) |
||||
.map(async (project) => { |
||||
await project.getBases(); |
||||
const base = project.bases && project.bases[0]; |
||||
if (!base) return; |
||||
|
||||
const models = await Model.list({ |
||||
project_id: project.id, |
||||
base_id: base.id!, |
||||
}); |
||||
models.forEach((model) => { |
||||
userCreatedTableNames.push(model.table_name); |
||||
}); |
||||
}) |
||||
); |
||||
|
||||
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex); |
||||
|
||||
for (const tableName of userCreatedTableNames) { |
||||
await TestDbMngr.metaKnex.raw(`DROP TABLE ${tableName}`); |
||||
} |
||||
|
||||
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex); |
||||
}; |
||||
|
||||
const cleanupMetaTables = async () => { |
||||
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex); |
||||
for (const tableName of orderedMetaTables) { |
||||
try { |
||||
await TestDbMngr.metaKnex.raw(`DELETE FROM ${tableName}`); |
||||
} catch (e) {} |
||||
} |
||||
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex); |
||||
}; |
||||
|
||||
/**
 * Reset meta state between tests: close connection pools, drop all tables
 * created inside meta projects, then clear the meta tables themselves.
 * Failures are logged, not rethrown, so teardown never aborts a run.
 */
export default async function () {
  try {
    await NcConnectionMgrv2.destroyAll();

    await dropTablesAllNonExternalProjects();
    await cleanupMetaTables();
  } catch (e) {
    console.error('cleanupMeta', e);
  }
}
@ -0,0 +1,81 @@
|
||||
import Audit from '../../../src/lib/models/Audit'; |
||||
import Project from '../../../src/lib/models/Project'; |
||||
|
||||
import TestDbMngr from '../TestDbMngr'; |
||||
|
||||
const dropTablesOfSakila = async () => { |
||||
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.sakilaKnex); |
||||
|
||||
for(const tableName of sakilaTableNames){ |
||||
try { |
||||
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`); |
||||
} catch(e){} |
||||
} |
||||
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.sakilaKnex); |
||||
} |
||||
|
||||
const resetAndSeedSakila = async () => { |
||||
try { |
||||
await dropTablesOfSakila(); |
||||
await TestDbMngr.seedSakila(); |
||||
} catch (e) { |
||||
console.error('resetSakila', e); |
||||
throw e |
||||
} |
||||
} |
||||
|
||||
const cleanUpSakila = async () => { |
||||
try { |
||||
const sakilaProject = await Project.getByTitle('sakila'); |
||||
|
||||
const audits = sakilaProject && await Audit.projectAuditList(sakilaProject.id, {}); |
||||
|
||||
if(audits?.length > 0) { |
||||
return await resetAndSeedSakila(); |
||||
} |
||||
|
||||
const tablesInSakila = await TestDbMngr.showAllTables(TestDbMngr.sakilaKnex); |
||||
|
||||
await Promise.all( |
||||
tablesInSakila |
||||
.filter((tableName) => !sakilaTableNames.includes(tableName)) |
||||
.map(async (tableName) => { |
||||
try { |
||||
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`); |
||||
} catch (e) { |
||||
console.error(e); |
||||
} |
||||
}) |
||||
); |
||||
} catch (e) { |
||||
console.error('cleanUpSakila', e); |
||||
} |
||||
}; |
||||
|
||||
const sakilaTableNames = [ |
||||
'actor', |
||||
'address', |
||||
'category', |
||||
'city', |
||||
'country', |
||||
'customer', |
||||
'film', |
||||
'film_actor', |
||||
'film_category', |
||||
'film_text', |
||||
'inventory', |
||||
'language', |
||||
'payment', |
||||
'rental', |
||||
'staff', |
||||
'store', |
||||
'actor_info', |
||||
'customer_list', |
||||
'film_list', |
||||
'nicer_but_slower_film_list', |
||||
'sales_by_film_category', |
||||
'sales_by_store', |
||||
'staff_list', |
||||
]; |
||||
|
||||
export { cleanUpSakila, resetAndSeedSakila }; |
@ -0,0 +1,12 @@
|
||||
import { DbConfig } from "../../../src/interface/config"; |
||||
|
||||
|
||||
const isSqlite = (context) =>{ |
||||
return (context.dbConfig as DbConfig).client === 'sqlite' || (context.dbConfig as DbConfig).client === 'sqlite3'; |
||||
} |
||||
|
||||
const isMysql = (context) => |
||||
(context.dbConfig as DbConfig).client === 'mysql' || |
||||
(context.dbConfig as DbConfig).client === 'mysql2'; |
||||
|
||||
export { isSqlite, isMysql }; |
@ -0,0 +1,42 @@
|
||||
|
||||
import express from 'express'; |
||||
import { Noco } from '../../../src/lib'; |
||||
|
||||
import cleanupMeta from './cleanupMeta'; |
||||
import {cleanUpSakila, resetAndSeedSakila} from './cleanupSakila'; |
||||
import { createUser } from '../factory/user'; |
||||
|
||||
let server; |
||||
|
||||
const serverInit = async () => { |
||||
const serverInstance = express(); |
||||
serverInstance.enable('trust proxy'); |
||||
serverInstance.use(await Noco.init()); |
||||
serverInstance.use(function(req, res, next){ |
||||
// 50 sec timeout
|
||||
req.setTimeout(500000, function(){ |
||||
console.log('Request has timed out.'); |
||||
res.send(408); |
||||
}); |
||||
next(); |
||||
}); |
||||
return serverInstance; |
||||
}; |
||||
|
||||
const isFirstTimeRun = () => !server |
||||
|
||||
export default async function () {
|
||||
const {default: TestDbMngr} = await import('../TestDbMngr'); |
||||
|
||||
if (isFirstTimeRun()) { |
||||
await resetAndSeedSakila(); |
||||
server = await serverInit(); |
||||
} |
||||
|
||||
await cleanUpSakila(); |
||||
await cleanupMeta(); |
||||
|
||||
const { token } = await createUser({ app: server }, { roles: 'editor' }); |
||||
|
||||
return { app: server, token, dbConfig: TestDbMngr.dbConfig }; |
||||
} |
@ -0,0 +1,10 @@
|
||||
import 'mocha'; |
||||
import baseModelSqlTest from './tests/baseModelSql.test'; |
||||
|
||||
function modelTests() { |
||||
baseModelSqlTest(); |
||||
} |
||||
|
||||
export default function () { |
||||
describe('Model', modelTests); |
||||
} |
@ -0,0 +1,500 @@
|
||||
import 'mocha'; |
||||
import init from '../../init'; |
||||
import { BaseModelSqlv2 } from '../../../../src/lib/db/sql-data-mapper/lib/sql/BaseModelSqlv2'; |
||||
import { createProject } from '../../factory/project'; |
||||
import { createTable } from '../../factory/table'; |
||||
import NcConnectionMgrv2 from '../../../../src/lib/utils/common/NcConnectionMgrv2'; |
||||
import Base from '../../../../src/lib/models/Base'; |
||||
import Model from '../../../../src/lib/models/Model'; |
||||
import Project from '../../../../src/lib/models/Project'; |
||||
import View from '../../../../src/lib/models/View'; |
||||
import { createRow, generateDefaultRowAttributes } from '../../factory/row'; |
||||
import Audit from '../../../../src/lib/models/Audit'; |
||||
import { expect } from 'chai'; |
||||
import Filter from '../../../../src/lib/models/Filter'; |
||||
import { createLtarColumn } from '../../factory/column'; |
||||
import LinkToAnotherRecordColumn from '../../../../src/lib/models/LinkToAnotherRecordColumn'; |
||||
|
||||
function baseModelSqlTests() { |
||||
let context; |
||||
let project: Project; |
||||
let table: Model; |
||||
let view: View; |
||||
let baseModelSql: BaseModelSqlv2; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
project = await createProject(context); |
||||
table = await createTable(context, project); |
||||
view = await table.getViews()[0]; |
||||
|
||||
const base = await Base.get(table.base_id); |
||||
baseModelSql = new BaseModelSqlv2({ |
||||
dbDriver: NcConnectionMgrv2.get(base), |
||||
model: table, |
||||
view |
||||
}) |
||||
}); |
||||
|
||||
it('Insert record', async () => { |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
const columns = await table.getColumns(); |
||||
|
||||
const inputData = generateDefaultRowAttributes({columns}) |
||||
const response = await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request); |
||||
const insertedRow = (await baseModelSql.list())[0]; |
||||
|
||||
expect(insertedRow).to.include(inputData); |
||||
expect(insertedRow).to.include(response); |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'INSERT'); |
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'INSERT', |
||||
description: '1 inserted into Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk insert record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index})) |
||||
await baseModelSql.bulkInsert(bulkData, {cookie:request}); |
||||
|
||||
const insertedRows = await baseModelSql.list(); |
||||
|
||||
bulkData.forEach((inputData, index) => { |
||||
expect(insertedRows[index]).to.include(inputData); |
||||
}); |
||||
|
||||
const rowBulkInsertedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_INSERT');; |
||||
expect(rowBulkInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_INSERT', |
||||
status: null, |
||||
description: '10 records bulk inserted into Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Update record', async () => { |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
|
||||
const columns = await table.getColumns(); |
||||
|
||||
await baseModelSql.insert(generateDefaultRowAttributes({columns})); |
||||
const rowId = 1; |
||||
await baseModelSql.updateByPk(rowId, {Title: 'test'},undefined, request); |
||||
|
||||
const updatedRow = await baseModelSql.readByPk(1); |
||||
|
||||
expect(updatedRow).to.include({Id: rowId, Title: 'test'}); |
||||
|
||||
const rowUpdatedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'UPDATE'); |
||||
expect(rowUpdatedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'UPDATE', |
||||
description: '1 updated in Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk update record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index})) |
||||
await baseModelSql.bulkInsert(bulkData, {cookie:request}); |
||||
|
||||
const insertedRows: any[] = await baseModelSql.list(); |
||||
|
||||
await baseModelSql.bulkUpdate(insertedRows.map((row)=> ({...row, Title: `new-${row['Title']}`})), { cookie: request }); |
||||
|
||||
const updatedRows = await baseModelSql.list(); |
||||
|
||||
updatedRows.forEach((row, index) => { |
||||
expect(row['Title']).to.equal(`new-test-${index}`); |
||||
}) |
||||
const rowBulkUpdateAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_UPDATE'); |
||||
expect(rowBulkUpdateAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_UPDATE', |
||||
status: null, |
||||
description: '10 records bulk updated in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk update all record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index})) |
||||
await baseModelSql.bulkInsert(bulkData, {cookie:request}); |
||||
|
||||
const idColumn = columns.find((column) => column.title === 'Id')!; |
||||
|
||||
await baseModelSql.bulkUpdateAll({filterArr: [ |
||||
new Filter({ |
||||
logical_op: 'and', |
||||
fk_column_id: idColumn.id, |
||||
comparison_op: 'lt', |
||||
value: 5, |
||||
}) |
||||
]}, ({Title: 'new-1'}), { cookie: request }); |
||||
|
||||
const updatedRows = await baseModelSql.list(); |
||||
|
||||
updatedRows.forEach((row) => { |
||||
if(row.id < 5) expect(row['Title']).to.equal('new-1'); |
||||
}) |
||||
const rowBulkUpdateAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_UPDATE'); |
||||
expect(rowBulkUpdateAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_UPDATE', |
||||
status: null, |
||||
description: '4 records bulk updated in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Delete record', async () => { |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'}, |
||||
params: {id: 1} |
||||
} |
||||
|
||||
const columns = await table.getColumns(); |
||||
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index})) |
||||
await baseModelSql.bulkInsert(bulkData, {cookie:request}); |
||||
|
||||
const rowIdToDeleted = 1; |
||||
await baseModelSql.delByPk(rowIdToDeleted,undefined ,request); |
||||
|
||||
const deletedRow = await baseModelSql.readByPk(rowIdToDeleted); |
||||
|
||||
expect(deletedRow).to.be.undefined; |
||||
|
||||
console.log('Delete record', await Audit.projectAuditList(project.id, {})); |
||||
const rowDeletedAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'DELETE'); |
||||
expect(rowDeletedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'DELETE', |
||||
description: '1 deleted from Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk delete records', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index})) |
||||
await baseModelSql.bulkInsert(bulkData, {cookie:request}); |
||||
|
||||
const insertedRows: any[] = await baseModelSql.list(); |
||||
|
||||
await baseModelSql.bulkDelete( |
||||
insertedRows |
||||
.filter((row) => row['Id'] < 5) |
||||
.map((row)=> ({'id': row['Id']})),
|
||||
{ cookie: request } |
||||
); |
||||
|
||||
const remainingRows = await baseModelSql.list(); |
||||
|
||||
expect(remainingRows).to.length(6); |
||||
|
||||
const rowBulkDeleteAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_DELETE'); |
||||
|
||||
expect(rowBulkDeleteAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_DELETE', |
||||
status: null, |
||||
description: '4 records bulk deleted in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk delete all record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
const bulkData = Array(10).fill(0).map((_, index) => generateDefaultRowAttributes({columns, index})) |
||||
await baseModelSql.bulkInsert(bulkData, {cookie:request}); |
||||
|
||||
const idColumn = columns.find((column) => column.title === 'Id')!; |
||||
|
||||
await baseModelSql.bulkDeleteAll({filterArr: [ |
||||
new Filter({ |
||||
logical_op: 'and', |
||||
fk_column_id: idColumn.id, |
||||
comparison_op: 'lt', |
||||
value: 5, |
||||
}) |
||||
]}, { cookie: request }); |
||||
|
||||
const remainingRows = await baseModelSql.list(); |
||||
|
||||
expect(remainingRows).to.length(6); |
||||
const rowBulkDeleteAudit = (await Audit.projectAuditList(project.id, {})).find((audit) => audit.op_sub_type === 'BULK_DELETE'); |
||||
expect(rowBulkDeleteAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_DELETE', |
||||
status: null, |
||||
description: '4 records bulk deleted in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Nested insert', async () => { |
||||
const childTable = await createTable(context, project, { |
||||
title: 'Child Table', |
||||
table_name: 'child_table', |
||||
}) |
||||
const ltarColumn = await createLtarColumn(context, { |
||||
title: 'Ltar Column', |
||||
parentTable: table, |
||||
childTable, |
||||
type: "hm" |
||||
}) |
||||
const childRow = await createRow(context, { |
||||
project, |
||||
table: childTable, |
||||
}) |
||||
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>(); |
||||
const childCol = await ltarColOptions.getChildColumn(); |
||||
|
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
|
||||
await baseModelSql.nestedInsert( |
||||
{...generateDefaultRowAttributes({columns}), [ltarColumn.title]: [{'Id': childRow['Id']}]},
|
||||
undefined,
|
||||
request |
||||
); |
||||
|
||||
const childBaseModel = new BaseModelSqlv2({ |
||||
dbDriver: NcConnectionMgrv2.get(await Base.get(table.base_id)), |
||||
model: childTable, |
||||
view |
||||
}) |
||||
const insertedChildRow = await childBaseModel.readByPk(childRow['Id']); |
||||
expect(insertedChildRow[childCol.column_name]).to.equal(childRow['Id']); |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})) |
||||
.filter((audit) => audit.fk_model_id === table.id) |
||||
.find((audit) => audit.op_sub_type === 'INSERT'); |
||||
|
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'INSERT', |
||||
description: '1 inserted into Table1_Title', |
||||
}); |
||||
}) |
||||
|
||||
it('Link child', async () => { |
||||
const childTable = await createTable(context, project, { |
||||
title: 'Child Table', |
||||
table_name: 'child_table', |
||||
}) |
||||
const ltarColumn = await createLtarColumn(context, { |
||||
title: 'Ltar Column', |
||||
parentTable: table, |
||||
childTable, |
||||
type: "hm" |
||||
}) |
||||
const insertedChildRow = await createRow(context, { |
||||
project, |
||||
table: childTable, |
||||
}) |
||||
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>(); |
||||
const childCol = await ltarColOptions.getChildColumn(); |
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
|
||||
await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request); |
||||
const insertedRow = await baseModelSql.readByPk(1); |
||||
|
||||
await baseModelSql.addChild({ |
||||
colId: ltarColumn.id, |
||||
rowId: insertedRow['Id'], |
||||
childId: insertedChildRow['Id'], |
||||
cookie: request |
||||
}); |
||||
|
||||
const childBaseModel = new BaseModelSqlv2({ |
||||
dbDriver: NcConnectionMgrv2.get(await Base.get(table.base_id)), |
||||
model: childTable, |
||||
view |
||||
}) |
||||
const updatedChildRow = await childBaseModel.readByPk(insertedChildRow['Id']); |
||||
|
||||
expect(updatedChildRow[childCol.column_name]).to.equal(insertedRow['Id']); |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})) |
||||
.filter((audit) => audit.fk_model_id === table.id) |
||||
.find((audit) => audit.op_sub_type === 'LINK_RECORD'); |
||||
|
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'LINK_RECORD', |
||||
description: 'Record [id:1] record linked with record [id:1] record in Table1_Title', |
||||
}); |
||||
}) |
||||
|
||||
it('Unlink child', async () => { |
||||
const childTable = await createTable(context, project, { |
||||
title: 'Child Table', |
||||
table_name: 'child_table', |
||||
}) |
||||
const ltarColumn = await createLtarColumn(context, { |
||||
title: 'Ltar Column', |
||||
parentTable: table, |
||||
childTable, |
||||
type: "hm" |
||||
}) |
||||
const insertedChildRow = await createRow(context, { |
||||
project, |
||||
table: childTable, |
||||
}) |
||||
const ltarColOptions = await ltarColumn.getColOptions<LinkToAnotherRecordColumn>(); |
||||
const childCol = await ltarColOptions.getChildColumn(); |
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: {email: 'test@example.com'} |
||||
} |
||||
|
||||
await baseModelSql.insert(generateDefaultRowAttributes({columns}), undefined, request); |
||||
const insertedRow = await baseModelSql.readByPk(1); |
||||
|
||||
await baseModelSql.addChild({ |
||||
colId: ltarColumn.id, |
||||
rowId: insertedRow['Id'], |
||||
childId: insertedChildRow['Id'], |
||||
cookie: request |
||||
}); |
||||
|
||||
await baseModelSql.removeChild({ |
||||
colId: ltarColumn.id, |
||||
rowId: insertedRow['Id'], |
||||
childId: insertedChildRow['Id'], |
||||
cookie: request |
||||
}); |
||||
|
||||
const childBaseModel = new BaseModelSqlv2({ |
||||
dbDriver: NcConnectionMgrv2.get(await Base.get(table.base_id)), |
||||
model: childTable, |
||||
view |
||||
}) |
||||
const updatedChildRow = await childBaseModel.readByPk(insertedChildRow['Id']); |
||||
|
||||
expect(updatedChildRow[childCol.column_name]).to.be.null; |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})) |
||||
.filter((audit) => audit.fk_model_id === table.id) |
||||
.find((audit) => audit.op_sub_type === 'UNLINK_RECORD'); |
||||
|
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'UNLINK_RECORD', |
||||
description: 'Record [id:1] record unlinked with record [id:1] record in Table1_Title', |
||||
}); |
||||
}) |
||||
} |
||||
|
||||
export default function () { |
||||
describe('BaseModelSql', baseModelSqlTests); |
||||
} |
@ -0,0 +1,18 @@
|
||||
import 'mocha'; |
||||
import authTests from './tests/auth.test'; |
||||
import projectTests from './tests/project.test'; |
||||
import tableTests from './tests/table.test'; |
||||
import tableRowTests from './tests/tableRow.test'; |
||||
import viewRowTests from './tests/viewRow.test'; |
||||
|
||||
function restTests() { |
||||
authTests(); |
||||
projectTests(); |
||||
tableTests(); |
||||
tableRowTests(); |
||||
viewRowTests(); |
||||
} |
||||
|
||||
export default function () { |
||||
describe('Rest', restTests); |
||||
} |
@ -0,0 +1,169 @@
|
||||
import { expect } from 'chai'; |
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import init from '../../init'; |
||||
import { defaultUserArgs } from '../../factory/user'; |
||||
|
||||
function authTests() { |
||||
let context; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
}); |
||||
|
||||
it('Signup with valid email', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send({ email: 'new@example.com', password: defaultUserArgs.password }) |
||||
.expect(200) |
||||
|
||||
const token = response.body.token; |
||||
expect(token).to.be.a('string'); |
||||
}); |
||||
|
||||
it('Signup with invalid email', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send({ email: 'test', password: defaultUserArgs.password }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signup with invalid passsword', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send({ email: defaultUserArgs.email, password: 'weakpass' }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signin with valid credentials', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
.send({ |
||||
email: defaultUserArgs.email, |
||||
password: defaultUserArgs.password, |
||||
}) |
||||
.expect(200); |
||||
const token = response.body.token; |
||||
expect(token).to.be.a('string'); |
||||
}); |
||||
|
||||
it('Signup without email and password', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
// pass empty data in await request
|
||||
.send({}) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signin with invalid credentials', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
.send({ email: 'abc@abc.com', password: defaultUserArgs.password }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signin with invalid password', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
.send({ email: defaultUserArgs.email, password: 'wrongPassword' }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('me without token', async () => { |
||||
const response = await request(context.app) |
||||
.get('/api/v1/auth/user/me') |
||||
.unset('xc-auth') |
||||
.expect(200); |
||||
|
||||
if (!response.body?.roles?.guest) { |
||||
return new Error('User should be guest'); |
||||
} |
||||
}); |
||||
|
||||
it('me with token', async () => { |
||||
const response = await request(context.app) |
||||
.get('/api/v1/auth/user/me') |
||||
.set('xc-auth', context.token) |
||||
.expect(200); |
||||
|
||||
const email = response.body.email; |
||||
expect(email).to.equal(defaultUserArgs.email); |
||||
}); |
||||
|
||||
it('Forgot password with a non-existing email id', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/forgot') |
||||
.send({ email: 'nonexisting@email.com' }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
// todo: fix mailer issues
|
||||
// it('Forgot password with an existing email id', function () {});
|
||||
|
||||
it('Change password', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/change') |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
currentPassword: defaultUserArgs.password, |
||||
newPassword: 'NEW' + defaultUserArgs.password, |
||||
}) |
||||
.expect(200); |
||||
}); |
||||
|
||||
it('Change password - after logout', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/change') |
||||
.unset('xc-auth') |
||||
.send({ |
||||
currentPassword: defaultUserArgs.password, |
||||
newPassword: 'NEW' + defaultUserArgs.password, |
||||
}) |
||||
.expect(401); |
||||
}); |
||||
|
||||
// todo:
|
||||
it('Reset Password with an invalid token', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/reset/someRandomValue') |
||||
.send({ email: defaultUserArgs.email }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Email validate with an invalid token', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/email/validate/someRandomValue') |
||||
.send({ email: defaultUserArgs.email }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
// todo:
|
||||
// it('Email validate with a valid token', async () => {
|
||||
// // await request(context.app)
|
||||
// // .post('/auth/email/validate/someRandomValue')
|
||||
// // .send({email: EMAIL_ID})
|
||||
// // .expect(500, done);
|
||||
// });
|
||||
|
||||
// todo:
|
||||
// it('Forgot password validate with a valid token', async () => {
|
||||
// // await request(context.app)
|
||||
// // .post('/auth/token/validate/someRandomValue')
|
||||
// // .send({email: EMAIL_ID})
|
||||
// // .expect(500, done);
|
||||
// });
|
||||
|
||||
// todo:
|
||||
// it('Reset Password with an valid token', async () => {
|
||||
// // await request(context.app)
|
||||
// // .post('/auth/password/reset/someRandomValue')
|
||||
// // .send({password: 'anewpassword'})
|
||||
// // .expect(500, done);
|
||||
// });
|
||||
|
||||
// todo: refresh token api
|
||||
} |
||||
|
||||
export default function () { |
||||
describe('Auth', authTests); |
||||
} |
@ -0,0 +1,268 @@
|
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import init from '../../init/index'; |
||||
import { createProject, createSharedBase } from '../../factory/project'; |
||||
import { beforeEach } from 'mocha'; |
||||
import { Exception } from 'handlebars'; |
||||
import Project from '../../../../src/lib/models/Project'; |
||||
|
||||
function projectTest() { |
||||
let context; |
||||
let project; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
|
||||
project = await createProject(context); |
||||
}); |
||||
|
||||
it('Get project info', async () => { |
||||
await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/info`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
}); |
||||
|
||||
// todo: Test by creating models under project and check if the UCL is working
|
||||
it('UI ACL', async () => { |
||||
await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/visibility-rules`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
}); |
||||
// todo: Test creating visibility set
|
||||
|
||||
it('List projects', async () => { |
||||
const response = await request(context.app) |
||||
.get('/api/v1/db/meta/projects/') |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
if (response.body.list.length !== 1) new Error('Should list only 1 project'); |
||||
if (!response.body.pageInfo) new Error('Should have pagination info'); |
||||
}); |
||||
|
||||
it('Create project', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/meta/projects/') |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
title: 'Title1', |
||||
}) |
||||
.expect(200); |
||||
|
||||
const newProject = await Project.getByTitleOrId(response.body.id); |
||||
if (!newProject) return new Error('Project not created'); |
||||
}); |
||||
|
||||
it('Create projects with existing title', async () => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
title: project.title, |
||||
}) |
||||
.expect(400); |
||||
}); |
||||
|
||||
// todo: fix passport user role popluation bug
|
||||
// it('Delete project', async async () => {
|
||||
// const toBeDeletedProject = await createProject(app, token, {
|
||||
// title: 'deletedTitle',
|
||||
// });
|
||||
// await request(app)
|
||||
// .delete('/api/v1/db/meta/projects/${toBeDeletedProject.id}')
|
||||
// .set('xc-auth', token)
|
||||
// .send({
|
||||
// title: 'Title1',
|
||||
// })
|
||||
// .expect(200, async (err) => {
|
||||
// // console.log(res);
|
||||
//
|
||||
|
||||
// const deletedProject = await Project.getByTitleOrId(
|
||||
// toBeDeletedProject.id
|
||||
// );
|
||||
// if (deletedProject) return new Error('Project not delete');
|
||||
|
||||
// new Error();
|
||||
// });
|
||||
// });
|
||||
|
||||
it('Read project', async () => { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send() |
||||
.expect(200); |
||||
|
||||
if (response.body.id !== project.id) return new Error('Got the wrong project'); |
||||
}); |
||||
|
||||
it('Update projects', async () => { |
||||
await request(context.app) |
||||
.patch(`/api/v1/db/meta/projects/${project.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
title: 'NewTitle', |
||||
}) |
||||
.expect(200); |
||||
|
||||
const newProject = await Project.getByTitleOrId(project.id); |
||||
if (newProject.title !== 'NewTitle') { |
||||
return new Error('Project not updated'); |
||||
} |
||||
}); |
||||
|
||||
it('Update projects with existing title', async function () { |
||||
const newProject = await createProject(context, { |
||||
title: 'NewTitle1', |
||||
}); |
||||
|
||||
await request(context.app) |
||||
.patch(`/api/v1/db/meta/projects/${project.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
title: newProject.title, |
||||
}) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Create project shared base', async () => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
roles: 'viewer', |
||||
password: 'test', |
||||
}) |
||||
.expect(200); |
||||
|
||||
const updatedProject = await Project.getByTitleOrId(project.id); |
||||
|
||||
if ( |
||||
!updatedProject.uuid || |
||||
updatedProject.roles !== 'viewer' || |
||||
updatedProject.password !== 'test' |
||||
) { |
||||
return new Error('Shared base not configured properly'); |
||||
} |
||||
}); |
||||
|
||||
it('Created project shared base should have only editor or viewer role', async () => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
roles: 'commenter', |
||||
password: 'test', |
||||
}) |
||||
.expect(200); |
||||
|
||||
const updatedProject = await Project.getByTitleOrId(project.id); |
||||
|
||||
if (updatedProject.roles === 'commenter') { |
||||
return new Error('Shared base not configured properly'); |
||||
} |
||||
}); |
||||
|
||||
it('Updated project shared base should have only editor or viewer role', async () => { |
||||
await createSharedBase(context.app, context.token, project); |
||||
|
||||
await request(context.app) |
||||
.patch(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
roles: 'commenter', |
||||
password: 'test', |
||||
}) |
||||
.expect(200); |
||||
|
||||
const updatedProject = await Project.getByTitleOrId(project.id); |
||||
|
||||
if (updatedProject.roles === 'commenter') { |
||||
throw new Exception('Shared base not updated properly'); |
||||
} |
||||
}); |
||||
|
||||
it('Updated project shared base', async () => { |
||||
await createSharedBase(context.app, context.token, project); |
||||
|
||||
await request(context.app) |
||||
.patch(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
roles: 'editor', |
||||
password: 'test', |
||||
}) |
||||
.expect(200); |
||||
const updatedProject = await Project.getByTitleOrId(project.id); |
||||
|
||||
if (updatedProject.roles !== 'editor') { |
||||
throw new Exception('Shared base not updated properly'); |
||||
} |
||||
}); |
||||
|
||||
it('Get project shared base', async () => { |
||||
await createSharedBase(context.app, context.token, project); |
||||
|
||||
await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', context.token) |
||||
.send() |
||||
.expect(200); |
||||
|
||||
const updatedProject = await Project.getByTitleOrId(project.id); |
||||
if (!updatedProject.uuid) { |
||||
throw new Exception('Shared base not created'); |
||||
} |
||||
}); |
||||
|
||||
it('Delete project shared base', async () => { |
||||
await createSharedBase(context.app, context.token, project); |
||||
|
||||
await request(context.app) |
||||
.delete(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', context.token) |
||||
.send() |
||||
.expect(200); |
||||
const updatedProject = await Project.getByTitleOrId(project.id); |
||||
if (updatedProject.uuid) { |
||||
throw new Exception('Shared base not deleted'); |
||||
} |
||||
}); |
||||
|
||||
// todo: Do compare api test
|
||||
|
||||
it('Meta diff sync', async () => { |
||||
await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/meta-diff`) |
||||
.set('xc-auth', context.token) |
||||
.send() |
||||
.expect(200); |
||||
}); |
||||
|
||||
it('Meta diff sync', async () => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/meta-diff`) |
||||
.set('xc-auth', context.token) |
||||
.send() |
||||
.expect(200); |
||||
}); |
||||
|
||||
// todo: improve test. Check whether the all the actions are present in the response and correct as well
|
||||
it('Meta diff sync', async () => { |
||||
await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/audits`) |
||||
.set('xc-auth', context.token) |
||||
.send() |
||||
.expect(200); |
||||
}); |
||||
} |
||||
|
||||
export default function () { |
||||
describe('Project', projectTest); |
||||
} |
@ -0,0 +1,253 @@
|
||||
// import { expect } from 'chai';
|
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import init from '../../init'; |
||||
import { createTable, getAllTables } from '../../factory/table'; |
||||
import { createProject } from '../../factory/project'; |
||||
import { defaultColumns } from '../../factory/column'; |
||||
import Model from '../../../../src/lib/models/Model'; |
||||
|
||||
function tableTest() { |
||||
let context; |
||||
let project; |
||||
let table; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
|
||||
project = await createProject(context); |
||||
table = await createTable(context, project); |
||||
}); |
||||
|
||||
it('Get table list', async function () { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
if (response.body.list.length !== 1) return new Error('Wrong number of tables'); |
||||
}); |
||||
|
||||
it('Create table', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'table2', |
||||
title: 'new_title_2', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(200); |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 2) { |
||||
return new Error('Tables is not be created'); |
||||
} |
||||
|
||||
if (response.body.columns.length !== (defaultColumns(context))) { |
||||
return new Error('Columns not saved properly'); |
||||
} |
||||
|
||||
if ( |
||||
!( |
||||
response.body.table_name.startsWith(project.prefix) && |
||||
response.body.table_name.endsWith('table2') |
||||
) |
||||
) { |
||||
return new Error('table name not configured properly'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with no table name', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: undefined, |
||||
title: 'new_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if ( |
||||
!response.text.includes( |
||||
'Missing table name `table_name` property in request body' |
||||
) |
||||
) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
console.log(tables); |
||||
return new Error( |
||||
`Tables should not be created, tables.length:${tables.length}` |
||||
); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with same table name', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: table.table_name, |
||||
title: 'New_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if (!response.text.includes('Duplicate table name')) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with same title', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'New_table_name', |
||||
title: table.title, |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if (!response.text.includes('Duplicate table alias')) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with title length more than the limit', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'a'.repeat(256), |
||||
title: 'new_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if (!response.text.includes('Table name exceeds ')) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
|
||||
}); |
||||
|
||||
it('Create table with title having leading white space', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'table_name_with_whitespace ', |
||||
title: 'new_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if ( |
||||
!response.text.includes( |
||||
'Leading or trailing whitespace not allowed in table names' |
||||
) |
||||
) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Update table', async function () { |
||||
const response = await request(context.app) |
||||
.patch(`/api/v1/db/meta/tables/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
project_id: project.id, |
||||
table_name: 'new_title', |
||||
}) |
||||
.expect(200); |
||||
const updatedTable = await Model.get(table.id); |
||||
|
||||
if (!updatedTable.table_name.endsWith('new_title')) { |
||||
return new Error('Table was not updated'); |
||||
} |
||||
}); |
||||
|
||||
it('Delete table', async function () { |
||||
const response = await request(context.app) |
||||
.delete(`/api/v1/db/meta/tables/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
|
||||
if (tables.length !== 0) { |
||||
return new Error('Table is not deleted'); |
||||
} |
||||
}); |
||||
|
||||
// todo: Check the condtion where the table being deleted is being refered by multiple tables
|
||||
// todo: Check the if views are also deleted
|
||||
|
||||
it('Get table', async function () { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/meta/tables/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
if (response.body.id !== table.id) new Error('Wrong table'); |
||||
}); |
||||
|
||||
// todo: flaky test, order condition is sometimes not met
|
||||
it('Reorder table', async function () { |
||||
const newOrder = table.order === 0 ? 1 : 0; |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/tables/${table.id}/reorder`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
order: newOrder, |
||||
}) |
||||
.expect(200); |
||||
// .expect(200, async (err) => {
|
||||
// if (err) return new Error(err);
|
||||
|
||||
// const updatedTable = await Model.get(table.id);
|
||||
// console.log(Number(updatedTable.order), newOrder);
|
||||
// if (Number(updatedTable.order) !== newOrder) {
|
||||
// return new Error('Reordering failed');
|
||||
// }
|
||||
|
||||
// new Error();
|
||||
// });
|
||||
}); |
||||
} |
||||
|
||||
export default async function () { |
||||
describe('Table', tableTest); |
||||
} |
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,72 @@
|
||||
{ |
||||
"compilerOptions": { |
||||
"skipLibCheck": true, |
||||
"composite": true, |
||||
"target": "es2017", |
||||
"outDir": "build/main", |
||||
"rootDir": "src", |
||||
"moduleResolution": "node", |
||||
"module": "commonjs", |
||||
"declaration": true, |
||||
"inlineSourceMap": true, |
||||
"esModuleInterop": true |
||||
/* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, |
||||
"allowJs": false, |
||||
// "strict": true /* Enable all strict type-checking options. */, |
||||
|
||||
/* Strict Type-Checking Options */ |
||||
// "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */, |
||||
// "strictNullChecks": true /* Enable strict null checks. */, |
||||
// "strictFunctionTypes": true /* Enable strict checking of function types. */, |
||||
// "strictPropertyInitialization": true /* Enable strict checking of property initialization in classes. */, |
||||
// "noImplicitThis": true /* Raise error on 'this' expressions with an implied 'any' type. */, |
||||
// "alwaysStrict": true /* Parse in strict mode and emit "use strict" for each source file. */, |
||||
"resolveJsonModule": true, |
||||
/* Additional Checks */ |
||||
"noUnusedLocals": false |
||||
/* Report errors on unused locals. */, |
||||
"noUnusedParameters": false |
||||
/* Report errors on unused parameters. */, |
||||
"noImplicitReturns": false |
||||
/* Report error when not all code paths in function return a value. */, |
||||
"noFallthroughCasesInSwitch": false |
||||
/* Report errors for fallthrough cases in switch statement. */, |
||||
/* Debugging Options */ |
||||
"traceResolution": false |
||||
/* Report module resolution log messages. */, |
||||
"listEmittedFiles": false |
||||
/* Print names of generated files part of the compilation. */, |
||||
"listFiles": false |
||||
/* Print names of files part of the compilation. */, |
||||
"pretty": true |
||||
/* Stylize errors and messages using color and context. */, |
||||
/* Experimental Options */ |
||||
// "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */, |
||||
// "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */, |
||||
|
||||
"lib": [ |
||||
"es2017" |
||||
], |
||||
"types": [ |
||||
"mocha", "node" |
||||
], |
||||
"typeRoots": [ |
||||
"node_modules/@types", |
||||
"src/types" |
||||
] |
||||
}, |
||||
"parserOptions": { |
||||
"sourceType": "module", |
||||
"tsconfigRootDir": "./", |
||||
"project": "./tsconfig.json", |
||||
}, |
||||
"include": [ |
||||
"./tests/**/**/**.ts", |
||||
"./tests/**/**.ts" |
||||
// "**/*.ts", |
||||
// "**/*.json" |
||||
], |
||||
"exclude": [ |
||||
], |
||||
"compileOnSave": false |
||||
} |
Loading…
Reference in new issue