Browse Source

chore: introduce unit test

Signed-off-by: Pranav C <pranavxc@gmail.com>
pull/5444/head
Pranav C 1 year ago
parent
commit
ea76f12966
  1. 738
      packages/nocodb-nest/package-lock.json
  2. 8
      packages/nocodb-nest/package.json
  3. 16
      packages/nocodb-nest/src/modules/global/global.module.ts
  4. 14
      packages/nocodb-nest/tests/dockerize-mssql.sh
  5. 23
      packages/nocodb-nest/tests/export-import/ReadMe.md
  6. 7
      packages/nocodb-nest/tests/export-import/config.json
  7. 276
      packages/nocodb-nest/tests/export-import/exportSchema.js
  8. 515
      packages/nocodb-nest/tests/export-import/importSchema.js
  9. 21
      packages/nocodb-nest/tests/mysql-dump/mysql-sakila-delete-data.sql
  10. 37
      packages/nocodb-nest/tests/mysql-dump/mysql-sakila-drop-objects.sql
  11. 231481
      packages/nocodb-nest/tests/mysql-dump/mysql-sakila-insert-data.sql
  12. 643
      packages/nocodb-nest/tests/mysql-dump/mysql-sakila-schema.sql
  13. 642
      packages/nocodb-nest/tests/mysql-sakila-db/01-mysql-sakila-schema.sql
  14. 165
      packages/nocodb-nest/tests/mysql-sakila-db/02-mysql-sakila-insert-data.sql
  15. 658
      packages/nocodb-nest/tests/mysql-sakila-db/03-test-sakila-schema.sql
  16. 46449
      packages/nocodb-nest/tests/mysql-sakila-db/04-test-sakila-data.sql
  17. 4759
      packages/nocodb-nest/tests/pg-cy-quick/01-cy-quick.sql
  18. 1711
      packages/nocodb-nest/tests/pg-sakila-db/01-postgres-sakila-schema.sql
  19. 231654
      packages/nocodb-nest/tests/pg-sakila-db/02-postgres-sakila-insert-data.sql
  20. 1710
      packages/nocodb-nest/tests/pg-sakila-db/03-postgres-sakila-schema.sql
  21. 46702
      packages/nocodb-nest/tests/pg-sakila-db/04-postgres-sakila-insert-data.sql
  22. 504
      packages/nocodb-nest/tests/sql-server-sakila-db/01-sql-server-sakila-schema.sql
  23. 231584
      packages/nocodb-nest/tests/sql-server-sakila-db/02-sql-server-sakila-insert-data.sql
  24. 0
      packages/nocodb-nest/tests/sql-server-sakila-db/mssql-ready.sh
  25. BIN
      packages/nocodb-nest/tests/sqlite-dump/sakila.db
  26. 45
      packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-delete-data.sql
  27. 70
      packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-drop-objects.sql
  28. 231502
      packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-insert-data.sql
  29. 645
      packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-schema.sql
  30. 467
      packages/nocodb-nest/tests/sqlite-sakila-db/01-sqlite-sakila-schema.sql
  31. 231502
      packages/nocodb-nest/tests/sqlite-sakila-db/02-sqlite-sakila-insert-data.sql
  32. 180
      packages/nocodb-nest/tests/sqlite-sakila-db/03-sqlite-sakila-triggers.sql
  33. BIN
      packages/nocodb-nest/tests/sqlite-sakila-db/sakila.db
  34. 4
      packages/nocodb-nest/tests/unit/.env.sample
  35. 5
      packages/nocodb-nest/tests/unit/.pg.env
  36. 322
      packages/nocodb-nest/tests/unit/TestDbMngr.ts
  37. 274
      packages/nocodb-nest/tests/unit/factory/column.ts
  38. 81
      packages/nocodb-nest/tests/unit/factory/project.ts
  39. 373
      packages/nocodb-nest/tests/unit/factory/row.ts
  40. 48
      packages/nocodb-nest/tests/unit/factory/table.ts
  41. 18
      packages/nocodb-nest/tests/unit/factory/user.ts
  42. 38
      packages/nocodb-nest/tests/unit/factory/view.ts
  43. 24
      packages/nocodb-nest/tests/unit/index.test.ts
  44. 60
      packages/nocodb-nest/tests/unit/init/cleanupMeta.ts
  45. 116
      packages/nocodb-nest/tests/unit/init/cleanupSakila.ts
  46. 18
      packages/nocodb-nest/tests/unit/init/db.ts
  47. 43
      packages/nocodb-nest/tests/unit/init/index.ts
  48. 10
      packages/nocodb-nest/tests/unit/model/index.test.ts
  49. 591
      packages/nocodb-nest/tests/unit/model/tests/baseModelSql.test.ts
  50. 26
      packages/nocodb-nest/tests/unit/rest/index.test.ts
  51. 172
      packages/nocodb-nest/tests/unit/rest/tests/attachment.test.ts
  52. 190
      packages/nocodb-nest/tests/unit/rest/tests/auth.test.ts
  53. 102
      packages/nocodb-nest/tests/unit/rest/tests/columnTypeSpecific.test.ts
  54. 962
      packages/nocodb-nest/tests/unit/rest/tests/filter.test.ts
  55. 223
      packages/nocodb-nest/tests/unit/rest/tests/org.test.ts
  56. 360
      packages/nocodb-nest/tests/unit/rest/tests/project.test.ts
  57. 264
      packages/nocodb-nest/tests/unit/rest/tests/table.test.ts
  58. 2412
      packages/nocodb-nest/tests/unit/rest/tests/tableRow.test.ts
  59. 1521
      packages/nocodb-nest/tests/unit/rest/tests/viewRow.test.ts
  60. 73
      packages/nocodb-nest/tests/unit/tsconfig.json

738
packages/nocodb-nest/package-lock.json generated

@ -17,6 +17,8 @@
"@nestjs/mapped-types": "*",
"@nestjs/platform-express": "^9.4.0",
"@sentry/node": "^6.3.5",
"@types/chai": "^4.2.12",
"@types/mocha": "^8.0.1",
"airtable": "^0.11.3",
"ajv": "^8.12.0",
"ajv-formats": "^2.1.1",
@ -118,6 +120,7 @@
"@types/supertest": "^2.0.11",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"chai": "^4.2.0",
"cross-env": "^7.0.3",
"eslint": "^7.8.0",
"eslint-config-prettier": "^6.15.0",
@ -126,6 +129,7 @@
"eslint-plugin-import": "^2.25.2",
"eslint-plugin-prettier": "^4.0.0",
"jest": "29.5.0",
"mocha": "^10.1.0",
"nodemon": "^2.0.22",
"prettier": "^2.7.1",
"source-map-support": "^0.5.20",
@ -3082,6 +3086,11 @@
"@types/node": "*"
}
},
"node_modules/@types/chai": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.4.tgz",
"integrity": "sha512-KnRanxnpfpjUTqTCXslZSEdLfXExwgNxYPdiO2WGUj8+HDjFi8R3k5RVKPeSCzLjCcshCAtVO2QBbVuAV4kTnw=="
},
"node_modules/@types/connect": {
"version": "3.4.35",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz",
@ -3252,6 +3261,11 @@
"integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==",
"dev": true
},
"node_modules/@types/mocha": {
"version": "8.2.3",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.2.3.tgz",
"integrity": "sha512-ekGvFhFgrc2zYQoX4JeZPmVzZxw6Dtllga7iGHzfbYIYkAMUx/sAFP2GdFpLff+vdHXu5fl7WX9AT+TtqYcsyw=="
},
"node_modules/@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
@ -4483,6 +4497,15 @@
"node": ">=0.8"
}
},
"node_modules/assertion-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/ast-types": {
"version": "0.13.4",
"resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz",
@ -5008,6 +5031,12 @@
"node"
]
},
"node_modules/browser-stdout": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
"integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
"dev": true
},
"node_modules/browserify-aes": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz",
@ -5504,6 +5533,24 @@
"node": ">=0.8"
}
},
"node_modules/chai": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz",
"integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==",
"dev": true,
"dependencies": {
"assertion-error": "^1.1.0",
"check-error": "^1.0.2",
"deep-eql": "^4.1.2",
"get-func-name": "^2.0.0",
"loupe": "^2.3.1",
"pathval": "^1.1.1",
"type-detect": "^4.0.5"
},
"engines": {
"node": ">=4"
}
},
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@ -5542,6 +5589,15 @@
"node": "*"
}
},
"node_modules/check-error": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
"integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/chokidar": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
@ -6357,6 +6413,18 @@
"node": "*"
}
},
"node_modules/decamelize": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
"integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
"dev": true,
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/decimal.js": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz",
@ -6376,6 +6444,18 @@
"integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==",
"dev": true
},
"node_modules/deep-eql": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz",
"integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==",
"dev": true,
"dependencies": {
"type-detect": "^4.0.0"
},
"engines": {
"node": ">=6"
}
},
"node_modules/deep-is": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
@ -8212,6 +8292,31 @@
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
},
"node_modules/find-up": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dev": true,
"dependencies": {
"locate-path": "^6.0.0",
"path-exists": "^4.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/flat": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
"integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
"dev": true,
"bin": {
"flat": "cli.js"
}
},
"node_modules/flat-cache": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
@ -8616,6 +8721,15 @@
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/get-intrinsic": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
@ -9160,6 +9274,15 @@
"minimalistic-assert": "^1.0.1"
}
},
"node_modules/he": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
"integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
"dev": true,
"bin": {
"he": "bin/he"
}
},
"node_modules/hexoid": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz",
@ -9825,6 +9948,15 @@
"node": ">=8"
}
},
"node_modules/is-plain-obj": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
"integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
"dev": true,
"engines": {
"node": ">=8"
}
},
"node_modules/is-potential-custom-element-name": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
@ -11278,6 +11410,21 @@
"node": ">=6.11.5"
}
},
"node_modules/locate-path": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dev": true,
"dependencies": {
"p-locate": "^5.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@ -11400,6 +11547,15 @@
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
},
"node_modules/loupe": {
"version": "2.3.6",
"resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz",
"integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==",
"dev": true,
"dependencies": {
"get-func-name": "^2.0.0"
}
},
"node_modules/lru_map": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/lru_map/-/lru_map-0.3.3.tgz",
@ -11909,6 +12065,215 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/mocha": {
"version": "10.2.0",
"resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz",
"integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==",
"dev": true,
"dependencies": {
"ansi-colors": "4.1.1",
"browser-stdout": "1.3.1",
"chokidar": "3.5.3",
"debug": "4.3.4",
"diff": "5.0.0",
"escape-string-regexp": "4.0.0",
"find-up": "5.0.0",
"glob": "7.2.0",
"he": "1.2.0",
"js-yaml": "4.1.0",
"log-symbols": "4.1.0",
"minimatch": "5.0.1",
"ms": "2.1.3",
"nanoid": "3.3.3",
"serialize-javascript": "6.0.0",
"strip-json-comments": "3.1.1",
"supports-color": "8.1.1",
"workerpool": "6.2.1",
"yargs": "16.2.0",
"yargs-parser": "20.2.4",
"yargs-unparser": "2.0.0"
},
"bin": {
"_mocha": "bin/_mocha",
"mocha": "bin/mocha.js"
},
"engines": {
"node": ">= 14.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/mochajs"
}
},
"node_modules/mocha/node_modules/ansi-colors": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
"integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
"dev": true,
"engines": {
"node": ">=6"
}
},
"node_modules/mocha/node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"dev": true
},
"node_modules/mocha/node_modules/cliui": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
"dev": true,
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"wrap-ansi": "^7.0.0"
}
},
"node_modules/mocha/node_modules/diff": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
"integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
"dev": true,
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/mocha/node_modules/glob": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
"integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
"dev": true,
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/mocha/node_modules/glob/node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/mocha/node_modules/js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dev": true,
"dependencies": {
"argparse": "^2.0.1"
},
"bin": {
"js-yaml": "bin/js-yaml.js"
}
},
"node_modules/mocha/node_modules/minimatch": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
"integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
"dev": true,
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=10"
}
},
"node_modules/mocha/node_modules/minimatch/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dev": true,
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/mocha/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true
},
"node_modules/mocha/node_modules/nanoid": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz",
"integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==",
"dev": true,
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/mocha/node_modules/serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"dependencies": {
"randombytes": "^2.1.0"
}
},
"node_modules/mocha/node_modules/supports-color": {
"version": "8.1.1",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
"dev": true,
"dependencies": {
"has-flag": "^4.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/supports-color?sponsor=1"
}
},
"node_modules/mocha/node_modules/yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
"integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
"dev": true,
"dependencies": {
"cliui": "^7.0.2",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.0",
"y18n": "^5.0.5",
"yargs-parser": "^20.2.2"
},
"engines": {
"node": ">=10"
}
},
"node_modules/mocha/node_modules/yargs-parser": {
"version": "20.2.4",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
"integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
"dev": true,
"engines": {
"node": ">=10"
}
},
"node_modules/mock-require": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/mock-require/-/mock-require-3.0.3.tgz",
@ -12960,6 +13325,21 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/p-locate": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dev": true,
"dependencies": {
"p-limit": "^3.0.2"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/p-map": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
@ -13290,6 +13670,15 @@
"node": ">=8"
}
},
"node_modules/pathval": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/pause": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz",
@ -17160,6 +17549,12 @@
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
"integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="
},
"node_modules/workerpool": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz",
"integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==",
"dev": true
},
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@ -17366,6 +17761,33 @@
"node": ">=12"
}
},
"node_modules/yargs-unparser": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
"integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
"dev": true,
"dependencies": {
"camelcase": "^6.0.0",
"decamelize": "^4.0.0",
"flat": "^5.0.2",
"is-plain-obj": "^2.1.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/yargs-unparser/node_modules/camelcase": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
"dev": true,
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/yauzl": {
"version": "2.10.0",
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
@ -19713,6 +20135,11 @@
"@types/node": "*"
}
},
"@types/chai": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.4.tgz",
"integrity": "sha512-KnRanxnpfpjUTqTCXslZSEdLfXExwgNxYPdiO2WGUj8+HDjFi8R3k5RVKPeSCzLjCcshCAtVO2QBbVuAV4kTnw=="
},
"@types/connect": {
"version": "3.4.35",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz",
@ -19883,6 +20310,11 @@
"integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==",
"dev": true
},
"@types/mocha": {
"version": "8.2.3",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.2.3.tgz",
"integrity": "sha512-ekGvFhFgrc2zYQoX4JeZPmVzZxw6Dtllga7iGHzfbYIYkAMUx/sAFP2GdFpLff+vdHXu5fl7WX9AT+TtqYcsyw=="
},
"@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
@ -20842,6 +21274,12 @@
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw=="
},
"assertion-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
"dev": true
},
"ast-types": {
"version": "0.13.4",
"resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz",
@ -21261,6 +21699,12 @@
"resolved": "https://registry.npmjs.org/browser-request/-/browser-request-0.3.3.tgz",
"integrity": "sha512-YyNI4qJJ+piQG6MMEuo7J3Bzaqssufx04zpEKYfSrl/1Op59HWali9zMtBpXnkmqMcOuWJPZvudrm9wISmnCbg=="
},
"browser-stdout": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
"integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
"dev": true
},
"browserify-aes": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz",
@ -21642,6 +22086,21 @@
"crc-32": "~1.2.0"
}
},
"chai": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz",
"integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==",
"dev": true,
"requires": {
"assertion-error": "^1.1.0",
"check-error": "^1.0.2",
"deep-eql": "^4.1.2",
"get-func-name": "^2.0.0",
"loupe": "^2.3.1",
"pathval": "^1.1.1",
"type-detect": "^4.0.5"
}
},
"chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@ -21668,6 +22127,12 @@
"resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
"integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA=="
},
"check-error": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
"integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==",
"dev": true
},
"chokidar": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
@ -22304,6 +22769,12 @@
"resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz",
"integrity": "sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw=="
},
"decamelize": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
"integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
"dev": true
},
"decimal.js": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz",
@ -22320,6 +22791,15 @@
"integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==",
"dev": true
},
"deep-eql": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz",
"integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==",
"dev": true,
"requires": {
"type-detect": "^4.0.0"
}
},
"deep-is": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
@ -23753,6 +24233,22 @@
}
}
},
"find-up": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dev": true,
"requires": {
"locate-path": "^6.0.0",
"path-exists": "^4.0.0"
}
},
"flat": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
"integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
"dev": true
},
"flat-cache": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
@ -24063,6 +24559,12 @@
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"dev": true
},
"get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==",
"dev": true
},
"get-intrinsic": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
@ -24471,6 +24973,12 @@
"minimalistic-assert": "^1.0.1"
}
},
"he": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
"integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
"dev": true
},
"hexoid": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz",
@ -24933,6 +25441,12 @@
"resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
"integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w=="
},
"is-plain-obj": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
"integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
"dev": true
},
"is-potential-custom-element-name": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
@ -26021,6 +26535,15 @@
"integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==",
"dev": true
},
"locate-path": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dev": true,
"requires": {
"p-locate": "^5.0.0"
}
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@ -26137,6 +26660,15 @@
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
},
"loupe": {
"version": "2.3.6",
"resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz",
"integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==",
"dev": true,
"requires": {
"get-func-name": "^2.0.0"
}
},
"lru_map": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/lru_map/-/lru_map-0.3.3.tgz",
@ -26536,6 +27068,171 @@
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-2.1.6.tgz",
"integrity": "sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A=="
},
"mocha": {
"version": "10.2.0",
"resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz",
"integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==",
"dev": true,
"requires": {
"ansi-colors": "4.1.1",
"browser-stdout": "1.3.1",
"chokidar": "3.5.3",
"debug": "4.3.4",
"diff": "5.0.0",
"escape-string-regexp": "4.0.0",
"find-up": "5.0.0",
"glob": "7.2.0",
"he": "1.2.0",
"js-yaml": "4.1.0",
"log-symbols": "4.1.0",
"minimatch": "5.0.1",
"ms": "2.1.3",
"nanoid": "3.3.3",
"serialize-javascript": "6.0.0",
"strip-json-comments": "3.1.1",
"supports-color": "8.1.1",
"workerpool": "6.2.1",
"yargs": "16.2.0",
"yargs-parser": "20.2.4",
"yargs-unparser": "2.0.0"
},
"dependencies": {
"ansi-colors": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
"integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
"dev": true
},
"argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"dev": true
},
"cliui": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
"dev": true,
"requires": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"wrap-ansi": "^7.0.0"
}
},
"diff": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
"integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
"dev": true
},
"glob": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
"integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"dependencies": {
"minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
}
}
},
"js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dev": true,
"requires": {
"argparse": "^2.0.1"
}
},
"minimatch": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
"integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
"dev": true,
"requires": {
"brace-expansion": "^2.0.1"
},
"dependencies": {
"brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0"
}
}
}
},
"ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true
},
"nanoid": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz",
"integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==",
"dev": true
},
"serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"requires": {
"randombytes": "^2.1.0"
}
},
"supports-color": {
"version": "8.1.1",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
"dev": true,
"requires": {
"has-flag": "^4.0.0"
}
},
"yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
"integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
"dev": true,
"requires": {
"cliui": "^7.0.2",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.0",
"y18n": "^5.0.5",
"yargs-parser": "^20.2.2"
}
},
"yargs-parser": {
"version": "20.2.4",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
"integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
"dev": true
}
}
},
"mock-require": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/mock-require/-/mock-require-3.0.3.tgz",
@ -27332,6 +28029,15 @@
"yocto-queue": "^0.1.0"
}
},
"p-locate": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dev": true,
"requires": {
"p-limit": "^3.0.2"
}
},
"p-map": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
@ -27583,6 +28289,12 @@
"integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
"dev": true
},
"pathval": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"dev": true
},
"pause": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz",
@ -30530,6 +31242,12 @@
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
"integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="
},
"workerpool": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz",
"integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==",
"dev": true
},
"wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@ -30671,6 +31389,26 @@
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
"dev": true
},
"yargs-unparser": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
"integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
"dev": true,
"requires": {
"camelcase": "^6.0.0",
"decamelize": "^4.0.0",
"flat": "^5.0.2",
"is-plain-obj": "^2.1.0"
},
"dependencies": {
"camelcase": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
"dev": true
}
}
},
"yauzl": {
"version": "2.10.0",
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",

8
packages/nocodb-nest/package.json

@ -18,9 +18,13 @@
"test:cov": "jest --coverage",
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
"test:e2e": "jest --config ./test/jest-e2e.json",
"watch:run:playwright": "rm -f ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\""
"watch:run:playwright": "rm -f ./test_noco.db; cross-env DATABASE_URL=sqlite:./test_noco.db PLAYWRIGHT_TEST=true NC_DISABLE_TELE=true EE=true nodemon -e ts,js -w ./src -x \"ts-node src/run/testDocker --log-error --project tsconfig.json\"",
"test:unit": "cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay",
"test:unit:pg": "cp tests/unit/.pg.env tests/unit/.env; cross-env TS_NODE_PROJECT=./tests/unit/tsconfig.json mocha -r ts-node/register tests/unit/index.test.ts --recursive --timeout 300000 --exit --delay"
},
"dependencies": {
"@types/chai": "^4.2.12",
"@types/mocha": "^8.0.1",
"@google-cloud/storage": "^5.7.2",
"@graphql-tools/merge": "^6.0.12",
"@nestjs/common": "^9.0.0",
@ -130,6 +134,7 @@
"@types/supertest": "^2.0.11",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"chai": "^4.2.0",
"cross-env": "^7.0.3",
"eslint": "^7.8.0",
"eslint-config-prettier": "^6.15.0",
@ -138,6 +143,7 @@
"eslint-plugin-import": "^2.25.2",
"eslint-plugin-prettier": "^4.0.0",
"jest": "29.5.0",
"mocha": "^10.1.0",
"nodemon": "^2.0.22",
"prettier": "^2.7.1",
"source-map-support": "^0.5.20",

16
packages/nocodb-nest/src/modules/global/global.module.ts

@ -0,0 +1,16 @@
import { Global, Module } from '@nestjs/common'
import { Connection } from '../../connection/connection'
import { MetaService } from '../../meta/meta.service'
@Global()
@Module({
providers:[
Connection,
MetaService
],
exports: [
Connection,
MetaService
]
})
export class GlobalModule {}

14
packages/nocodb-nest/tests/dockerize-mssql.sh

@ -0,0 +1,14 @@
apt-get update && apt-get install -y wget
wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
echo "waiting for MSSQL........... "
dockerize -wait http://$DOCKER_DB_HOST:80 -wait-retry-interval 5s -timeout 20m
echo "MSSQL is UP >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"

23
packages/nocodb-nest/tests/export-import/ReadMe.md

@ -0,0 +1,23 @@
## config.json
{
"srcProject": "sample",
"dstProject": "sample-copy",
"baseURL": "http://localhost:8080",
"xc-auth": "Copy Auth Token"
}
- baseURL & xc-auth are common configurations for both import & export
## Export
- `srcProject`: specify source project name to be exported.
- Export JSON file will be created as `srcProject.json`
- execute
`cd packages/nocodb/tests/export-import`
`node exportSchema.js`
## Import
- `srcProject`: specify JSON file name to be imported (sans .JSON suffix)
- `dstProject`: new project name to be imported as
- Data will also be imported if `srcProject` exists in NocoDB. Note that, data import isn't via exported JSON
- execute
`cd packages/nocodb/tests/export-import`
`node importSchema.js`

7
packages/nocodb-nest/tests/export-import/config.json

@ -0,0 +1,7 @@
{
"srcProject": "sample",
"dstProject": "sample-copy",
"excludeDt": true,
"baseURL": "http://localhost:8080",
"xc-auth": "Copy Auth Token"
}

276
packages/nocodb-nest/tests/export-import/exportSchema.js

@ -0,0 +1,276 @@
const Api = require('nocodb-sdk').Api;
const { UITypes } = require('nocodb-sdk');
const jsonfile = require('jsonfile');
const GRID = 3, GALLERY = 2, FORM = 1;
let ncMap = { /* id: name <string> */ };
let tblSchema = [];
let api = {};
let viewStore = { columns: {}, sort: {}, filter: {} };
let inputConfig = jsonfile.readFileSync(`config.json`)
let ncConfig = {
projectName: inputConfig.srcProject,
baseURL: inputConfig.baseURL,
headers: {
'xc-auth': `${inputConfig["xc-auth"]}`
}
};
// helper routines
// remove objects containing 0/ false/ null
// fixme: how to handle when cdf (default value) is configured as 0/ null/ false
function removeEmpty(obj) {
return Object.fromEntries(
Object.entries(obj)
.filter(([_, v]) => v != null && v != 0 && v != false)
.map(([k, v]) => [k, v === Object(v) ? removeEmpty(v) : v])
);
}
function addColumnSpecificData(c) {
// pick required fields to proceed further
let col;
if(inputConfig.excludeDt) {
col = removeEmpty(
(({ id, title, column_name, uidt, pk, pv, rqd, dtxp, system, ai }) => ({
id, title, column_name, uidt, pk, pv, rqd, dtxp, system, ai
}))(c)
);
} else {
col = removeEmpty(
(({ id, title, column_name, uidt, dt, pk, pv, rqd, dtxp, system, ai }) => ({
id, title, column_name, uidt, dt, pk, pv, rqd, dtxp, system, ai
}))(c)
);
}
switch (c.uidt) {
case UITypes.Formula:
col.formula = c.colOptions.formula;
col.formula_raw = c.colOptions.formula_raw;
break;
case UITypes.LinkToAnotherRecord:
col[`colOptions`] = {
fk_model_id: c.fk_model_id,
fk_related_model_id: c.colOptions.fk_related_model_id,
fk_child_column_id: c.colOptions.fk_child_column_id,
fk_parent_column_id: c.colOptions.fk_parent_column_id,
type: c.colOptions.type
};
break;
case UITypes.Lookup:
col[`colOptions`] = {
fk_model_id: c.fk_model_id,
fk_relation_column_id: c.colOptions.fk_relation_column_id,
fk_lookup_column_id: c.colOptions.fk_lookup_column_id
};
break;
case UITypes.Rollup:
col[`colOptions`] = {
fk_model_id: c.fk_model_id,
fk_relation_column_id: c.colOptions.fk_relation_column_id,
fk_rollup_column_id: c.colOptions.fk_rollup_column_id,
rollup_function: c.colOptions.rollup_function
};
break;
}
return col;
}
function addViewDetails(v) {
// pick required fields to proceed further
let view = (({ id, title, type, show_system_fields, lock_type, order }) => ({
id,
title,
type,
show_system_fields,
lock_type,
order
}))(v);
// form view
if (v.type === FORM) {
view.property = (({
heading,
subheading,
success_msg,
redirect_after_secs,
email,
submit_another_form,
show_blank_form
}) => ({
heading,
subheading,
success_msg,
redirect_after_secs,
email,
submit_another_form,
show_blank_form
}))(v.view);
}
// gallery view
else if (v.type === GALLERY) {
view.property = {
fk_cover_image_col_id: ncMap[v.view.fk_cover_image_col_id]
};
}
// gallery view doesn't share column information in api yet
if (v.type !== GALLERY) {
if (v.type === GRID)
view.columns = viewStore.columns[v.id].map(a =>
(({ id, width, order, show }) => ({ id, width, order, show }))(a)
);
if (v.type === FORM)
view.columns = viewStore.columns[v.id].map(a =>
(({ id, order, show, label, help, description, required }) => ({
id,
order,
show,
label,
help,
description,
required
}))(a)
);
for (let i = 0; i < view.columns?.length; i++)
view.columns[i].title = ncMap[viewStore.columns[v.id][i].id];
// skip hm & mm columns
view.columns = view.columns
?.filter(a => a.title?.includes('_nc_m2m_') === false)
.filter(a => a.title?.includes('nc_') === false);
}
// filter & sort configurations
if (v.type !== FORM) {
view.sort = viewStore.sort[v.id].map(a =>
(({ fk_column_id, direction, order }) => ({
fk_column_id,
direction,
order
}))(a)
);
view.filter = viewStore.filter[v.id].map(a =>
(({ fk_column_id, logical_op, comparison_op, value, order }) => ({
fk_column_id,
logical_op,
comparison_op,
value,
order
}))(a)
);
}
return view;
}
// view data stored as is for quick access
async function storeViewDetails(tableId) {
// read view data for each table
let viewList = await api.dbView.list(tableId);
for (let j = 0; j < viewList.list.length; j++) {
let v = viewList.list[j];
let viewDetails = [];
// invoke view specific read to populate columns information
if (v.type === FORM) viewDetails = (await api.dbView.formRead(v.id)).columns;
else if (v.type === GALLERY) viewDetails = await api.dbView.galleryRead(v.id);
else if (v.type === GRID) viewDetails = await api.dbView.gridColumnsList(v.id);
viewStore.columns[v.id] = viewDetails;
// populate sort information
let vSort = await api.dbTableSort.list(v.id);
viewStore.sort[v.id] = vSort.sorts.list;
let vFilter = await api.dbTableFilter.read(v.id);
viewStore.filter[v.id] = vFilter;
}
}
// mapping table for quick information access
// store maps for tableId, columnId, viewColumnId & viewId to their names
async function generateMapTbl(pId) {
const tblList = await api.dbTable.list(pId);
for (let i = 0; i < tblList.list.length; i++) {
let tblId = tblList.list[i].id;
let tbl = await api.dbTable.read(tblId);
// table ID <> name
ncMap[tblId] = tbl.title;
// column ID <> name
tbl.columns.map(x => (ncMap[x.id] = x.title));
// view ID <> name
tbl.views.map(x => (ncMap[x.id] = x.tn));
for (let i = 0; i < tbl.views.length; i++) {
let x = tbl.views[i];
let viewColumns = [];
if (x.type === FORM) viewColumns = (await api.dbView.formRead(x.id)).columns;
else if (x.type === GALLERY)
viewColumns = (await api.dbView.galleryRead(x.id)).columns;
else if (x.type === GRID) viewColumns = await api.dbView.gridColumnsList(x.id);
// view column ID <> name
viewColumns?.map(a => (ncMap[a.id] = ncMap[a.fk_column_id]));
}
}
}
// main
//
async function exportSchema() {
api = new Api(ncConfig);
// fetch project details (id et.al)
const x = await api.project.list();
const p = x.list.find(a => a.title === ncConfig.projectName);
await generateMapTbl(p.id);
// read project
const tblList = await api.dbTable.list(p.id);
// for each table
for (let i = 0; i < tblList.list.length; i++) {
let tblId = tblList.list[i].id;
await storeViewDetails(tblId);
let tbl = await api.dbTable.read(tblId);
// prepare schema
let tSchema = {
id: tbl.id,
title: tbl.title,
table_name: tbl?.table_name,
columns: [...tbl.columns.map(c => addColumnSpecificData(c))]
.filter(a => a.title.includes('_nc_m2m_') === false) // mm
.filter(a => a.title.includes(p.prefix) === false) // hm
.filter(
a => !(a?.system === 1 && a.uidt === UITypes.LinkToAnotherRecord)
),
views: [...tbl.views.map(v => addViewDetails(v))]
};
tblSchema.push(tSchema);
}
}
(async () => {
await exportSchema();
jsonfile.writeFileSync(
`${ncConfig.projectName.replace(/ /g, '_')}.json`,
tblSchema,
{ spaces: 2 }
);
})().catch(e => {
console.log(e);
});

515
packages/nocodb-nest/tests/export-import/importSchema.js

@ -0,0 +1,515 @@
// tbd
// - formula dependency list
// - nested lookup/ rollup
const Api = require('nocodb-sdk').Api;
const { UITypes } = require('nocodb-sdk');
const jsonfile = require('jsonfile');
let inputConfig = jsonfile.readFileSync(`config.json`)
let ncConfig = {
srcProject: inputConfig.srcProject,
projectName: inputConfig.dstProject,
baseURL: inputConfig.baseURL,
headers: {
'xc-auth': `${inputConfig["xc-auth"]}`
}
};
let ncIn = jsonfile.readFileSync(`${ncConfig.srcProject}.json`);
let api = {};
let ncProject = {};
let link = [];
let lookup = [];
let rollup = [];
let formula = [];
let rootLinks = [];
// maps v1 table ID, v2 table ID & table title to table schema
let ncTables = {};
async function createBaseTables() {
console.log(`createBaseTables`);
for (let i = 0; i < ncIn.length; i++) {
let tblSchema = ncIn[i];
let reducedColumnSet = tblSchema.columns.filter(
a =>
a.uidt !== UITypes.LinkToAnotherRecord &&
a.uidt !== UITypes.Lookup &&
a.uidt !== UITypes.Rollup &&
a.uidt !== UITypes.Formula
);
link.push(
...tblSchema.columns.filter(a => a.uidt === UITypes.LinkToAnotherRecord)
);
lookup.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Lookup));
rollup.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Rollup));
formula.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Formula));
formula.map(a => (a['table_id'] = tblSchema.id));
let tbl = await api.dbTable.create(ncProject.id, {
title: tblSchema.title,
table_name: tblSchema.title,
columns: reducedColumnSet.map(({ id, ...rest }) => ({ ...rest }))
});
ncTables[tbl.title] = tbl;
ncTables[tbl.id] = tbl;
ncTables[tblSchema.id] = tbl;
}
}
let linksCreated = [];
function isLinkCreated(pId, cId) {
let idx = linksCreated.findIndex(a => a.cId === pId && a.pId === cId);
if (idx === -1) {
linksCreated.push({ pId: pId, cId: cId });
return false;
}
return true;
}
// retrieve nc-view column ID from corresponding nc-column ID
async function nc_getViewColumnId(viewId, viewType, ncColumnId) {
// retrieve view Info
let viewDetails;
if (viewType === 'form')
viewDetails = (await api.dbView.formRead(viewId)).columns;
else if (viewType === 'gallery')
viewDetails = (await api.dbView.galleryRead(viewId)).columns;
else viewDetails = await api.dbView.gridColumnsList(viewId);
return viewDetails.find(x => x.fk_column_id === ncColumnId)?.id;
}
async function createFormula() {
for (let i = 0; i < formula.length; i++) {
let tbl = await api.dbTableColumn.create(ncTables[formula[i].table_id].id, {
uidt: UITypes.Formula,
title: formula[i].title,
formula_raw: formula[i].formula_raw
});
}
}
async function createLinks() {
console.log(`createLinks`);
for (let i = 0; i < link.length; i++) {
if (
(link[i].colOptions.type === 'mm' &&
false ===
isLinkCreated(
link[i].colOptions.fk_parent_column_id,
link[i].colOptions.fk_child_column_id
)) ||
link[i].colOptions.type === 'hm'
) {
let srcTbl = ncTables[link[i].colOptions.fk_model_id];
let dstTbl = ncTables[link[i].colOptions.fk_related_model_id];
// create link
let tbl = await api.dbTableColumn.create(srcTbl.id, {
uidt: UITypes.LinkToAnotherRecord,
title: link[i].title,
parentId: srcTbl.id,
childId: dstTbl.id,
type: link[i].colOptions.type
});
ncTables[tbl.title] = tbl;
ncTables[tbl.id] = tbl;
ncTables[link[i].colOptions.fk_model_id] = tbl;
// for data-link procedure later
rootLinks.push({ linkColumn: link[i], linkSrcTbl: srcTbl });
// symmetry field update
//
let v2ColSchema = tbl.columns.find(x => x.title === link[i].title);
// read related table again after link is created
dstTbl = await api.dbTable.read(dstTbl.id);
let v2SymmetricColumn =
link[i].colOptions.type === 'mm'
? dstTbl.columns.find(
x =>
x.uidt === UITypes.LinkToAnotherRecord &&
x?.colOptions.fk_parent_column_id ===
v2ColSchema.colOptions.fk_child_column_id &&
x?.colOptions.fk_child_column_id ===
v2ColSchema.colOptions.fk_parent_column_id
)
: dstTbl.columns.find(
x =>
x.uidt === UITypes.LinkToAnotherRecord &&
x?.colOptions.fk_parent_column_id ===
v2ColSchema.colOptions.fk_parent_column_id &&
x?.colOptions.fk_child_column_id ===
v2ColSchema.colOptions.fk_child_column_id
);
let v1SymmetricColumn =
link[i].colOptions.type === 'mm'
? link.find(
x =>
x.colOptions.fk_parent_column_id ===
link[i].colOptions.fk_child_column_id &&
x.colOptions.fk_child_column_id ===
link[i].colOptions.fk_parent_column_id &&
x.colOptions.type === 'mm'
)
: link.find(
x =>
x.colOptions.fk_parent_column_id ===
link[i].colOptions.fk_parent_column_id &&
x.colOptions.fk_child_column_id ===
link[i].colOptions.fk_child_column_id &&
x.colOptions.type === 'bt'
);
tbl = await api.dbTableColumn.update(v2SymmetricColumn.id, {
...v2SymmetricColumn,
title: v1SymmetricColumn.title,
column_name: null
});
ncTables[tbl.title] = tbl;
ncTables[tbl.id] = tbl;
ncTables[v1SymmetricColumn.colOptions.fk_model_id] = tbl;
}
}
}
function get_v2Id(v1ColId) {
for (let i = 0; i < ncIn.length; i++) {
let tblSchema = ncIn[i];
let colSchema = {};
if (
undefined !== (colSchema = tblSchema.columns.find(x => x.id === v1ColId))
) {
let colName = colSchema.title;
let v2Tbl = ncTables[tblSchema.id];
return v2Tbl.columns.find(y => y.title === colName)?.id;
}
}
}
async function createLookup() {
console.log(`createLookup`);
for (let i = 0; i < lookup.length; i++) {
let srcTbl = ncTables[lookup[i].colOptions.fk_model_id];
let v2_fk_relation_column_id = get_v2Id(
lookup[i].colOptions.fk_relation_column_id
);
let v2_lookup_column_id = get_v2Id(
lookup[i].colOptions.fk_lookup_column_id
);
if (v2_lookup_column_id) {
let tbl = await api.dbTableColumn.create(srcTbl.id, {
uidt: UITypes.Lookup,
title: lookup[i].title,
fk_relation_column_id: v2_fk_relation_column_id,
fk_lookup_column_id: v2_lookup_column_id
});
ncTables[tbl.title] = tbl;
ncTables[tbl.id] = tbl;
ncTables[lookup[i].colOptions.fk_model_id] = tbl;
}
}
}
async function createRollup() {
console.log(`createRollup`);
for (let i = 0; i < rollup.length; i++) {
let srcTbl = ncTables[rollup[i].colOptions.fk_model_id];
let v2_fk_relation_column_id = get_v2Id(
rollup[i].colOptions.fk_relation_column_id
);
let v2_rollup_column_id = get_v2Id(
rollup[i].colOptions.fk_rollup_column_id
);
if (v2_rollup_column_id) {
let tbl = await api.dbTableColumn.create(srcTbl.id, {
uidt: UITypes.Rollup,
title: rollup[i].title,
column_name: rollup[i].title,
fk_relation_column_id: v2_fk_relation_column_id,
fk_rollup_column_id: v2_rollup_column_id,
rollup_function: rollup[i].colOptions.rollup_function
});
ncTables[tbl.title] = tbl;
ncTables[tbl.id] = tbl;
ncTables[rollup[i].colOptions.fk_model_id] = tbl;
}
}
}
async function configureGrid() {
console.log(`configureGrid`);
for (let i = 0; i < ncIn.length; i++) {
let tblSchema = ncIn[i];
let tblId = ncTables[tblSchema.id].id;
let gridList = tblSchema.views.filter(a => a.type === 3);
let srcTbl = await api.dbTable.read(tblId);
const view = await api.dbView.list(tblId);
// create / rename view
for (let gridCnt = 0; gridCnt < gridList.length; gridCnt++) {
let viewCreated = {};
// rename first view; default view already created
if (gridCnt === 0) {
viewCreated = await api.dbView.update(view.list[0].id, {
title: gridList[gridCnt].title
});
}
// create new views
else {
viewCreated = await api.dbView.gridCreate(tblId, {
title: gridList[gridCnt].title
});
}
// retrieve view Info
let viewId = viewCreated.id;
let viewDetails = await api.dbView.gridColumnsList(viewId);
// column visibility
for (
let colCnt = 0;
colCnt < gridList[gridCnt].columns.length;
colCnt++
) {
let ncColumnId = srcTbl.columns.find(
a => a.title === gridList[gridCnt].columns[colCnt].title
)?.id;
// let ncViewColumnId = await nc_getViewColumnId( viewCreated.id, "grid", ncColumnId )
let ncViewColumnId = viewDetails.find(
x => x.fk_column_id === ncColumnId
)?.id;
// column order & visibility
await api.dbViewColumn.update(viewCreated.id, ncViewColumnId, {
show: gridList[gridCnt].columns[colCnt].show,
order: gridList[gridCnt].columns[colCnt].order
});
await api.dbView.gridColumnUpdate(ncViewColumnId, {
width: gridList[gridCnt].columns[colCnt].width
});
}
// sort
for (let sCnt = 0; sCnt < gridList[gridCnt].sort.length; sCnt++) {
let sColName = tblSchema.columns.find(
a => gridList[gridCnt].sort[sCnt].fk_column_id === a.id
).title;
await api.dbTableSort.create(viewId, {
fk_column_id: srcTbl.columns.find(a => a.title === sColName)?.id,
direction: gridList[gridCnt].sort[sCnt].direction
});
}
// filter
for (let fCnt = 0; fCnt < gridList[gridCnt].filter.length; fCnt++) {
let fColName = tblSchema.columns.find(
a => gridList[gridCnt].sort[fCnt].fk_column_id === a.id
).title;
await api.dbTableFilter.create(viewId, {
...gridList[gridCnt].filter[fCnt],
fk_column_id: srcTbl.columns.find(a => a.title === fColName)?.id
});
}
}
}
}
async function configureGallery() {
console.log(`configureGallery`);
for (let i = 0; i < ncIn.length; i++) {
let tblSchema = ncIn[i];
let tblId = ncTables[tblSchema.id].id;
let galleryList = tblSchema.views.filter(a => a.type === 2);
for (let cnt = 0; cnt < galleryList.length; cnt++) {
const viewCreated = await api.dbView.galleryCreate(tblId, {
title: galleryList[cnt].title
});
}
}
}
async function configureForm() {
console.log(`configureForm`);
for (let i = 0; i < ncIn.length; i++) {
let tblSchema = ncIn[i];
let tblId = ncTables[tblSchema.id].id;
let formList = tblSchema.views.filter(a => a.type === 1);
let srcTbl = await api.dbTable.read(tblId);
for (let formCnt = 0; formCnt < formList.length; formCnt++) {
const formData = {
title: formList[formCnt].title,
...formList[formCnt].property
};
const viewCreated = await api.dbView.formCreate(tblId, formData);
// column visibility
for (
let colCnt = 0;
colCnt < formList[formCnt].columns.length;
colCnt++
) {
let ncColumnId = srcTbl.columns.find(
a => a.title === formList[formCnt].columns[colCnt].title
)?.id;
let ncViewColumnId = await nc_getViewColumnId(
viewCreated.id,
'form',
ncColumnId
);
// column order & visibility
await api.dbView.formColumnUpdate(ncViewColumnId, {
show: formList[formCnt].columns[colCnt].show,
order: formList[formCnt].columns[colCnt].order,
label: formList[formCnt].columns[colCnt].label,
description: formList[formCnt].columns[colCnt].description,
required: formList[formCnt].columns[colCnt].required
});
}
}
}
}
async function restoreBaseData() {
console.log(`restoreBaseData`);
for (let i = 0; i < ncIn.length; i++) {
let tblSchema = ncIn[i];
let tblId = ncTables[tblSchema.id].id;
let pk = tblSchema.columns.find(a => a.pk).title;
let moreRecords = true;
let offset = 0,
limit = 25;
while (moreRecords) {
let recList = await api.dbTableRow.list(
'nc',
ncConfig.srcProject,
tblSchema.title,
{},
{
query: { limit: limit, offset: offset }
}
);
moreRecords = !recList.pageInfo.isLastPage;
offset += limit;
for (let recCnt = 0; recCnt < recList.list.length; recCnt++) {
let record = await api.dbTableRow.read(
'nc',
ncConfig.srcProject,
tblSchema.title,
recList.list[recCnt][pk]
);
// post-processing on the record
for (const [key, value] of Object.entries(record)) {
let table = ncTables[tblId];
// retrieve datatype
const dt = table.columns.find(x => x.title === key)?.uidt;
if (dt === UITypes.LinkToAnotherRecord) delete record[key];
if (dt === UITypes.Lookup) delete record[key];
if (dt === UITypes.Rollup) delete record[key];
}
await api.dbTableRow.create(
'nc',
ncConfig.projectName,
tblSchema.title,
record
);
}
}
}
}
async function restoreLinks() {
console.log(`restoreLinks`);
for (let i = 0; i < rootLinks.length; i++) {
let pk = rootLinks[i].linkSrcTbl.columns.find(a => a.pk).title;
let moreRecords = true;
let offset = 0,
limit = 25;
while (moreRecords) {
let recList = await api.dbTableRow.list(
'nc',
ncConfig.srcProject,
rootLinks[i].linkSrcTbl.title,
{},
{
query: { limit: limit, offset: offset }
}
);
moreRecords = !recList.pageInfo.isLastPage;
offset += limit;
for (let recCnt = 0; recCnt < recList.list.length; recCnt++) {
let record = await api.dbTableRow.read(
'nc',
ncConfig.srcProject,
rootLinks[i].linkSrcTbl.title,
recList.list[recCnt][pk]
);
let linkField = record[rootLinks[i].linkColumn.title];
if (linkField.length) {
await api.dbTableRow.nestedAdd(
'nc',
ncConfig.projectName,
rootLinks[i].linkSrcTbl.title,
record[pk],
rootLinks[i].linkColumn.colOptions.type,
encodeURIComponent(rootLinks[i].linkColumn.title),
linkField[0][pk]
);
}
}
}
}
}
async function importSchema() {
api = new Api(ncConfig);
const x = await api.project.list();
const p = x.list.find(a => a.title === ncConfig.projectName);
if (p) await api.project.delete(p.id);
ncProject = await api.project.create({ title: ncConfig.projectName });
await createBaseTables();
await createLinks();
await createLookup();
await createRollup();
await createFormula();
// configure views
await configureGrid();
await configureGallery();
await configureForm();
// restore data only if source project exists
const p2 = x.list.find(a => a.title === ncConfig.srcProject);
if (p2 !== undefined) {
await restoreBaseData();
await restoreLinks();
}
}
(async () => {
await importSchema();
console.log('completed');
})().catch(e => console.log(e));

21
packages/nocodb-nest/tests/mysql-dump/mysql-sakila-delete-data.sql

@ -0,0 +1,21 @@
-- Delete data
ALTER TABLE staff DROP FOREIGN KEY fk_staff_store , DROP FOREIGN KEY fk_staff_address;
DELETE FROM payment ;
DELETE FROM rental ;
DELETE FROM customer ;
DELETE FROM film_category ;
DELETE FROM film_text ;
DELETE FROM film_actor ;
DELETE FROM inventory ;
DELETE FROM film ;
DELETE FROM category ;
ALTER TABLE store CHANGE COLUMN manager_staff_id manager_staff_id TINYINT UNSIGNED NULL;
update store set manager_staff_id=null;
DELETE FROM staff ;
DELETE FROM store ;
DELETE FROM actor ;
DELETE FROM address ;
DELETE FROM city ;
DELETE FROM country ;
DELETE FROM language ;
ALTER TABLE store CHANGE COLUMN manager_staff_id manager_staff_id TINYINT UNSIGNED NOT NULL;

37
packages/nocodb-nest/tests/mysql-dump/mysql-sakila-drop-objects.sql

@ -0,0 +1,37 @@
-- Drop Views
DROP VIEW customer_list;
DROP VIEW film_list;
DROP VIEW nicer_but_slower_film_list;
DROP VIEW sales_by_film_category;
DROP VIEW sales_by_store;
DROP VIEW staff_list;
-- Drop Tables
DROP TABLE payment;
DROP TABLE rental;
DROP TABLE inventory;
DROP TABLE film_text;
DROP TABLE film_category;
DROP TABLE film_actor;
DROP TABLE film;
DROP TABLE language;
DROP TABLE customer;
DROP TABLE actor;
DROP TABLE category;
ALTER TABLE staff DROP FOREIGN KEY fk_staff_store , DROP FOREIGN KEY fk_staff_address;
DROP TABLE store;
DROP TABLE address;
DROP TABLE staff;
DROP TABLE city;
DROP TABLE country;
-- Procedures and views
drop procedure film_in_stock;
drop procedure film_not_in_stock;
drop function get_customer_balance;
drop function inventory_held_by_customer;
drop function inventory_in_stock;
drop procedure rewards_report;

231481
packages/nocodb-nest/tests/mysql-dump/mysql-sakila-insert-data.sql

File diff suppressed because it is too large Load Diff

643
packages/nocodb-nest/tests/mysql-dump/mysql-sakila-schema.sql

@ -0,0 +1,643 @@
-- Sakila Sample Database Schema
-- Version 0.8
-- Copyright (c) 2006, MySQL AB
-- All rights reserved.
-- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-- * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-- * Neither the name of MySQL AB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL';
DROP SCHEMA IF EXISTS sakila;
CREATE SCHEMA sakila;
USE sakila;
--
-- Table structure for table `actor`
--
CREATE TABLE actor (
actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id),
KEY idx_actor_last_name (last_name)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `address`
--
CREATE TABLE address (
address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id SMALLINT UNSIGNED NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (address_id),
KEY idx_fk_city_id (city_id),
CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `category`
--
CREATE TABLE category (
category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name VARCHAR(25) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (category_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `city`
--
CREATE TABLE city (
city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
city VARCHAR(50) NOT NULL,
country_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (city_id),
KEY idx_fk_country_id (country_id),
CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `country`
--
CREATE TABLE country (
country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
country VARCHAR(50) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (country_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `customer`
--
CREATE TABLE customer (
customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
store_id TINYINT UNSIGNED NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
create_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (customer_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
KEY idx_last_name (last_name),
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film`
--
CREATE TABLE film (
film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
title VARCHAR(255) NOT NULL,
description TEXT DEFAULT NULL,
release_year YEAR DEFAULT NULL,
language_id TINYINT UNSIGNED NOT NULL,
original_language_id TINYINT UNSIGNED DEFAULT NULL,
rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3,
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
length SMALLINT UNSIGNED DEFAULT NULL,
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G',
special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id),
KEY idx_title (title),
KEY idx_fk_language_id (language_id),
KEY idx_fk_original_language_id (original_language_id),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film_actor`
--
CREATE TABLE film_actor (
actor_id SMALLINT UNSIGNED NOT NULL,
film_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id,film_id),
KEY idx_fk_film_id (`film_id`),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film_category`
--
CREATE TABLE film_category (
film_id SMALLINT UNSIGNED NOT NULL,
category_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film_text`
--
CREATE TABLE film_text (
film_id SMALLINT NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (film_id),
FULLTEXT KEY idx_title_description (title,description)
)ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Triggers for loading film_text from film
--
DELIMITER ;;
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN
INSERT INTO film_text (film_id, title, description)
VALUES (new.film_id, new.title, new.description);
END;;
CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN
IF (old.title != new.title) or (old.description != new.description)
THEN
UPDATE film_text
SET title=new.title,
description=new.description,
film_id=new.film_id
WHERE film_id=old.film_id;
END IF;
END;;
CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN
DELETE FROM film_text WHERE film_id = old.film_id;
END;;
DELIMITER ;
--
-- Table structure for table `inventory`
--
CREATE TABLE inventory (
inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT,
film_id SMALLINT UNSIGNED NOT NULL,
store_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (inventory_id),
KEY idx_fk_film_id (film_id),
KEY idx_store_id_film_id (store_id,film_id),
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `language`
--
CREATE TABLE language (
language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name CHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (language_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `payment`
--
CREATE TABLE payment (
payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
customer_id SMALLINT UNSIGNED NOT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (payment_id),
KEY idx_fk_staff_id (staff_id),
KEY idx_fk_customer_id (customer_id),
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `rental`
--
CREATE TABLE rental (
rental_id INT NOT NULL AUTO_INCREMENT,
rental_date DATETIME NOT NULL,
inventory_id MEDIUMINT UNSIGNED NOT NULL,
customer_id SMALLINT UNSIGNED NOT NULL,
return_date DATETIME DEFAULT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rental_id),
UNIQUE KEY (rental_date,inventory_id,customer_id),
KEY idx_fk_inventory_id (inventory_id),
KEY idx_fk_customer_id (customer_id),
KEY idx_fk_staff_id (staff_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `staff`
--
CREATE TABLE staff (
staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
picture MEDIUMBLOB DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id TINYINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) BINARY DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (staff_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `store`
--
CREATE TABLE store (
store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
manager_staff_id TINYINT UNSIGNED NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (store_id),
UNIQUE KEY idx_unique_manager (manager_staff_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- View structure for view `customer_list`
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`,
a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8'active',_utf8'') AS notes, cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `film_list`
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8' ', actor.last_name) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id;
--
-- View structure for view `nicer_but_slower_film_list`
--
CREATE VIEW nicer_but_slower_film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)),
LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)),
LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id;
--
-- View structure for view `staff_list`
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone,
city.city AS city, country.country AS country, s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `sales_by_store`
--
CREATE VIEW sales_by_store
AS
SELECT
CONCAT(c.city, _utf8',', cy.country) AS store
, CONCAT(m.first_name, _utf8' ', m.last_name) AS manager
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY s.store_id
ORDER BY cy.country, c.city;
--
-- View structure for view `sales_by_film_category`
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
ORDER BY total_sales DESC;
--
-- View structure for view `actor_info`
--
CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM sakila.film f
INNER JOIN sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM sakila.actor a
LEFT JOIN sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
--
-- Procedure structure for procedure `rewards_report`
--
DELIMITER //
CREATE PROCEDURE rewards_report (
IN min_monthly_purchases TINYINT UNSIGNED
, IN min_dollar_amount_purchased DECIMAL(10,2) UNSIGNED
, OUT count_rewardees INT
)
LANGUAGE SQL
NOT DETERMINISTIC
READS SQL DATA
SQL SECURITY DEFINER
COMMENT 'Provides a customizable report on best customers'
proc: BEGIN
DECLARE last_month_start DATE;
DECLARE last_month_end DATE;
/* Some sanity checks... */
IF min_monthly_purchases = 0 THEN
SELECT 'Minimum monthly purchases parameter must be > 0';
LEAVE proc;
END IF;
IF min_dollar_amount_purchased = 0.00 THEN
SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00';
LEAVE proc;
END IF;
/* Determine start and end time periods */
SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH);
SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d');
SET last_month_end = LAST_DAY(last_month_start);
/*
Create a temporary storage area for
Customer IDs.
*/
CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY);
/*
Find all customers meeting the
monthly purchase requirements
*/
INSERT INTO tmpCustomer (customer_id)
SELECT p.customer_id
FROM payment AS p
WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end
GROUP BY customer_id
HAVING SUM(p.amount) > min_dollar_amount_purchased
AND COUNT(customer_id) > min_monthly_purchases;
/* Populate OUT parameter with count of found customers */
SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees;
/*
Output ALL customer information of matching rewardees.
Customize output as needed.
*/
SELECT c.*
FROM tmpCustomer AS t
INNER JOIN customer AS c ON t.customer_id = c.customer_id;
/* Clean up */
DROP TABLE tmpCustomer;
END //
DELIMITER ;
DELIMITER $$
CREATE FUNCTION get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
DETERMINISTIC
READS SQL DATA
BEGIN
#OK, WE NEED TO CALCULATE THE CURRENT BALANCE GIVEN A CUSTOMER_ID AND A DATE
#THAT WE WANT THE BALANCE TO BE EFFECTIVE FOR. THE BALANCE IS:
# 1) RENTAL FEES FOR ALL PREVIOUS RENTALS
# 2) ONE DOLLAR FOR EVERY DAY THE PREVIOUS RENTALS ARE OVERDUE
# 3) IF A FILM IS MORE THAN RENTAL_DURATION * 2 OVERDUE, CHARGE THE REPLACEMENT_COST
# 4) SUBTRACT ALL PAYMENTS MADE BEFORE THE DATE SPECIFIED
DECLARE v_rentfees DECIMAL(5,2); #FEES PAID TO RENT THE VIDEOS INITIALLY
DECLARE v_overfees INTEGER; #LATE FEES FOR PRIOR RENTALS
DECLARE v_payments DECIMAL(5,2); #SUM OF PAYMENTS MADE PREVIOUSLY
SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees
FROM film, inventory, rental
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees
FROM rental, inventory, film
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(payment.amount),0) INTO v_payments
FROM payment
WHERE payment.payment_date <= p_effective_date
AND payment.customer_id = p_customer_id;
RETURN v_rentfees + v_overfees - v_payments;
END $$
DELIMITER ;
DELIMITER $$
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND inventory_in_stock(inventory_id);
SELECT FOUND_ROWS() INTO p_film_count;
END $$
DELIMITER ;
DELIMITER $$
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND NOT inventory_in_stock(inventory_id);
SELECT FOUND_ROWS() INTO p_film_count;
END $$
DELIMITER ;
DELIMITER $$
CREATE FUNCTION inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
DECLARE v_customer_id INT;
DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;
SELECT customer_id INTO v_customer_id
FROM rental
WHERE return_date IS NULL
AND inventory_id = p_inventory_id;
RETURN v_customer_id;
END $$
DELIMITER ;
DELIMITER $$
CREATE FUNCTION inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
DECLARE v_rentals INT;
DECLARE v_out INT;
#AN ITEM IS IN-STOCK IF THERE ARE EITHER NO ROWS IN THE rental TABLE
#FOR THE ITEM OR ALL ROWS HAVE return_date POPULATED
SELECT COUNT(*) INTO v_rentals
FROM rental
WHERE inventory_id = p_inventory_id;
IF v_rentals = 0 THEN
RETURN TRUE;
END IF;
SELECT COUNT(rental_id) INTO v_out
FROM inventory LEFT JOIN rental USING(inventory_id)
WHERE inventory.inventory_id = p_inventory_id
AND rental.return_date IS NULL;
IF v_out > 0 THEN
RETURN FALSE;
ELSE
RETURN TRUE;
END IF;
END $$
DELIMITER ;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;

642
packages/nocodb-nest/tests/mysql-sakila-db/01-mysql-sakila-schema.sql

@ -0,0 +1,642 @@
-- Sakila Sample Database Schema
-- Version 0.8
-- Copyright (c) 2006, MySQL AB
-- All rights reserved.
-- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-- * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-- * Neither the name of MySQL AB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL';
DROP SCHEMA IF EXISTS sakila;
CREATE SCHEMA sakila;
USE sakila;
--
-- Table structure for table `actor`
--
CREATE TABLE actor (
actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id),
KEY idx_actor_last_name (last_name)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `address`
--
CREATE TABLE address (
address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id SMALLINT UNSIGNED NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (address_id),
KEY idx_fk_city_id (city_id),
CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `category`
--
CREATE TABLE category (
category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name VARCHAR(25) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (category_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `city`
--
CREATE TABLE city (
city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
city VARCHAR(50) NOT NULL,
country_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (city_id),
KEY idx_fk_country_id (country_id),
CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `country`
--
CREATE TABLE country (
country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
country VARCHAR(50) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (country_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `customer`
--
CREATE TABLE customer (
customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
store_id TINYINT UNSIGNED NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
create_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (customer_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
KEY idx_last_name (last_name),
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film`
--
CREATE TABLE film (
film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
title VARCHAR(255) NOT NULL,
description TEXT DEFAULT NULL,
release_year YEAR DEFAULT NULL,
language_id TINYINT UNSIGNED NOT NULL,
original_language_id TINYINT UNSIGNED DEFAULT NULL,
rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3,
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
length SMALLINT UNSIGNED DEFAULT NULL,
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G',
special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id),
KEY idx_title (title),
KEY idx_fk_language_id (language_id),
KEY idx_fk_original_language_id (original_language_id),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film_actor`
--
CREATE TABLE film_actor (
actor_id SMALLINT UNSIGNED NOT NULL,
film_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id,film_id),
KEY idx_fk_film_id (`film_id`),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film_category`
--
CREATE TABLE film_category (
film_id SMALLINT UNSIGNED NOT NULL,
category_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `film_text`
--
CREATE TABLE film_text (
film_id SMALLINT NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (film_id),
FULLTEXT KEY idx_title_description (title,description)
)ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Triggers for loading film_text from film
--
DELIMITER ;;
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN
INSERT INTO film_text (film_id, title, description)
VALUES (new.film_id, new.title, new.description);
END;;
CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN
IF (old.title != new.title) or (old.description != new.description)
THEN
UPDATE film_text
SET title=new.title,
description=new.description,
film_id=new.film_id
WHERE film_id=old.film_id;
END IF;
END;;
CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN
DELETE FROM film_text WHERE film_id = old.film_id;
END;;
DELIMITER ;
--
-- Table structure for table `inventory`
--
CREATE TABLE inventory (
inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT,
film_id SMALLINT UNSIGNED NOT NULL,
store_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (inventory_id),
KEY idx_fk_film_id (film_id),
KEY idx_store_id_film_id (store_id,film_id),
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `language`
--
CREATE TABLE language (
language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name CHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (language_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `payment`
--
CREATE TABLE payment (
payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
customer_id SMALLINT UNSIGNED NOT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (payment_id),
KEY idx_fk_staff_id (staff_id),
KEY idx_fk_customer_id (customer_id),
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `rental`
--
CREATE TABLE rental (
rental_id INT NOT NULL AUTO_INCREMENT,
rental_date DATETIME NOT NULL,
inventory_id MEDIUMINT UNSIGNED NOT NULL,
customer_id SMALLINT UNSIGNED NOT NULL,
return_date DATETIME DEFAULT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rental_id),
UNIQUE KEY (rental_date,inventory_id,customer_id),
KEY idx_fk_inventory_id (inventory_id),
KEY idx_fk_customer_id (customer_id),
KEY idx_fk_staff_id (staff_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `staff`
--
CREATE TABLE staff (
staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
picture BLOB DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id TINYINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) BINARY DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (staff_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `store`
--
CREATE TABLE store (
store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
manager_staff_id TINYINT UNSIGNED NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (store_id),
UNIQUE KEY idx_unique_manager (manager_staff_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- View structure for view `customer_list`
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`,
a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8'active',_utf8'') AS notes, cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `film_list`
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8' ', actor.last_name) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id;
--
-- View structure for view `nicer_but_slower_film_list`
--
CREATE VIEW nicer_but_slower_film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)),
LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)),
LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id;
--
-- View structure for view `staff_list`
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone,
city.city AS city, country.country AS country, s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `sales_by_store`
--
CREATE VIEW sales_by_store
AS
SELECT
CONCAT(c.city, _utf8',', cy.country) AS store
, CONCAT(m.first_name, _utf8' ', m.last_name) AS manager
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY s.store_id
ORDER BY cy.country, c.city;
--
-- View structure for view `sales_by_film_category`
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
ORDER BY total_sales DESC;
--
-- View structure for view `actor_info`
--
CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM sakila.film f
INNER JOIN sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM sakila.actor a
LEFT JOIN sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
--
-- Procedure structure for procedure `rewards_report`
--
DELIMITER //
CREATE PROCEDURE rewards_report (
IN min_monthly_purchases TINYINT UNSIGNED
, IN min_dollar_amount_purchased DECIMAL(10,2) UNSIGNED
, OUT count_rewardees INT
)
LANGUAGE SQL
NOT DETERMINISTIC
READS SQL DATA
SQL SECURITY DEFINER
COMMENT 'Provides a customizable report on best customers'
proc: BEGIN
DECLARE last_month_start DATE;
DECLARE last_month_end DATE;
/* Some sanity checks... */
IF min_monthly_purchases = 0 THEN
SELECT 'Minimum monthly purchases parameter must be > 0';
LEAVE proc;
END IF;
IF min_dollar_amount_purchased = 0.00 THEN
SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00';
LEAVE proc;
END IF;
/* Determine start and end time periods */
SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH);
SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d');
SET last_month_end = LAST_DAY(last_month_start);
/*
Create a temporary storage area for
Customer IDs.
*/
CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY);
/*
Find all customers meeting the
monthly purchase requirements
*/
INSERT INTO tmpCustomer (customer_id)
SELECT p.customer_id
FROM payment AS p
WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end
GROUP BY customer_id
HAVING SUM(p.amount) > min_dollar_amount_purchased
AND COUNT(customer_id) > min_monthly_purchases;
/* Populate OUT parameter with count of found customers */
SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees;
/*
Output ALL customer information of matching rewardees.
Customize output as needed.
*/
SELECT c.*
FROM tmpCustomer AS t
INNER JOIN customer AS c ON t.customer_id = c.customer_id;
/* Clean up */
DROP TABLE tmpCustomer;
END //
DELIMITER ;
DELIMITER $$
CREATE FUNCTION get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
DETERMINISTIC
READS SQL DATA
BEGIN
#OK, WE NEED TO CALCULATE THE CURRENT BALANCE GIVEN A CUSTOMER_ID AND A DATE
#THAT WE WANT THE BALANCE TO BE EFFECTIVE FOR. THE BALANCE IS:
# 1) RENTAL FEES FOR ALL PREVIOUS RENTALS
# 2) ONE DOLLAR FOR EVERY DAY THE PREVIOUS RENTALS ARE OVERDUE
# 3) IF A FILM IS MORE THAN RENTAL_DURATION * 2 OVERDUE, CHARGE THE REPLACEMENT_COST
# 4) SUBTRACT ALL PAYMENTS MADE BEFORE THE DATE SPECIFIED
DECLARE v_rentfees DECIMAL(5,2); #FEES PAID TO RENT THE VIDEOS INITIALLY
DECLARE v_overfees INTEGER; #LATE FEES FOR PRIOR RENTALS
DECLARE v_payments DECIMAL(5,2); #SUM OF PAYMENTS MADE PREVIOUSLY
SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees
FROM film, inventory, rental
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees
FROM rental, inventory, film
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(payment.amount),0) INTO v_payments
FROM payment
WHERE payment.payment_date <= p_effective_date
AND payment.customer_id = p_customer_id;
RETURN v_rentfees + v_overfees - v_payments;
END $$
DELIMITER ;
DELIMITER $$
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND inventory_in_stock(inventory_id);
SELECT FOUND_ROWS() INTO p_film_count;
END $$
DELIMITER ;
DELIMITER $$
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND NOT inventory_in_stock(inventory_id);
SELECT FOUND_ROWS() INTO p_film_count;
END $$
DELIMITER ;
DELIMITER $$
CREATE FUNCTION inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
DECLARE v_customer_id INT;
DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;
SELECT customer_id INTO v_customer_id
FROM rental
WHERE return_date IS NULL
AND inventory_id = p_inventory_id;
RETURN v_customer_id;
END $$
DELIMITER ;
DELIMITER $$
CREATE FUNCTION inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
DECLARE v_rentals INT;
DECLARE v_out INT;
#AN ITEM IS IN-STOCK IF THERE ARE EITHER NO ROWS IN THE rental TABLE
#FOR THE ITEM OR ALL ROWS HAVE return_date POPULATED
SELECT COUNT(*) INTO v_rentals
FROM rental
WHERE inventory_id = p_inventory_id;
IF v_rentals = 0 THEN
RETURN TRUE;
END IF;
SELECT COUNT(rental_id) INTO v_out
FROM inventory LEFT JOIN rental USING(inventory_id)
WHERE inventory.inventory_id = p_inventory_id
AND rental.return_date IS NULL;
IF v_out > 0 THEN
RETURN FALSE;
ELSE
RETURN TRUE;
END IF;
END $$
DELIMITER ;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;

165
packages/nocodb-nest/tests/mysql-sakila-db/02-mysql-sakila-insert-data.sql

File diff suppressed because one or more lines are too long

658
packages/nocodb-nest/tests/mysql-sakila-db/03-test-sakila-schema.sql

@ -0,0 +1,658 @@
-- Sakila Sample Database Schema
-- Version 1.2
-- Copyright (c) 2006, 2019, Oracle and/or its affiliates.
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of Oracle nor the names of its contributors may be used
-- to endorse or promote products derived from this software without
-- specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
-- IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-- THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-- PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-- CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
SET NAMES utf8mb4;
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL';
DROP SCHEMA IF EXISTS test_sakila;
CREATE SCHEMA test_sakila;
USE test_sakila;
--
-- Table structure for table `actor`
--
CREATE TABLE actor (
actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id),
KEY idx_actor_last_name (last_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `address`
--
CREATE TABLE address (
address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id SMALLINT UNSIGNED NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
-- Add GEOMETRY column for MySQL 5.7.5 and higher
-- Also include SRID attribute for MySQL 8.0.3 and higher
/*!50705 location GEOMETRY */ /*!80003 SRID 0 */ /*!50705 NOT NULL,*/
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (address_id),
KEY idx_fk_city_id (city_id),
/*!50705 SPATIAL KEY `idx_location` (location),*/
CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `category`
--
CREATE TABLE category (
category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name VARCHAR(25) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (category_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `city`
--
CREATE TABLE city (
city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
city VARCHAR(50) NOT NULL,
country_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (city_id),
KEY idx_fk_country_id (country_id),
CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `country`
--
CREATE TABLE country (
country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
country VARCHAR(50) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (country_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `customer`
--
CREATE TABLE customer (
customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
store_id TINYINT UNSIGNED NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
create_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (customer_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
KEY idx_last_name (last_name),
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film`
--
CREATE TABLE film (
film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
title VARCHAR(128) NOT NULL,
description TEXT DEFAULT NULL,
release_year YEAR DEFAULT NULL,
language_id TINYINT UNSIGNED NOT NULL,
original_language_id TINYINT UNSIGNED DEFAULT NULL,
rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3,
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
length SMALLINT UNSIGNED DEFAULT NULL,
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G',
special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id),
KEY idx_title (title),
KEY idx_fk_language_id (language_id),
KEY idx_fk_original_language_id (original_language_id),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film_actor`
--
CREATE TABLE film_actor (
actor_id SMALLINT UNSIGNED NOT NULL,
film_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (actor_id,film_id),
KEY idx_fk_film_id (`film_id`),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film_category`
--
CREATE TABLE film_category (
film_id SMALLINT UNSIGNED NOT NULL,
category_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `film_text`
--
-- InnoDB added FULLTEXT support in 5.6.10. If you use an
-- earlier version, then consider upgrading (recommended) or
-- changing InnoDB to MyISAM as the film_text engine
--
-- Use InnoDB for film_text as of 5.6.10, MyISAM prior to 5.6.10.
SET @old_default_storage_engine = @@default_storage_engine;
SET @@default_storage_engine = 'MyISAM';
/*!50610 SET @@default_storage_engine = 'InnoDB'*/;
CREATE TABLE film_text (
film_id SMALLINT NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (film_id),
FULLTEXT KEY idx_title_description (title,description)
) DEFAULT CHARSET=utf8mb4;
SET @@default_storage_engine = @old_default_storage_engine;
--
-- Triggers for loading film_text from film
--
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN
INSERT INTO film_text (film_id, title, description)
VALUES (new.film_id, new.title, new.description);
END;
CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN
IF (old.title != new.title) OR (old.description != new.description) OR (old.film_id != new.film_id)
THEN
UPDATE film_text
SET title=new.title,
description=new.description,
film_id=new.film_id
WHERE film_id=old.film_id;
END IF;
END;
CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN
DELETE FROM film_text WHERE film_id = old.film_id;
END;
--
-- Table structure for table `inventory`
--
CREATE TABLE inventory (
inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT,
film_id SMALLINT UNSIGNED NOT NULL,
store_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (inventory_id),
KEY idx_fk_film_id (film_id),
KEY idx_store_id_film_id (store_id,film_id),
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `language`
--
CREATE TABLE language (
language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
name CHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (language_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `payment`
--
CREATE TABLE payment (
payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
customer_id SMALLINT UNSIGNED NOT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date DATETIME NOT NULL,
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (payment_id),
KEY idx_fk_staff_id (staff_id),
KEY idx_fk_customer_id (customer_id),
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `rental`
--
CREATE TABLE rental (
rental_id INT NOT NULL AUTO_INCREMENT,
rental_date DATETIME NOT NULL,
inventory_id MEDIUMINT UNSIGNED NOT NULL,
customer_id SMALLINT UNSIGNED NOT NULL,
return_date DATETIME DEFAULT NULL,
staff_id TINYINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rental_id),
UNIQUE KEY (rental_date,inventory_id,customer_id),
KEY idx_fk_inventory_id (inventory_id),
KEY idx_fk_customer_id (customer_id),
KEY idx_fk_staff_id (staff_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `staff`
--
CREATE TABLE staff (
staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
picture BLOB DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id TINYINT UNSIGNED NOT NULL,
active BOOLEAN NOT NULL DEFAULT TRUE,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (staff_id),
KEY idx_fk_store_id (store_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Table structure for table `store`
--
CREATE TABLE store (
store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
manager_staff_id TINYINT UNSIGNED NOT NULL,
address_id SMALLINT UNSIGNED NOT NULL,
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (store_id),
UNIQUE KEY idx_unique_manager (manager_staff_id),
KEY idx_fk_address_id (address_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- View structure for view `customer_list`
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8mb4' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`,
a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8mb4'active',_utf8mb4'') AS notes, cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `film_list`
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8mb4' ', actor.last_name) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id, category.name;
--
-- View structure for view `nicer_but_slower_film_list`
--
CREATE VIEW nicer_but_slower_film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)),
LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8mb4' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)),
LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id, category.name;
--
-- View structure for view `staff_list`
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8mb4' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone,
city.city AS city, country.country AS country, s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
--
-- View structure for view `sales_by_store`
--
CREATE VIEW sales_by_store
AS
SELECT
CONCAT(c.city, _utf8mb4',', cy.country) AS store
, CONCAT(m.first_name, _utf8mb4' ', m.last_name) AS manager
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY s.store_id
ORDER BY cy.country, c.city;
--
-- View structure for view `sales_by_film_category`
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
ORDER BY total_sales DESC;
--
-- View structure for view `actor_info`
--
CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM test_sakila.film f
INNER JOIN test_sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN test_sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM test_sakila.actor a
LEFT JOIN test_sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN test_sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN test_sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
--
-- Procedure structure for procedure `rewards_report`
--
CREATE PROCEDURE rewards_report (
IN min_monthly_purchases TINYINT UNSIGNED
, IN min_dollar_amount_purchased DECIMAL(10,2)
, OUT count_rewardees INT
)
LANGUAGE SQL
NOT DETERMINISTIC
READS SQL DATA
SQL SECURITY DEFINER
COMMENT 'Provides a customizable report on best customers'
proc: BEGIN
DECLARE last_month_start DATE;
DECLARE last_month_end DATE;
/* Some sanity checks... */
IF min_monthly_purchases = 0 THEN
SELECT 'Minimum monthly purchases parameter must be > 0';
LEAVE proc;
END IF;
IF min_dollar_amount_purchased = 0.00 THEN
SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00';
LEAVE proc;
END IF;
/* Determine start and end time periods */
SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH);
SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d');
SET last_month_end = LAST_DAY(last_month_start);
/*
Create a temporary storage area for
Customer IDs.
*/
CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY);
/*
Find all customers meeting the
monthly purchase requirements
*/
INSERT INTO tmpCustomer (customer_id)
SELECT p.customer_id
FROM payment AS p
WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end
GROUP BY customer_id
HAVING SUM(p.amount) > min_dollar_amount_purchased
AND COUNT(customer_id) > min_monthly_purchases;
/* Populate OUT parameter with count of found customers */
SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees;
/*
Output ALL customer information of matching rewardees.
Customize output as needed.
*/
SELECT c.*
FROM tmpCustomer AS t
INNER JOIN customer AS c ON t.customer_id = c.customer_id;
/* Clean up */
DROP TABLE tmpCustomer;
END;
CREATE FUNCTION get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
DETERMINISTIC
READS SQL DATA
BEGIN
#OK, WE NEED TO CALCULATE THE CURRENT BALANCE GIVEN A CUSTOMER_ID AND A DATE
#THAT WE WANT THE BALANCE TO BE EFFECTIVE FOR. THE BALANCE IS:
# 1) RENTAL FEES FOR ALL PREVIOUS RENTALS
# 2) ONE DOLLAR FOR EVERY DAY THE PREVIOUS RENTALS ARE OVERDUE
# 3) IF A FILM IS MORE THAN RENTAL_DURATION * 2 OVERDUE, CHARGE THE REPLACEMENT_COST
# 4) SUBTRACT ALL PAYMENTS MADE BEFORE THE DATE SPECIFIED
DECLARE v_rentfees DECIMAL(5,2); #FEES PAID TO RENT THE VIDEOS INITIALLY
DECLARE v_overfees INTEGER; #LATE FEES FOR PRIOR RENTALS
DECLARE v_payments DECIMAL(5,2); #SUM OF PAYMENTS MADE PREVIOUSLY
SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees
FROM film, inventory, rental
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees
FROM rental, inventory, film
WHERE film.film_id = inventory.film_id
AND inventory.inventory_id = rental.inventory_id
AND rental.rental_date <= p_effective_date
AND rental.customer_id = p_customer_id;
SELECT IFNULL(SUM(payment.amount),0) INTO v_payments
FROM payment
WHERE payment.payment_date <= p_effective_date
AND payment.customer_id = p_customer_id;
RETURN v_rentfees + v_overfees - v_payments;
END;
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND inventory_in_stock(inventory_id);
SELECT COUNT(*)
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND inventory_in_stock(inventory_id)
INTO p_film_count;
END;
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
SELECT inventory_id
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND NOT inventory_in_stock(inventory_id);
SELECT COUNT(*)
FROM inventory
WHERE film_id = p_film_id
AND store_id = p_store_id
AND NOT inventory_in_stock(inventory_id)
INTO p_film_count;
END;
CREATE FUNCTION inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
DECLARE v_customer_id INT;
DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;
SELECT customer_id INTO v_customer_id
FROM rental
WHERE return_date IS NULL
AND inventory_id = p_inventory_id;
RETURN v_customer_id;
END;
CREATE FUNCTION inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
DECLARE v_rentals INT;
DECLARE v_out INT;
#AN ITEM IS IN-STOCK IF THERE ARE EITHER NO ROWS IN THE rental TABLE
#FOR THE ITEM OR ALL ROWS HAVE return_date POPULATED
SELECT COUNT(*) INTO v_rentals
FROM rental
WHERE inventory_id = p_inventory_id;
IF v_rentals = 0 THEN
RETURN TRUE;
END IF;
SELECT COUNT(rental_id) INTO v_out
FROM inventory LEFT JOIN rental USING(inventory_id)
WHERE inventory.inventory_id = p_inventory_id
AND rental.return_date IS NULL;
IF v_out > 0 THEN
RETURN FALSE;
ELSE
RETURN TRUE;
END IF;
END;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;

46449
packages/nocodb-nest/tests/mysql-sakila-db/04-test-sakila-data.sql

File diff suppressed because one or more lines are too long

4759
packages/nocodb-nest/tests/pg-cy-quick/01-cy-quick.sql

File diff suppressed because it is too large Load Diff

1711
packages/nocodb-nest/tests/pg-sakila-db/01-postgres-sakila-schema.sql

File diff suppressed because it is too large Load Diff

231654
packages/nocodb-nest/tests/pg-sakila-db/02-postgres-sakila-insert-data.sql

File diff suppressed because it is too large Load Diff

1710
packages/nocodb-nest/tests/pg-sakila-db/03-postgres-sakila-schema.sql

File diff suppressed because it is too large Load Diff

46702
packages/nocodb-nest/tests/pg-sakila-db/04-postgres-sakila-insert-data.sql

File diff suppressed because it is too large Load Diff

504
packages/nocodb-nest/tests/sql-server-sakila-db/01-sql-server-sakila-schema.sql

@ -0,0 +1,504 @@
/*
Sakila for Microsoft SQL Server is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance
License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com
*/
CREATE DATABASE sakila;
GO
USE sakila;
--
-- Table structure for table actor
--
CREATE TABLE actor (
actor_id int NOT NULL IDENTITY ,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (actor_id)
)
GO
ALTER TABLE actor ADD CONSTRAINT [DF_actor_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_actor_last_name ON actor(last_name)
GO
--
-- Table structure for table country
--
CREATE TABLE country (
country_id SMALLINT NOT NULL IDENTITY ,
country VARCHAR(50) NOT NULL,
last_update DATETIME,
PRIMARY KEY NONCLUSTERED (country_id)
)
GO
ALTER TABLE country ADD CONSTRAINT [DF_country_last_update] DEFAULT (getdate()) FOR last_update
GO
--
-- Table structure for table city
--
CREATE TABLE city (
city_id int NOT NULL IDENTITY ,
city VARCHAR(50) NOT NULL,
country_id SMALLINT NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (city_id),
CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
GO
ALTER TABLE city ADD CONSTRAINT [DF_city_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_country_id ON city(country_id)
GO
--
-- Table structure for table address
--
CREATE TABLE address (
address_id int NOT NULL IDENTITY ,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id INT NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (address_id)
)
GO
ALTER TABLE address ADD CONSTRAINT [DF_address_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_city_id ON address(city_id)
GO
ALTER TABLE address ADD CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE
GO
--
-- Table structure for table language
--
CREATE TABLE language (
language_id TINYINT NOT NULL IDENTITY,
name CHAR(20) NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (language_id)
)
GO
ALTER TABLE language ADD CONSTRAINT [DF_language_last_update] DEFAULT (getdate()) FOR last_update
GO
--
-- Table structure for table category
--
CREATE TABLE category (
category_id TINYINT NOT NULL IDENTITY,
name VARCHAR(25) NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (category_id)
)
GO
ALTER TABLE category ADD CONSTRAINT [DF_category_last_update] DEFAULT (getdate()) FOR last_update
GO
--
-- Table structure for table customer
--
CREATE TABLE customer (
customer_id INT NOT NULL IDENTITY ,
store_id INT NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id INT NOT NULL,
active CHAR(1) NOT NULL DEFAULT 'Y',
create_date DATETIME NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (customer_id),
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
GO
ALTER TABLE customer ADD CONSTRAINT [DF_customer_last_update] DEFAULT (getdate()) FOR last_update
GO
ALTER TABLE customer ADD CONSTRAINT [DF_customer_create_date] DEFAULT (getdate()) FOR create_date
GO
CREATE INDEX idx_fk_store_id ON customer(store_id)
GO
CREATE INDEX idx_fk_address_id ON customer(address_id)
GO
CREATE INDEX idx_last_name ON customer(last_name)
GO
--
-- Table structure for table film
--
CREATE TABLE film (
film_id int NOT NULL IDENTITY ,
title VARCHAR(255) NOT NULL,
description TEXT DEFAULT NULL,
release_year VARCHAR(4) NULL,
language_id TINYINT NOT NULL,
original_language_id TINYINT DEFAULT NULL,
rental_duration TINYINT NOT NULL DEFAULT 3,
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
length SMALLINT DEFAULT NULL,
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
rating VARCHAR(10) DEFAULT 'G',
special_features VARCHAR(255) DEFAULT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (film_id),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id)
)
GO
ALTER TABLE film ADD CONSTRAINT CHECK_special_features CHECK(special_features is null or
special_features like '%Trailers%' or
special_features like '%Commentaries%' or
special_features like '%Deleted Scenes%' or
special_features like '%Behind the Scenes%')
GO
ALTER TABLE film ADD CONSTRAINT CHECK_special_rating CHECK(rating in ('G','PG','PG-13','R','NC-17'))
GO
ALTER TABLE film ADD CONSTRAINT [DF_film_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_language_id ON film(language_id)
GO
CREATE INDEX idx_fk_original_language_id ON film(original_language_id)
GO
--
-- Table structure for table film_actor
--
CREATE TABLE film_actor (
actor_id INT NOT NULL,
film_id INT NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (actor_id,film_id),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
GO
ALTER TABLE film_actor ADD CONSTRAINT [DF_film_actor_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_film_actor_film ON film_actor(film_id)
GO
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id)
GO
--
-- Table structure for table film_category
--
CREATE TABLE film_category (
film_id INT NOT NULL,
category_id TINYINT NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
GO
ALTER TABLE film_category ADD CONSTRAINT [DF_film_category_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_film_category_film ON film_category(film_id)
GO
CREATE INDEX idx_fk_film_category_category ON film_category(category_id)
GO
--
-- Table structure for table film_text
--
CREATE TABLE film_text (
film_id SMALLINT NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY NONCLUSTERED (film_id),
)
--
-- Table structure for table inventory
--
CREATE TABLE inventory (
inventory_id INT NOT NULL IDENTITY,
film_id INT NOT NULL,
store_id INT NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (inventory_id),
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
GO
ALTER TABLE inventory ADD CONSTRAINT [DF_inventory_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_film_id ON inventory(film_id)
GO
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id,film_id)
GO
--
-- Table structure for table staff
--
CREATE TABLE staff (
staff_id TINYINT NOT NULL IDENTITY,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id INT NOT NULL,
picture IMAGE DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id INT NOT NULL,
active BIT NOT NULL DEFAULT 1,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) DEFAULT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (staff_id),
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
GO
ALTER TABLE staff ADD CONSTRAINT [DF_staff_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_store_id ON staff(store_id)
GO
CREATE INDEX idx_fk_address_id ON staff(address_id)
GO
--
-- Table structure for table store
--
CREATE TABLE store (
store_id INT NOT NULL IDENTITY,
manager_staff_id TINYINT NOT NULL,
address_id INT NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (store_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id)
)
GO
ALTER TABLE store ADD CONSTRAINT [DF_store_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE UNIQUE NONCLUSTERED INDEX idx_fk_address_id ON store(manager_staff_id)
GO
CREATE INDEX idx_fk_store_address ON store(address_id)
GO
--
-- Table structure for table payment
--
CREATE TABLE payment (
payment_id int NOT NULL IDENTITY ,
customer_id INT NOT NULL,
staff_id TINYINT NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date DATETIME NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (payment_id),
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id)
)
GO
ALTER TABLE payment ADD CONSTRAINT [DF_payment_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_staff_id ON payment(staff_id)
GO
CREATE INDEX idx_fk_customer_id ON payment(customer_id)
GO
--
-- Table structure for table rental
--
CREATE TABLE rental (
rental_id INT NOT NULL IDENTITY,
rental_date DATETIME NOT NULL,
inventory_id INT NOT NULL,
customer_id INT NOT NULL,
return_date DATETIME DEFAULT NULL,
staff_id TINYINT NOT NULL,
last_update DATETIME NOT NULL,
PRIMARY KEY NONCLUSTERED (rental_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id)
)
GO
ALTER TABLE rental ADD CONSTRAINT [DF_rental_last_update] DEFAULT (getdate()) FOR last_update
GO
CREATE INDEX idx_fk_inventory_id ON rental(inventory_id)
GO
CREATE INDEX idx_fk_customer_id ON rental(customer_id)
GO
CREATE INDEX idx_fk_staff_id ON rental(staff_id)
GO
CREATE UNIQUE INDEX idx_uq ON rental (rental_date,inventory_id,customer_id)
GO
-- FK CONSTRAINTS
ALTER TABLE customer ADD CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE
GO
ALTER TABLE inventory ADD CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE;
GO
ALTER TABLE staff ADD CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE;
GO
ALTER TABLE payment ADD CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE;
GO
--
-- View structure for view customer_list
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID,
cu.first_name + ' ' + cu.last_name AS name,
a.address AS address,
a.postal_code AS zip_code,
a.phone AS phone,
city.city AS city,
country.country AS country,
case when cu.active=1 then 'active' else '' end AS notes,
cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id
GO
--
-- View structure for view film_list
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID,
film.title AS title,
film.description AS description,
category.name AS category,
film.rental_rate AS price,
film.length AS length,
film.rating AS rating,
actor.first_name+' '+actor.last_name AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
GO
--
-- View structure for view staff_list
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID,
s.first_name+' '+s.last_name AS name,
a.address AS address,
a.postal_code AS zip_code,
a.phone AS phone,
city.city AS city,
country.country AS country,
s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id
GO
--
-- View structure for view sales_by_store
--
CREATE VIEW sales_by_store
AS
SELECT
s.store_id
,c.city+','+cy.country AS store
,m.first_name+' '+ m.last_name AS manager
,SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY
s.store_id
, c.city+ ','+cy.country
, m.first_name+' '+ m.last_name
GO
--
-- View structure for view sales_by_film_category
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
GO
--
-- View structure for view actor_info
--
/*
CREATE VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM sakila.film f
INNER JOIN sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM sakila.actor a
LEFT JOIN sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
*/
-- TO DO PROCEDURES
-- TO DO TRIGGERS

231584
packages/nocodb-nest/tests/sql-server-sakila-db/02-sql-server-sakila-insert-data.sql

File diff suppressed because it is too large Load Diff

0
packages/nocodb-nest/tests/sql-server-sakila-db/mssql-ready.sh

BIN
packages/nocodb-nest/tests/sqlite-dump/sakila.db

Binary file not shown.

45
packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-delete-data.sql

@ -0,0 +1,45 @@
/*
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance
License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com
*/
-- Delete data
DELETE FROM payment
;
DELETE FROM rental
;
DELETE FROM customer
;
DELETE FROM film_category
;
DELETE FROM film_text
;
DELETE FROM film_actor
;
DELETE FROM inventory
;
DELETE FROM film
;
DELETE FROM category
;
DELETE FROM staff
;
DELETE FROM store
;
DELETE FROM actor
;
DELETE FROM address
;
DELETE FROM city
;
DELETE FROM country
;
DELETE FROM language
;

70
packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-drop-objects.sql

@ -0,0 +1,70 @@
/*
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance
License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com
*/
-- Drop Views
DROP VIEW customer_list
;
DROP VIEW film_list
;
--DROP VIEW nicer_but_slower_film_list;
DROP VIEW sales_by_film_category
;
DROP VIEW sales_by_store
;
DROP VIEW staff_list
;
-- Drop Tables
DROP TABLE payment
;
DROP TABLE rental
;
DROP TABLE inventory
;
DROP TABLE film_text
;
DROP TABLE film_category
;
DROP TABLE film_actor
;
DROP TABLE film
;
DROP TABLE language
;
DROP TABLE customer
;
DROP TABLE actor
;
DROP TABLE category
;
DROP TABLE store
;
DROP TABLE address
;
DROP TABLE staff
;
DROP TABLE city
;
DROP TABLE country
;
-- Procedures and views
--drop procedure film_in_stock;
--drop procedure film_not_in_stock;
--drop function get_customer_balance;
--drop function inventory_held_by_customer;
--drop function inventory_in_stock;
--drop procedure rewards_report;

231502
packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-insert-data.sql

File diff suppressed because it is too large Load Diff

645
packages/nocodb-nest/tests/sqlite-dump/sqlite-sakila-schema.sql

@ -0,0 +1,645 @@
/*
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance
License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com
*/
--
-- Table structure for table actor
--
--DROP TABLE actor;
CREATE TABLE actor (
actor_id numeric NOT NULL ,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (actor_id)
)
;
CREATE INDEX idx_actor_last_name ON actor(last_name)
;
CREATE TRIGGER actor_trigger_ai AFTER INSERT ON actor
BEGIN
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER actor_trigger_au AFTER UPDATE ON actor
BEGIN
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table country
--
CREATE TABLE country (
country_id SMALLINT NOT NULL,
country VARCHAR(50) NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (country_id)
)
;
CREATE TRIGGER country_trigger_ai AFTER INSERT ON country
BEGIN
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER country_trigger_au AFTER UPDATE ON country
BEGIN
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table city
--
CREATE TABLE city (
city_id int NOT NULL,
city VARCHAR(50) NOT NULL,
country_id SMALLINT NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (city_id),
CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_country_id ON city(country_id)
;
CREATE TRIGGER city_trigger_ai AFTER INSERT ON city
BEGIN
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER city_trigger_au AFTER UPDATE ON city
BEGIN
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table address
--
CREATE TABLE address (
address_id int NOT NULL,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id INT NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (address_id),
CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_city_id ON address(city_id)
;
CREATE TRIGGER address_trigger_ai AFTER INSERT ON address
BEGIN
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER address_trigger_au AFTER UPDATE ON address
BEGIN
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table language
--
CREATE TABLE language (
language_id SMALLINT NOT NULL ,
name CHAR(20) NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (language_id)
)
;
CREATE TRIGGER language_trigger_ai AFTER INSERT ON language
BEGIN
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER language_trigger_au AFTER UPDATE ON language
BEGIN
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table category
--
CREATE TABLE category (
category_id SMALLINT NOT NULL,
name VARCHAR(25) NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (category_id)
);
CREATE TRIGGER category_trigger_ai AFTER INSERT ON category
BEGIN
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER category_trigger_au AFTER UPDATE ON category
BEGIN
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table customer
--
CREATE TABLE customer (
customer_id INT NOT NULL,
store_id INT NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id INT NOT NULL,
active CHAR(1) DEFAULT 'Y' NOT NULL,
create_date TIMESTAMP NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (customer_id),
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_customer_fk_store_id ON customer(store_id)
;
CREATE INDEX idx_customer_fk_address_id ON customer(address_id)
;
CREATE INDEX idx_customer_last_name ON customer(last_name)
;
CREATE TRIGGER customer_trigger_ai AFTER INSERT ON customer
BEGIN
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER customer_trigger_au AFTER UPDATE ON customer
BEGIN
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table film
--
CREATE TABLE film (
film_id int NOT NULL,
title VARCHAR(255) NOT NULL,
description BLOB SUB_TYPE TEXT DEFAULT NULL,
release_year VARCHAR(4) DEFAULT NULL,
language_id SMALLINT NOT NULL,
original_language_id SMALLINT DEFAULT NULL,
rental_duration SMALLINT DEFAULT 3 NOT NULL,
rental_rate DECIMAL(4,2) DEFAULT 4.99 NOT NULL,
length SMALLINT DEFAULT NULL,
replacement_cost DECIMAL(5,2) DEFAULT 19.99 NOT NULL,
rating VARCHAR(10) DEFAULT 'G',
special_features VARCHAR(100) DEFAULT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (film_id),
CONSTRAINT CHECK_special_features CHECK(special_features is null or
special_features like '%Trailers%' or
special_features like '%Commentaries%' or
special_features like '%Deleted Scenes%' or
special_features like '%Behind the Scenes%'),
CONSTRAINT CHECK_special_rating CHECK(rating in ('G','PG','PG-13','R','NC-17')),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id)
)
;
CREATE INDEX idx_fk_language_id ON film(language_id)
;
CREATE INDEX idx_fk_original_language_id ON film(original_language_id)
;
CREATE TRIGGER film_trigger_ai AFTER INSERT ON film
BEGIN
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_trigger_au AFTER UPDATE ON film
BEGIN
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table film_actor
--
CREATE TABLE film_actor (
actor_id INT NOT NULL,
film_id INT NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (actor_id,film_id),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_film_actor_film ON film_actor(film_id)
;
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id)
;
CREATE TRIGGER film_actor_trigger_ai AFTER INSERT ON film_actor
BEGIN
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_actor_trigger_au AFTER UPDATE ON film_actor
BEGIN
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table film_category
--
CREATE TABLE film_category (
film_id INT NOT NULL,
category_id SMALLINT NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_film_category_film ON film_category(film_id)
;
CREATE INDEX idx_fk_film_category_category ON film_category(category_id)
;
CREATE TRIGGER film_category_trigger_ai AFTER INSERT ON film_category
BEGIN
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_category_trigger_au AFTER UPDATE ON film_category
BEGIN
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table film_text
--
CREATE TABLE film_text (
film_id SMALLINT NOT NULL,
title VARCHAR(255) NOT NULL,
description BLOB SUB_TYPE TEXT,
PRIMARY KEY (film_id)
)
;
--
-- Table structure for table inventory
--
CREATE TABLE inventory (
inventory_id INT NOT NULL,
film_id INT NOT NULL,
store_id INT NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (inventory_id),
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_film_id ON inventory(film_id)
;
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id,film_id)
;
CREATE TRIGGER inventory_trigger_ai AFTER INSERT ON inventory
BEGIN
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER inventory_trigger_au AFTER UPDATE ON inventory
BEGIN
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table staff
--
CREATE TABLE staff (
staff_id SMALLINT NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id INT NOT NULL,
picture BLOB DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id INT NOT NULL,
active SMALLINT DEFAULT 1 NOT NULL,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) DEFAULT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (staff_id),
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_staff_store_id ON staff(store_id)
;
CREATE INDEX idx_fk_staff_address_id ON staff(address_id)
;
CREATE TRIGGER staff_trigger_ai AFTER INSERT ON staff
BEGIN
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER staff_trigger_au AFTER UPDATE ON staff
BEGIN
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table store
--
CREATE TABLE store (
store_id INT NOT NULL,
manager_staff_id SMALLINT NOT NULL,
address_id INT NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (store_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id)
)
;
CREATE INDEX idx_store_fk_manager_staff_id ON store(manager_staff_id)
;
CREATE INDEX idx_fk_store_address ON store(address_id)
;
CREATE TRIGGER store_trigger_ai AFTER INSERT ON store
BEGIN
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER store_trigger_au AFTER UPDATE ON store
BEGIN
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- Table structure for table payment
--
CREATE TABLE payment (
payment_id int NOT NULL,
customer_id INT NOT NULL,
staff_id SMALLINT NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date TIMESTAMP NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (payment_id),
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id)
)
;
CREATE INDEX idx_fk_staff_id ON payment(staff_id)
;
CREATE INDEX idx_fk_customer_id ON payment(customer_id)
;
CREATE TRIGGER payment_trigger_ai AFTER INSERT ON payment
BEGIN
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER payment_trigger_au AFTER UPDATE ON payment
BEGIN
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TABLE rental (
rental_id INT NOT NULL,
rental_date TIMESTAMP NOT NULL,
inventory_id INT NOT NULL,
customer_id INT NOT NULL,
return_date TIMESTAMP DEFAULT NULL,
staff_id SMALLINT NOT NULL,
last_update TIMESTAMP NOT NULL,
PRIMARY KEY (rental_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id)
)
;
CREATE INDEX idx_rental_fk_inventory_id ON rental(inventory_id)
;
CREATE INDEX idx_rental_fk_customer_id ON rental(customer_id)
;
CREATE INDEX idx_rental_fk_staff_id ON rental(staff_id)
;
CREATE UNIQUE INDEX idx_rental_uq ON rental (rental_date,inventory_id,customer_id)
;
CREATE TRIGGER rental_trigger_ai AFTER INSERT ON rental
BEGIN
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER rental_trigger_au AFTER UPDATE ON rental
BEGIN
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
--
-- View structure for view customer_list
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID,
cu.first_name||' '||cu.last_name AS name,
a.address AS address,
a.postal_code AS zip_code,
a.phone AS phone,
city.city AS city,
country.country AS country,
case when cu.active=1 then 'active' else '' end AS notes,
cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id
;
--
-- View structure for view film_list
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID,
film.title AS title,
film.description AS description,
category.name AS category,
film.rental_rate AS price,
film.length AS length,
film.rating AS rating,
actor.first_name||' '||actor.last_name AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
;
--
-- View structure for view staff_list
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID,
s.first_name||' '||s.last_name AS name,
a.address AS address,
a.postal_code AS zip_code,
a.phone AS phone,
city.city AS city,
country.country AS country,
s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id
;
--
-- View structure for view sales_by_store
--
CREATE VIEW sales_by_store
AS
SELECT
s.store_id
,c.city||','||cy.country AS store
,m.first_name||' '||m.last_name AS manager
,SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY
s.store_id
, c.city||','||cy.country
, m.first_name||' '||m.last_name
;
--
-- View structure for view sales_by_film_category
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
;
--
-- View structure for view actor_info
--
/*
CREATE VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM sakila.film f
INNER JOIN sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM sakila.actor a
LEFT JOIN sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
*/
-- TO DO PROCEDURES
-- TO DO TRIGGERS

467
packages/nocodb-nest/tests/sqlite-sakila-db/01-sqlite-sakila-schema.sql

@ -0,0 +1,467 @@
/*
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance
License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com
*/
--
-- Table structure for table actor
--
--DROP TABLE actor;
CREATE TABLE actor (
actor_id INTEGER NOT NULL ,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (actor_id)
)
;
CREATE INDEX idx_actor_last_name ON actor(last_name)
;
--
-- Table structure for table country
--
CREATE TABLE country (
country_id INTEGER NOT NULL,
country VARCHAR(50) NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (country_id)
)
;
--
-- Table structure for table city
--
CREATE TABLE city (
city_id INTEGER NOT NULL,
city VARCHAR(50) NOT NULL,
country_id INTEGER NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (city_id),
CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_country_id ON city(country_id)
;
--
-- Table structure for table address
--
CREATE TABLE address (
address_id INTEGER NOT NULL,
address VARCHAR(50) NOT NULL,
address2 VARCHAR(50) DEFAULT NULL,
district VARCHAR(20) NOT NULL,
city_id INT NOT NULL,
postal_code VARCHAR(10) DEFAULT NULL,
phone VARCHAR(20) NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (address_id),
CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_city_id ON address(city_id)
;
--
-- Table structure for table language
--
CREATE TABLE language (
language_id INTEGER NOT NULL ,
name CHAR(20) NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (language_id)
)
;
--
-- Table structure for table category
--
CREATE TABLE category (
category_id INTEGER NOT NULL,
name VARCHAR(25) NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (category_id)
);
--
-- Table structure for table customer
--
CREATE TABLE customer (
customer_id INTEGER NOT NULL,
store_id INT NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
email VARCHAR(50) DEFAULT NULL,
address_id INT NOT NULL,
active CHAR(1) DEFAULT 'Y' NOT NULL,
create_date TIMESTAMP NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (customer_id),
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_customer_fk_store_id ON customer(store_id)
;
CREATE INDEX idx_customer_fk_address_id ON customer(address_id)
;
CREATE INDEX idx_customer_last_name ON customer(last_name)
;
--
-- Table structure for table film
--
CREATE TABLE film (
film_id INTEGER NOT NULL,
title VARCHAR(255) NOT NULL,
description BLOB SUB_TYPE TEXT DEFAULT NULL,
release_year VARCHAR(4) DEFAULT NULL,
language_id INTEGER NOT NULL,
original_language_id INTEGER DEFAULT NULL,
rental_duration INTEGER DEFAULT 3 NOT NULL,
rental_rate DECIMAL(4,2) DEFAULT 4.99 NOT NULL,
length INTEGER DEFAULT NULL,
replacement_cost DECIMAL(5,2) DEFAULT 19.99 NOT NULL,
rating VARCHAR(10) DEFAULT 'G',
special_features VARCHAR(100) DEFAULT NULL,
last_update TIMESTAMP,
PRIMARY KEY (film_id),
CONSTRAINT CHECK_special_features CHECK(special_features is null or
special_features like '%Trailers%' or
special_features like '%Commentaries%' or
special_features like '%Deleted Scenes%' or
special_features like '%Behind the Scenes%'),
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ,
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id)
)
;
CREATE INDEX idx_fk_language_id ON film(language_id)
;
CREATE INDEX idx_fk_original_language_id ON film(original_language_id)
;
--
-- Table structure for table film_actor
--
CREATE TABLE film_actor (
actor_id INT NOT NULL,
film_id INT NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (actor_id,film_id),
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_film_actor_film ON film_actor(film_id)
;
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id)
;
--
-- Table structure for table film_category
--
CREATE TABLE film_category (
film_id INT NOT NULL,
category_id INTEGER NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (film_id, category_id),
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_film_category_film ON film_category(film_id)
;
CREATE INDEX idx_fk_film_category_category ON film_category(category_id)
;
--
-- Table structure for table film_text
--
CREATE TABLE film_text (
film_id INTEGER NOT NULL,
title VARCHAR(255) NOT NULL,
description BLOB SUB_TYPE TEXT,
PRIMARY KEY (film_id)
)
;
--
-- Table structure for table inventory
--
CREATE TABLE inventory (
inventory_id INTEGER NOT NULL,
film_id INT NOT NULL,
store_id INT NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (inventory_id),
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_film_id ON inventory(film_id)
;
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id,film_id)
;
--
-- Table structure for table staff
--
CREATE TABLE staff (
staff_id INTEGER NOT NULL,
first_name VARCHAR(45) NOT NULL,
last_name VARCHAR(45) NOT NULL,
address_id INT NOT NULL,
picture BLOB DEFAULT NULL,
email VARCHAR(50) DEFAULT NULL,
store_id INT NOT NULL,
active INTEGER DEFAULT 1 NOT NULL,
username VARCHAR(16) NOT NULL,
password VARCHAR(40) DEFAULT NULL,
last_update TIMESTAMP,
PRIMARY KEY (staff_id),
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
)
;
CREATE INDEX idx_fk_staff_store_id ON staff(store_id)
;
CREATE INDEX idx_fk_staff_address_id ON staff(address_id)
;
--
-- Table structure for table store
--
CREATE TABLE store (
store_id INTEGER NOT NULL,
manager_staff_id INTEGER NOT NULL,
address_id INT NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (store_id),
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ,
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id)
)
;
CREATE INDEX idx_store_fk_manager_staff_id ON store(manager_staff_id)
;
CREATE INDEX idx_fk_store_address ON store(address_id)
;
--
-- Table structure for table payment
--
CREATE TABLE payment (
payment_id INTEGER NOT NULL,
customer_id INT NOT NULL,
staff_id INTEGER NOT NULL,
rental_id INT DEFAULT NULL,
amount DECIMAL(5,2) NOT NULL,
payment_date TIMESTAMP NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (payment_id),
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ,
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id)
)
;
CREATE INDEX idx_fk_staff_id ON payment(staff_id)
;
CREATE INDEX idx_fk_customer_id ON payment(customer_id)
;
CREATE TABLE rental (
rental_id INTEGER NOT NULL,
rental_date TIMESTAMP NOT NULL,
inventory_id INT NOT NULL,
customer_id INT NOT NULL,
return_date TIMESTAMP DEFAULT NULL,
staff_id INTEGER NOT NULL,
last_update TIMESTAMP,
PRIMARY KEY (rental_id),
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ,
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ,
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id)
)
;
CREATE INDEX idx_rental_fk_inventory_id ON rental(inventory_id)
;
CREATE INDEX idx_rental_fk_customer_id ON rental(customer_id)
;
CREATE INDEX idx_rental_fk_staff_id ON rental(staff_id)
;
CREATE UNIQUE INDEX idx_rental_uq ON rental (rental_date,inventory_id,customer_id)
;
--
-- View structure for view customer_list
--
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID,
cu.first_name||' '||cu.last_name AS name,
a.address AS address,
a.postal_code AS zip_code,
a.phone AS phone,
city.city AS city,
country.country AS country,
case when cu.active=1 then 'active' else '' end AS notes,
cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id
;
--
-- View structure for view film_list
--
CREATE VIEW film_list
AS
SELECT film.film_id AS FID,
film.title AS title,
film.description AS description,
category.name AS category,
film.rental_rate AS price,
film.length AS length,
film.rating AS rating,
actor.first_name||' '||actor.last_name AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id
;
--
-- View structure for view staff_list
--
CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID,
s.first_name||' '||s.last_name AS name,
a.address AS address,
a.postal_code AS zip_code,
a.phone AS phone,
city.city AS city,
country.country AS country,
s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id
;
--
-- View structure for view sales_by_store
--
CREATE VIEW sales_by_store
AS
SELECT
s.store_id
,c.city||','||cy.country AS store
,m.first_name||' '||m.last_name AS manager
,SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY
s.store_id
, c.city||','||cy.country
, m.first_name||' '||m.last_name
;
--
-- View structure for view sales_by_film_category
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--
CREATE VIEW sales_by_film_category
AS
SELECT
c.name AS category
, SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
;
--
-- View structure for view actor_info
--
/*
CREATE VIEW actor_info
AS
SELECT
a.actor_id,
a.first_name,
a.last_name,
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
FROM sakila.film f
INNER JOIN sakila.film_category fc
ON f.film_id = fc.film_id
INNER JOIN sakila.film_actor fa
ON f.film_id = fa.film_id
WHERE fc.category_id = c.category_id
AND fa.actor_id = a.actor_id
)
)
ORDER BY c.name SEPARATOR '; ')
AS film_info
FROM sakila.actor a
LEFT JOIN sakila.film_actor fa
ON a.actor_id = fa.actor_id
LEFT JOIN sakila.film_category fc
ON fa.film_id = fc.film_id
LEFT JOIN sakila.category c
ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
*/
-- TO DO PROCEDURES
-- TO DO TRIGGERS

231502
packages/nocodb-nest/tests/sqlite-sakila-db/02-sqlite-sakila-insert-data.sql

File diff suppressed because it is too large Load Diff

180
packages/nocodb-nest/tests/sqlite-sakila-db/03-sqlite-sakila-triggers.sql

@ -0,0 +1,180 @@
CREATE TRIGGER actor_trigger_ai AFTER INSERT ON actor
BEGIN
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER actor_trigger_au AFTER UPDATE ON actor
BEGIN
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER country_trigger_ai AFTER INSERT ON country
BEGIN
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER country_trigger_au AFTER UPDATE ON country
BEGIN
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER city_trigger_ai AFTER INSERT ON city
BEGIN
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER city_trigger_au AFTER UPDATE ON city
BEGIN
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER address_trigger_ai AFTER INSERT ON address
BEGIN
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER address_trigger_au AFTER UPDATE ON address
BEGIN
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER language_trigger_ai AFTER INSERT ON language
BEGIN
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER language_trigger_au AFTER UPDATE ON language
BEGIN
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER category_trigger_ai AFTER INSERT ON category
BEGIN
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER category_trigger_au AFTER UPDATE ON category
BEGIN
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER customer_trigger_ai AFTER INSERT ON customer
BEGIN
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER customer_trigger_au AFTER UPDATE ON customer
BEGIN
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_trigger_ai AFTER INSERT ON film
BEGIN
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_trigger_au AFTER UPDATE ON film
BEGIN
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_actor_trigger_ai AFTER INSERT ON film_actor
BEGIN
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_actor_trigger_au AFTER UPDATE ON film_actor
BEGIN
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_category_trigger_ai AFTER INSERT ON film_category
BEGIN
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER film_category_trigger_au AFTER UPDATE ON film_category
BEGIN
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER inventory_trigger_ai AFTER INSERT ON inventory
BEGIN
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER inventory_trigger_au AFTER UPDATE ON inventory
BEGIN
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER staff_trigger_ai AFTER INSERT ON staff
BEGIN
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER staff_trigger_au AFTER UPDATE ON staff
BEGIN
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER store_trigger_ai AFTER INSERT ON store
BEGIN
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER store_trigger_au AFTER UPDATE ON store
BEGIN
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER payment_trigger_ai AFTER INSERT ON payment
BEGIN
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER payment_trigger_au AFTER UPDATE ON payment
BEGIN
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER rental_trigger_ai AFTER INSERT ON rental
BEGIN
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;
CREATE TRIGGER rental_trigger_au AFTER UPDATE ON rental
BEGIN
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END
;

BIN
packages/nocodb-nest/tests/sqlite-sakila-db/sakila.db

Binary file not shown.

4
packages/nocodb-nest/tests/unit/.env.sample

@ -0,0 +1,4 @@
DB_USER=root
DB_PASSWORD=password
DB_PORT=3306
DB_HOST=localhost

5
packages/nocodb-nest/tests/unit/.pg.env

@ -0,0 +1,5 @@
DB_USER=postgres
DB_PASSWORD=password
DB_PORT=5432
DB_HOST=localhost
DB_CLIENT=pg

322
packages/nocodb-nest/tests/unit/TestDbMngr.ts

@ -0,0 +1,322 @@
import fs from 'fs';
import process from 'process';
import { knex } from 'knex';
import SqlMgrv2 from '../../src/db/sql-mgr/v2/SqlMgrv2';
import type { Knex } from 'knex';
import type { DbConfig } from '../../src/interface/config';
import NcConfigFactory from '../../src/utils/NcConfigFactory'
export default class TestDbMngr {
public static readonly dbName = 'test_meta';
public static readonly sakilaDbName = 'test_sakila';
public static metaKnex: Knex;
public static sakilaKnex: Knex;
public static defaultConnection = {
user: 'root',
password: 'password',
host: 'localhost',
port: 3306,
client: 'mysql2',
};
public static pgConnection = {
user: 'postgres',
password: 'password',
host: 'localhost',
port: 5432,
client: 'pg',
};
public static connection: {
user: string;
password: string;
host: string;
port: number;
client: string;
} = TestDbMngr.defaultConnection;
public static dbConfig: DbConfig;
static populateConnectionConfig() {
const { user, password, host, port, client } = TestDbMngr.defaultConnection;
TestDbMngr.connection = {
user: process.env['DB_USER'] || user,
password: process.env['DB_PASSWORD'] || password,
host: process.env['DB_HOST'] || host,
port: Number(process.env['DB_PORT']) || port,
client: process.env['DB_CLIENT'] || client,
};
console.log(TestDbMngr.connection);
}
static async testConnection(config: DbConfig) {
try {
console.log('Testing connection', TestDbMngr.connection);
return await SqlMgrv2.testConnection(config);
} catch (e) {
console.log(e);
return { code: -1, message: 'Connection invalid' };
}
}
static async init() {
TestDbMngr.populateConnectionConfig();
// common for both pg and mysql
if (await TestDbMngr.isDbConfigured()) {
await TestDbMngr.connectDb();
} else {
console.log('Mysql is not configured. Switching to sqlite');
await TestDbMngr.switchToSqlite();
}
}
private static async isDbConfigured() {
const { user, password, host, port, client } = TestDbMngr.connection;
const config = NcConfigFactory.urlToDbConfig(
`${client}://${user}:${password}@${host}:${port}`,
);
config.connection = {
user,
password,
host,
port,
};
const result = await TestDbMngr.testConnection(config);
return result.code !== -1;
}
static async connectDb() {
const { user, password, host, port, client } = TestDbMngr.connection;
if (!process.env[`DATABASE_URL`]) {
process.env[
`DATABASE_URL`
] = `${client}://${user}:${password}@${host}:${port}/${TestDbMngr.dbName}`;
}
TestDbMngr.dbConfig = NcConfigFactory.urlToDbConfig(
NcConfigFactory.extractXcUrlFromJdbc(process.env[`DATABASE_URL`]),
);
this.dbConfig.meta = {
tn: 'nc_evolutions',
dbAlias: 'db',
api: {
type: 'rest',
prefix: '',
graphqlDepthLimit: 10,
},
inflection: {
tn: 'camelize',
cn: 'camelize',
},
};
await TestDbMngr.setupMeta();
await TestDbMngr.setupSakila();
}
static async setupMeta() {
if (TestDbMngr.metaKnex) {
await TestDbMngr.metaKnex.destroy();
}
if (TestDbMngr.isSqlite()) {
await TestDbMngr.resetMetaSqlite();
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
return;
}
TestDbMngr.metaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
await TestDbMngr.resetDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
await TestDbMngr.metaKnex.destroy();
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig());
await TestDbMngr.useDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName);
}
static async setupSakila() {
if (TestDbMngr.sakilaKnex) {
await TestDbMngr.sakilaKnex.destroy();
}
if (TestDbMngr.isSqlite()) {
await TestDbMngr.seedSakila();
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
return;
}
TestDbMngr.sakilaKnex = knex(TestDbMngr.getDbConfigWithNoDb());
await TestDbMngr.resetDatabase(
TestDbMngr.sakilaKnex,
TestDbMngr.sakilaDbName,
);
await TestDbMngr.sakilaKnex.destroy();
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig());
await TestDbMngr.useDatabase(
TestDbMngr.sakilaKnex,
TestDbMngr.sakilaDbName,
);
}
static async switchToSqlite() {
// process.env[`DATABASE_URL`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.sqlite`;
TestDbMngr.dbConfig = {
client: 'sqlite3',
connection: {
filename: `${__dirname}/${TestDbMngr.dbName}.db`,
database: TestDbMngr.dbName,
},
useNullAsDefault: true,
meta: {
tn: 'nc_evolutions',
dbAlias: 'db',
api: {
type: 'rest',
prefix: '',
graphqlDepthLimit: 10,
},
inflection: {
tn: 'camelize',
cn: 'camelize',
},
},
};
process.env[
`NC_DB`
] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.db`;
await TestDbMngr.setupMeta();
await TestDbMngr.setupSakila();
}
private static async resetDatabase(knexClient, dbName) {
if (TestDbMngr.isSqlite()) {
// return knexClient.raw(`DELETE FROM sqlite_sequence`);
} else {
try {
await knexClient.raw(`DROP DATABASE ${dbName}`);
} catch (e) {}
await knexClient.raw(`CREATE DATABASE ${dbName}`);
console.log(`Database ${dbName} created`);
if (!TestDbMngr.isPg()) {
await knexClient.raw(`USE ${dbName}`);
}
}
}
static isSqlite() {
return TestDbMngr.dbConfig.client === 'sqlite3';
}
static isPg() {
return TestDbMngr.dbConfig.client === 'pg';
}
private static async useDatabase(knexClient, dbName) {
if (!TestDbMngr.isSqlite() && !TestDbMngr.isPg()) {
await knexClient.raw(`USE ${dbName}`);
}
}
static getDbConfigWithNoDb() {
const dbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
delete dbConfig.connection.database;
return dbConfig;
}
static getMetaDbConfig() {
return TestDbMngr.dbConfig;
}
private static resetMetaSqlite() {
if (fs.existsSync(`${__dirname}/test_meta.db`)) {
fs.unlinkSync(`${__dirname}/test_meta.db`);
}
}
static getSakilaDbConfig() {
const sakilaDbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig));
sakilaDbConfig.connection.database = TestDbMngr.sakilaDbName;
sakilaDbConfig.connection.multipleStatements = true;
if (TestDbMngr.isSqlite()) {
sakilaDbConfig.connection.filename = `${__dirname}/test_sakila.db`;
}
return sakilaDbConfig;
}
static async seedSakila() {
const testsDir = __dirname.replace('tests/unit', 'tests');
if (TestDbMngr.isSqlite()) {
if (fs.existsSync(`${__dirname}/test_sakila.db`)) {
fs.unlinkSync(`${__dirname}/test_sakila.db`);
}
fs.copyFileSync(
`${testsDir}/sqlite-sakila-db/sakila.db`,
`${__dirname}/test_sakila.db`,
);
} else if (TestDbMngr.isPg()) {
const schemaFile = fs
.readFileSync(`${testsDir}/pg-sakila-db/01-postgres-sakila-schema.sql`)
.toString();
const dataFile = fs
.readFileSync(
`${testsDir}/pg-sakila-db/02-postgres-sakila-insert-data.sql`,
)
.toString();
await TestDbMngr.sakilaKnex.raw(schemaFile);
await TestDbMngr.sakilaKnex.raw(dataFile);
} else {
const schemaFile = fs
.readFileSync(`${testsDir}/mysql-sakila-db/03-test-sakila-schema.sql`)
.toString();
const dataFile = fs
.readFileSync(`${testsDir}/mysql-sakila-db/04-test-sakila-data.sql`)
.toString();
await TestDbMngr.sakilaKnex.raw(schemaFile);
await TestDbMngr.sakilaKnex.raw(dataFile);
}
}
static async disableForeignKeyChecks(knexClient) {
if (TestDbMngr.isSqlite()) {
await knexClient.raw('PRAGMA foreign_keys = OFF');
} else if (TestDbMngr.isPg()) {
await knexClient.raw(`SET session_replication_role = 'replica'`);
} else {
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 0`);
}
}
static async enableForeignKeyChecks(knexClient) {
if (TestDbMngr.isSqlite()) {
await knexClient.raw(`PRAGMA foreign_keys = ON;`);
} else if (TestDbMngr.isPg()) {
await knexClient.raw(`SET session_replication_role = 'origin'`);
} else {
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 1`);
}
}
static async showAllTables(knexClient) {
if (TestDbMngr.isSqlite()) {
const tables = await knexClient.raw(
`SELECT name FROM sqlite_master WHERE type='table'`,
);
return tables
.filter((t) => t.name !== 'sqlite_sequence' && t.name !== '_evolutions')
.map((t) => t.name);
} else if (TestDbMngr.isPg()) {
const tables = await knexClient.raw(
`SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';`,
);
return tables.rows.map((t) => t.tablename);
} else {
const response = await knexClient.raw(`SHOW TABLES`);
return response[0].map((table) => Object.values(table)[0]);
}
}
}

274
packages/nocodb-nest/tests/unit/factory/column.ts

@ -0,0 +1,274 @@
import { UITypes } from 'nocodb-sdk';
import request from 'supertest';
import Column from '../../../src/models/Column';
import FormViewColumn from '../../../src/models/FormViewColumn';
import GalleryViewColumn from '../../../src/models/GalleryViewColumn';
import GridViewColumn from '../../../src/models/GridViewColumn';
import Model from '../../../src/models/Model';
import Project from '../../../src/models/Project';
import View from '../../../src/models/View';
import { isSqlite, isPg } from '../init/db';
const defaultColumns = function (context) {
return [
{
column_name: 'id',
title: 'Id',
uidt: 'ID',
},
{
column_name: 'title',
title: 'Title',
uidt: 'SingleLineText',
},
{
cdf: isPg(context) ? 'now()' : 'CURRENT_TIMESTAMP',
column_name: 'created_at',
title: 'CreatedAt',
dtxp: '',
dtxs: '',
uidt: 'DateTime',
dt: isPg(context) ? 'timestamp without time zone' : undefined,
},
{
cdf: isSqlite(context)
? 'CURRENT_TIMESTAMP'
: isPg(context)
? 'now()'
: 'CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP',
column_name: 'updated_at',
title: 'UpdatedAt',
dtxp: '',
dtxs: '',
uidt: 'DateTime',
dt: isPg(context) ? 'timestamp without time zone' : undefined,
},
];
};
const createColumn = async (context, table, columnAttr) => {
await request(context.app)
.post(`/api/v1/db/meta/tables/${table.id}/columns`)
.set('xc-auth', context.token)
.send({
...columnAttr,
});
const column: Column = (await table.getColumns()).find(
(column) => column.title === columnAttr.title
);
return column;
};
const createRollupColumn = async (
context,
{
project,
title,
rollupFunction,
table,
relatedTableName,
relatedTableColumnTitle,
}: {
project: Project;
title: string;
rollupFunction: string;
table: Model;
relatedTableName: string;
relatedTableColumnTitle: string;
}
) => {
const childBases = await project.getBases();
const childTable = await Model.getByIdOrName({
project_id: project.id,
base_id: childBases[0].id!,
table_name: relatedTableName,
});
const childTableColumns = await childTable.getColumns();
const childTableColumn = await childTableColumns.find(
(column) => column.title === relatedTableColumnTitle
);
const ltarColumn = (await table.getColumns()).find(
(column) =>
column.uidt === UITypes.LinkToAnotherRecord &&
column.colOptions?.fk_related_model_id === childTable.id
);
const rollupColumn = await createColumn(context, table, {
title: title,
uidt: UITypes.Rollup,
fk_relation_column_id: ltarColumn?.id,
fk_rollup_column_id: childTableColumn?.id,
rollup_function: rollupFunction,
table_name: table.table_name,
column_name: title,
});
return rollupColumn;
};
const createLookupColumn = async (
context,
{
project,
title,
table,
relatedTableName,
relatedTableColumnTitle,
}: {
project: Project;
title: string;
table: Model;
relatedTableName: string;
relatedTableColumnTitle: string;
}
) => {
const childBases = await project.getBases();
const childTable = await Model.getByIdOrName({
project_id: project.id,
base_id: childBases[0].id!,
table_name: relatedTableName,
});
const childTableColumns = await childTable.getColumns();
const childTableColumn = await childTableColumns.find(
(column) => column.title === relatedTableColumnTitle
);
if (!childTableColumn) {
throw new Error(
`Could not find column ${relatedTableColumnTitle} in ${relatedTableName}`
);
}
const ltarColumn = (await table.getColumns()).find(
(column) =>
column.uidt === UITypes.LinkToAnotherRecord &&
column.colOptions?.fk_related_model_id === childTable.id
);
const lookupColumn = await createColumn(context, table, {
title: title,
uidt: UITypes.Lookup,
fk_relation_column_id: ltarColumn?.id,
fk_lookup_column_id: childTableColumn?.id,
table_name: table.table_name,
column_name: title,
});
return lookupColumn;
};
const createQrCodeColumn = async (
context,
{
title,
table,
referencedQrValueTableColumnTitle,
}: {
title: string;
table: Model;
referencedQrValueTableColumnTitle: string;
}
) => {
const referencedQrValueTableColumnId = await table
.getColumns()
.then(
(cols) =>
cols.find(
(column) => column.title == referencedQrValueTableColumnTitle
)['id']
);
const qrCodeColumn = await createColumn(context, table, {
title: title,
uidt: UITypes.QrCode,
column_name: title,
fk_qr_value_column_id: referencedQrValueTableColumnId,
});
return qrCodeColumn;
};
const createBarcodeColumn = async (
context,
{
title,
table,
referencedBarcodeValueTableColumnTitle,
}: {
title: string;
table: Model;
referencedBarcodeValueTableColumnTitle: string;
}
) => {
const referencedBarcodeValueTableColumnId = await table
.getColumns()
.then(
(cols) =>
cols.find(
(column) => column.title == referencedBarcodeValueTableColumnTitle
)['id']
);
const barcodeColumn = await createColumn(context, table, {
title: title,
uidt: UITypes.Barcode,
column_name: title,
fk_barcode_value_column_id: referencedBarcodeValueTableColumnId,
});
return barcodeColumn;
};
const createLtarColumn = async (
context,
{
title,
parentTable,
childTable,
type,
}: {
title: string;
parentTable: Model;
childTable: Model;
type: string;
}
) => {
const ltarColumn = await createColumn(context, parentTable, {
title: title,
column_name: title,
uidt: UITypes.LinkToAnotherRecord,
parentId: parentTable.id,
childId: childTable.id,
type: type,
});
return ltarColumn;
};
const updateViewColumn = async (
context,
{ view, column, attr }: { column: Column; view: View; attr: any }
) => {
const res = await request(context.app)
.patch(`/api/v1/db/meta/views/${view.id}/columns/${column.id}`)
.set('xc-auth', context.token)
.send({
...attr,
});
const updatedColumn: FormViewColumn | GridViewColumn | GalleryViewColumn = (
await view.getColumns()
).find((column) => column.id === column.id)!;
return updatedColumn;
};
export {
defaultColumns,
createColumn,
createQrCodeColumn,
createBarcodeColumn,
createRollupColumn,
createLookupColumn,
createLtarColumn,
updateViewColumn,
};

81
packages/nocodb-nest/tests/unit/factory/project.ts

@ -0,0 +1,81 @@
import request from 'supertest';
import Project from '../../../src/models/Project';
const sakilaProjectConfig = (context) => {
let base;
if (
context.sakilaDbConfig.client === 'mysql2' ||
context.sakilaDbConfig.client === 'pg'
) {
base = {
type: context.sakilaDbConfig.client,
config: {
client: context.sakilaDbConfig.client,
connection: context.sakilaDbConfig.connection,
},
};
} else {
base = {
type: context.sakilaDbConfig.client,
config: {
client: context.sakilaDbConfig.client,
connection: {
client: context.sakilaDbConfig.client,
connection: context.sakilaDbConfig.connection,
},
},
};
}
base = {
...base,
inflection_column: 'camelize',
inflection_table: 'camelize',
};
return {
title: 'sakila',
bases: [base],
external: true,
};
};
const defaultProjectValue = {
title: 'Title',
};
const defaultSharedBaseValue = {
roles: 'viewer',
password: 'password123',
};
const createSharedBase = async (app, token, project, sharedBaseArgs = {}) => {
await request(app)
.post(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', token)
.send({
...defaultSharedBaseValue,
...sharedBaseArgs,
});
};
const createSakilaProject = async (context) => {
const response = await request(context.app)
.post('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send(sakilaProjectConfig(context));
return (await Project.getByTitleOrId(response.body.id)) as Project;
};
const createProject = async (context, projectArgs = defaultProjectValue) => {
const response = await request(context.app)
.post('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send(projectArgs);
return (await Project.getByTitleOrId(response.body.id)) as Project;
};
export { createProject, createSharedBase, createSakilaProject };

373
packages/nocodb-nest/tests/unit/factory/row.ts

@ -0,0 +1,373 @@
import { ColumnType, UITypes } from 'nocodb-sdk';
import request from 'supertest';
import Column from '../../../src/models/Column';
import Filter from '../../../src/models/Filter';
import Model from '../../../src/models/Model';
import Project from '../../../src/models/Project';
import Sort from '../../../src/models/Sort';
import NcConnectionMgrv2 from '../../../src/utils/common/NcConnectionMgrv2';
const rowValue = (column: ColumnType, index: number) => {
switch (column.uidt) {
case UITypes.Number:
return index;
case UITypes.SingleLineText:
return `test-${index}`;
case UITypes.Date:
return '2020-01-01';
case UITypes.DateTime:
return '2020-01-01 00:00:00';
case UITypes.Email:
return `test-${index}@example.com`;
default:
return `test-${index}`;
}
};
const rowMixedValue = (column: ColumnType, index: number) => {
// Array of country names
const countries = [
'Afghanistan',
'Albania',
'',
'Andorra',
'Angola',
'Antigua and Barbuda',
'Argentina',
null,
'Armenia',
'Australia',
'Austria',
'',
null,
];
// Array of sample random paragraphs (comma separated list of cities and countries). Not more than 200 characters
const longText = [
'Aberdeen, United Kingdom',
'Abidjan, Côte d’Ivoire',
'Abuja, Nigeria',
'',
'Addis Ababa, Ethiopia',
'Adelaide, Australia',
'Ahmedabad, India',
'Albuquerque, United States',
null,
'Alexandria, Egypt',
'Algiers, Algeria',
'Allahabad, India',
'',
null,
];
// Array of random integers, not more than 10000
const numbers = [33, null, 456, 333, 267, 34, 8754, 3234, 44, 33, null];
const decimals = [
33.3,
456.34,
333.3,
null,
267.5674,
34.0,
8754.0,
3234.547,
44.2647,
33.98,
null,
];
const duration = [10, 20, 30, 40, 50, 60, null, 70, 80, 90, null];
const rating = [0, 1, 2, 3, null, 0, 4, 5, 0, 1, null];
// Array of random sample email strings (not more than 100 characters)
const emails = [
'jbutt@gmail.com',
'josephine_darakjy@darakjy.org',
'art@venere.org',
'',
null,
'donette.foller@cox.net',
'simona@morasca.com',
'mitsue_tollner@yahoo.com',
'leota@hotmail.com',
'sage_wieser@cox.net',
'',
null,
];
// Array of random sample phone numbers
const phoneNumbers = [
'1-541-754-3010',
'504-621-8927',
'810-292-9388',
'856-636-8749',
'907-385-4412',
'513-570-1893',
'419-503-2484',
'773-573-6914',
'',
null,
];
// Array of random sample URLs
const urls = [
'https://www.google.com',
'https://www.facebook.com',
'https://www.youtube.com',
'https://www.amazon.com',
'https://www.wikipedia.org',
'https://www.twitter.com',
'https://www.instagram.com',
'https://www.linkedin.com',
'https://www.reddit.com',
'https://www.tiktok.com',
'https://www.pinterest.com',
'https://www.netflix.com',
'https://www.microsoft.com',
'https://www.apple.com',
'',
null,
];
const singleSelect = [
'jan',
'feb',
'mar',
'apr',
'may',
'jun',
'jul',
'aug',
'sep',
'oct',
'nov',
'dec',
null,
];
const multiSelect = [
'jan,feb,mar',
'apr,may,jun',
'jul,aug,sep',
'oct,nov,dec',
'jan,feb,mar',
null,
];
switch (column.uidt) {
case UITypes.Number:
case UITypes.Percent:
return numbers[index % numbers.length];
case UITypes.Decimal:
case UITypes.Currency:
return decimals[index % decimals.length];
case UITypes.Duration:
return duration[index % duration.length];
case UITypes.Rating:
return rating[index % rating.length];
case UITypes.SingleLineText:
return countries[index % countries.length];
case UITypes.Email:
return emails[index % emails.length];
case UITypes.PhoneNumber:
return phoneNumbers[index % phoneNumbers.length];
case UITypes.LongText:
return longText[index % longText.length];
case UITypes.Date:
// set startDate as 400 days before today
// eslint-disable-next-line no-case-declarations
const result = new Date();
result.setDate(result.getDate() - 400 + index);
return result.toISOString().slice(0, 10);
case UITypes.URL:
return urls[index % urls.length];
case UITypes.SingleSelect:
return singleSelect[index % singleSelect.length];
case UITypes.MultiSelect:
return multiSelect[index % multiSelect.length];
default:
return `test-${index}`;
}
};
const getRow = async (context, { project, table, id }) => {
const response = await request(context.app)
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/${id}`)
.set('xc-auth', context.token);
if (response.status !== 200) {
return undefined;
}
return response.body;
};
const listRow = async ({
project,
table,
options,
}: {
project: Project;
table: Model;
options?: {
limit?: any;
offset?: any;
filterArr?: Filter[];
sortArr?: Sort[];
};
}) => {
const bases = await project.getBases();
const baseModel = await Model.getBaseModelSQL({
id: table.id,
dbDriver: await NcConnectionMgrv2.get(bases[0]!),
});
const ignorePagination = !options;
return await baseModel.list(options, ignorePagination);
};
const getOneRow = async (
context,
{ project, table }: { project: Project; table: Model }
) => {
const response = await request(context.app)
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/find-one`)
.set('xc-auth', context.token);
return response.body;
};
const generateDefaultRowAttributes = ({
columns,
index = 0,
}: {
columns: ColumnType[];
index?: number;
}) =>
columns.reduce((acc, column) => {
if (
column.uidt === UITypes.LinkToAnotherRecord ||
column.uidt === UITypes.ForeignKey ||
column.uidt === UITypes.ID
) {
return acc;
}
acc[column.title!] = rowValue(column, index);
return acc;
}, {});
const createRow = async (
context,
{
project,
table,
index = 0,
}: {
project: Project;
table: Model;
index?: number;
}
) => {
const columns = await table.getColumns();
const rowData = generateDefaultRowAttributes({ columns, index });
const response = await request(context.app)
.post(`/api/v1/db/data/noco/${project.id}/${table.id}`)
.set('xc-auth', context.token)
.send(rowData);
return response.body;
};
const createBulkRows = async (
context,
{
project,
table,
values,
}: {
project: Project;
table: Model;
values: any[];
}
) => {
await request(context.app)
.post(`/api/v1/db/data/bulk/noco/${project.id}/${table.id}`)
.set('xc-auth', context.token)
.send(values)
.expect(200);
};
// Links 2 table rows together. Will create rows if ids are not provided
const createChildRow = async (
context,
{
project,
table,
childTable,
column,
rowId,
childRowId,
type,
}: {
project: Project;
table: Model;
childTable: Model;
column: Column;
rowId?: string;
childRowId?: string;
type: string;
}
) => {
if (!rowId) {
const row = await createRow(context, { project, table });
rowId = row['Id'];
}
if (!childRowId) {
const row = await createRow(context, { table: childTable, project });
childRowId = row['Id'];
}
await request(context.app)
.post(
`/api/v1/db/data/noco/${project.id}/${table.id}/${rowId}/${type}/${column.title}/${childRowId}`
)
.set('xc-auth', context.token);
const row = await getRow(context, { project, table, id: rowId });
return row;
};
// Mixed row attributes
const generateMixedRowAttributes = ({
columns,
index = 0,
}: {
columns: ColumnType[];
index?: number;
}) =>
columns.reduce((acc, column) => {
if (
column.uidt === UITypes.LinkToAnotherRecord ||
column.uidt === UITypes.ForeignKey ||
column.uidt === UITypes.ID
) {
return acc;
}
acc[column.title!] = rowMixedValue(column, index);
return acc;
}, {});
export {
createRow,
getRow,
createChildRow,
getOneRow,
listRow,
generateDefaultRowAttributes,
generateMixedRowAttributes,
createBulkRows,
rowMixedValue,
};

48
packages/nocodb-nest/tests/unit/factory/table.ts

@ -0,0 +1,48 @@
import request from 'supertest';
import { Model } from '../../../src/models';
import { defaultColumns } from './column';
import type { Project } from '../../../src/models';
const defaultTableValue = (context) => ({
table_name: 'Table1',
title: 'Table1_Title',
columns: defaultColumns(context),
});
const createTable = async (context, project, args = {}) => {
const defaultValue = defaultTableValue(context);
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({ ...defaultValue, ...args });
const table: Model = await Model.get(response.body.id);
return table;
};
const getTable = async ({
project,
name,
}: {
project: Project;
name: string;
}) => {
const bases = await project.getBases();
return await Model.getByIdOrName({
project_id: project.id,
base_id: bases[0].id!,
table_name: name,
});
};
const getAllTables = async ({ project }: { project: Project }) => {
const bases = await project.getBases();
const tables = await Model.list({
project_id: project.id,
base_id: bases[0].id!,
});
return tables;
};
export { createTable, getTable, getAllTables };

18
packages/nocodb-nest/tests/unit/factory/user.ts

@ -0,0 +1,18 @@
import request from 'supertest';
import { User } from '../../../src/models'
const defaultUserArgs = {
email: 'test@example.com',
password: 'A1234abh2@dsad',
};
const createUser = async (context, userArgs = {}) => {
const args = { ...defaultUserArgs, ...userArgs };
const response = await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args);
const user = await User.getByEmail(args.email);
return { token: response.body.token, user };
};
export { createUser, defaultUserArgs };

38
packages/nocodb-nest/tests/unit/factory/view.ts

@ -0,0 +1,38 @@
import { ViewTypes } from 'nocodb-sdk';
import request from 'supertest';
import Model from '../../../src/models/Model';
import View from '../../../src/models/View';
const createView = async (context, {title, table, type}: {title: string, table: Model, type: ViewTypes}) => {
const viewTypeStr = (type) => {
switch (type) {
case ViewTypes.GALLERY:
return 'galleries';
case ViewTypes.FORM:
return 'forms';
case ViewTypes.GRID:
return 'grids';
case ViewTypes.KANBAN:
return 'kanbans';
default:
throw new Error('Invalid view type');
}
};
const response = await request(context.app)
.post(`/api/v1/db/meta/tables/${table.id}/${viewTypeStr(type)}`)
.set('xc-auth', context.token)
.send({
title,
type,
});
if(response.status !== 200) {
throw new Error('createView',response.body.message);
}
const view = await View.getByTitleOrId({fk_model_id: table.id, titleOrId:title}) as View;
return view
}
export {createView}

24
packages/nocodb-nest/tests/unit/index.test.ts

@ -0,0 +1,24 @@
import 'mocha';
import restTests from './rest/index.test';
import modelTests from './model/index.test';
import TestDbMngr from './TestDbMngr'
import dotenv from 'dotenv';
process.env.NODE_ENV = 'test';
process.env.TEST = 'true';
process.env.NC_DISABLE_CACHE = 'true';
process.env.NC_DISABLE_TELE = 'true';
// Load environment variables from .env file
dotenv.config({
path: __dirname + '/.env'
});
(async function() {
await TestDbMngr.init();
modelTests();
restTests();
run();
})();

60
packages/nocodb-nest/tests/unit/init/cleanupMeta.ts

@ -0,0 +1,60 @@
import { Model, Project } from '../../../src/models';
import NcConnectionMgrv2 from '../../../src/utils/common/NcConnectionMgrv2';
import { orderedMetaTables } from '../../../src/utils/globals';
import TestDbMngr from '../TestDbMngr';
import { isPg } from './db';
const dropTablesAllNonExternalProjects = async () => {
const projects = await Project.list({});
const userCreatedTableNames: string[] = [];
await Promise.all(
projects
.filter((project) => project.is_meta)
.map(async (project) => {
await project.getBases();
const base = project.bases && project.bases[0];
if (!base) return;
const models = await Model.list({
project_id: project.id,
base_id: base.id!,
});
models.forEach((model) => {
userCreatedTableNames.push(model.table_name);
});
}),
);
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex);
for (const tableName of userCreatedTableNames) {
if (TestDbMngr.isPg()) {
await TestDbMngr.metaKnex.raw(`DROP TABLE "${tableName}" CASCADE`);
} else {
await TestDbMngr.metaKnex.raw(`DROP TABLE ${tableName}`);
}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex);
};
const cleanupMetaTables = async () => {
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex);
for (const tableName of orderedMetaTables) {
try {
await TestDbMngr.metaKnex.raw(`DELETE FROM ${tableName}`);
} catch (e) {}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex);
};
export default async function () {
try {
await NcConnectionMgrv2.destroyAll();
await dropTablesAllNonExternalProjects();
await cleanupMetaTables();
} catch (e) {
console.error('cleanupMeta', e);
}
}

116
packages/nocodb-nest/tests/unit/init/cleanupSakila.ts

@ -0,0 +1,116 @@
import { Audit, Project } from '../../../src/models'
import TestDbMngr from '../TestDbMngr';
const dropTablesOfSakila = async () => {
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.sakilaKnex);
for (const tableName of sakilaTableNames) {
try {
if (TestDbMngr.isPg()) {
await TestDbMngr.sakilaKnex.raw(
`DROP TABLE IF EXISTS "${tableName}" CASCADE`
);
} else {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
}
} catch (e) {}
}
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.sakilaKnex);
};
const dropSchemaAndSeedSakila = async () => {
try {
await TestDbMngr.sakilaKnex.raw(`DROP SCHEMA "public" CASCADE`);
await TestDbMngr.sakilaKnex.raw(`CREATE SCHEMA "public"`);
await TestDbMngr.seedSakila();
} catch (e) {
console.error('dropSchemaAndSeedSakila', e);
throw e;
}
};
const resetAndSeedSakila = async () => {
try {
await dropTablesOfSakila();
await TestDbMngr.seedSakila();
} catch (e) {
console.error('resetSakila', e);
throw e;
}
};
const cleanUpSakila = async () => {
try {
const sakilaProject = await Project.getByTitle('sakila');
const audits =
sakilaProject && (await Audit.projectAuditList(sakilaProject.id, {}));
if (audits?.length > 0) {
// if PG, drop schema
if (TestDbMngr.isPg()) {
return await dropSchemaAndSeedSakila();
}
// if mysql, drop tables
return await resetAndSeedSakila();
}
const tablesInSakila = await TestDbMngr.showAllTables(
TestDbMngr.sakilaKnex
);
await Promise.all(
tablesInSakila
.filter((tableName) => !sakilaTableNames.includes(tableName))
.map(async (tableName) => {
try {
if (TestDbMngr.isPg()) {
await TestDbMngr.sakilaKnex.raw(
`DROP TABLE "${tableName}" CASCADE`
);
} else {
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
}
} catch (e) {
console.error(e);
}
})
);
} catch (e) {
console.error('cleanUpSakila', e);
}
};
const sakilaTableNames = [
'actor',
'address',
'category',
'city',
'country',
'customer',
'film',
'film_actor',
'film_category',
'inventory',
'language',
'payment',
'payment_p2007_01',
'payment_p2007_02',
'payment_p2007_03',
'payment_p2007_04',
'payment_p2007_05',
'payment_p2007_06',
'rental',
'staff',
'store',
'actor_info',
'customer_list',
'film_list',
'nicer_but_slower_film_list',
'sales_by_film_category',
'sales_by_store',
'staff_list',
];
export { cleanUpSakila, resetAndSeedSakila };

18
packages/nocodb-nest/tests/unit/init/db.ts

@ -0,0 +1,18 @@
import { DbConfig } from '../../../src/interface/config';
const isSqlite = (context) => {
return (
(context.dbConfig as DbConfig).client === 'sqlite' ||
(context.dbConfig as DbConfig).client === 'sqlite3'
);
};
const isPg = (context) => {
return (context.dbConfig as DbConfig).client === 'pg';
};
const isMysql = (context) =>
(context.dbConfig as DbConfig).client === 'mysql' ||
(context.dbConfig as DbConfig).client === 'mysql2';
export { isSqlite, isMysql, isPg };

43
packages/nocodb-nest/tests/unit/init/index.ts

@ -0,0 +1,43 @@
import express from 'express';
import nocobuild from '../../../src/nocobuild'
// import { Noco } from '../../../src/lib';
import cleanupMeta from './cleanupMeta';
import {cleanUpSakila, resetAndSeedSakila} from './cleanupSakila';
import { createUser } from '../factory/user';
let server;
const serverInit = async () => {
const serverInstance = express();
serverInstance.enable('trust proxy');
// serverInstance.use(await Noco.init());
await nocobuild(serverInstance)
serverInstance.use(function(req, res, next){
// 50 sec timeout
req.setTimeout(500000, function(){
console.log('Request has timed out.');
res.send(408);
});
next();
});
return serverInstance;
};
const isFirstTimeRun = () => !server
export default async function () {
const {default: TestDbMngr} = await import('../TestDbMngr');
if (isFirstTimeRun()) {
await resetAndSeedSakila();
server = await serverInit();
}
await cleanUpSakila();
await cleanupMeta();
const { token } = await createUser({ app: server }, { roles: 'editor' });
return { app: server, token, dbConfig: TestDbMngr.dbConfig, sakilaDbConfig: TestDbMngr.getSakilaDbConfig() };
}

10
packages/nocodb-nest/tests/unit/model/index.test.ts

@ -0,0 +1,10 @@
import 'mocha';
import baseModelSqlTest from './tests/baseModelSql.test';
function modelTests() {
baseModelSqlTest();
}
export default function () {
describe('Model', modelTests);
}

591
packages/nocodb-nest/tests/unit/model/tests/baseModelSql.test.ts

@ -0,0 +1,591 @@
import 'mocha';
import { BaseModelSqlv2 } from '../../../../src/db/BaseModelSqlv2'
import NcConnectionMgrv2 from '../../../../src/utils/common/NcConnectionMgrv2'
import init from '../../init';
import { createProject } from '../../factory/project';
import { createTable } from '../../factory/table';
import Base from '../../../../src/models/Base';
import Model from '../../../../src/models/Model';
import Project from '../../../../src/models/Project';
import View from '../../../../src/models/View';
import { createRow, generateDefaultRowAttributes } from '../../factory/row';
import Audit from '../../../../src/models/Audit';
import { expect } from 'chai';
import Filter from '../../../../src/models/Filter';
import { createLtarColumn } from '../../factory/column';
import LinkToAnotherRecordColumn from '../../../../src/models/LinkToAnotherRecordColumn';
import { isPg, isSqlite } from '../../init/db';
function baseModelSqlTests() {
let context;
let project: Project;
let table: Model;
let view: View;
let baseModelSql: BaseModelSqlv2;
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project);
view = await table.getViews()[0];
const base = await Base.get(table.base_id);
baseModelSql = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(base),
model: table,
view,
});
});
it('Insert record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const columns = await table.getColumns();
let inputData: any = generateDefaultRowAttributes({ columns });
const response = await baseModelSql.insert(
generateDefaultRowAttributes({ columns }),
undefined,
request
);
const insertedRow = (await baseModelSql.list())[0];
if (isPg(context)) {
inputData.CreatedAt = new Date(inputData.CreatedAt).toISOString();
inputData.UpdatedAt = new Date(inputData.UpdatedAt).toISOString();
insertedRow.CreatedAt = new Date(insertedRow.CreatedAt).toISOString();
insertedRow.UpdatedAt = new Date(insertedRow.UpdatedAt).toISOString();
response.CreatedAt = new Date(response.CreatedAt).toISOString();
response.UpdatedAt = new Date(response.UpdatedAt).toISOString();
}
expect(insertedRow).to.include(inputData);
expect(insertedRow).to.include(response);
const rowInsertedAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'INSERT');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'INSERT',
description: 'Record with ID 1 has been inserted into Table Table1_Title',
});
});
it('Bulk insert record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const insertedRows = await baseModelSql.list();
if (isPg(context)) {
insertedRows.forEach((row) => {
row.CreatedAt = new Date(row.CreatedAt).toISOString();
row.UpdatedAt = new Date(row.UpdatedAt).toISOString();
});
}
bulkData.forEach((inputData: any, index) => {
if (isPg(context)) {
inputData.CreatedAt = new Date(inputData.CreatedAt).toISOString();
inputData.UpdatedAt = new Date(inputData.UpdatedAt).toISOString();
}
expect(insertedRows[index]).to.include(inputData);
});
const rowBulkInsertedAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_INSERT');
expect(rowBulkInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_INSERT',
status: null,
description: '10 records have been bulk inserted in Table1_Title',
details: null,
});
});
it('Update record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const columns = await table.getColumns();
await baseModelSql.insert(generateDefaultRowAttributes({ columns }));
const rowId = 1;
await baseModelSql.updateByPk(rowId, { Title: 'test' }, undefined, request);
const updatedRow = await baseModelSql.readByPk(1);
expect(updatedRow).to.include({ Id: rowId, Title: 'test' });
const rowUpdatedAudit = (await Audit.projectAuditList(project.id, {})).find(
(audit) => audit.op_sub_type === 'UPDATE'
);
expect(rowUpdatedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'UPDATE',
description: 'Record with ID 1 has been updated in Table Table1_Title.\nColumn "Title" got changed from "test-0" to "test"',
});
});
it('Bulk update record', async () => {
// Since sqlite doesn't support multiple sql connections, we can't test bulk update in sqlite
if (isSqlite(context)) return;
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const insertedRows: any[] = await baseModelSql.list();
await baseModelSql.bulkUpdate(
insertedRows.map((row) => ({ ...row, Title: `new-${row['Title']}` })),
{ cookie: request }
);
const updatedRows = await baseModelSql.list();
updatedRows.forEach((row, index) => {
expect(row['Title']).to.equal(`new-test-${index}`);
});
const rowBulkUpdateAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
expect(rowBulkUpdateAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_UPDATE',
status: null,
description: '10 records have been bulk updated in Table1_Title',
details: null,
});
});
it('Bulk update all record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const idColumn = columns.find((column) => column.title === 'Id')!;
await baseModelSql.bulkUpdateAll(
{
filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
}),
],
},
{ Title: 'new-1' },
{ cookie: request }
);
const updatedRows = await baseModelSql.list();
updatedRows.forEach((row) => {
if (row.id < 5) expect(row['Title']).to.equal('new-1');
});
const rowBulkUpdateAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_UPDATE');
expect(rowBulkUpdateAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_UPDATE',
status: null,
description: '4 records have been bulk updated in Table1_Title',
details: null,
});
});
it('Delete record', async () => {
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
params: { id: 1 },
};
const columns = await table.getColumns();
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const rowIdToDeleted = 1;
await baseModelSql.delByPk(rowIdToDeleted, undefined, request);
const deletedRow = await baseModelSql.readByPk(rowIdToDeleted);
expect(deletedRow).to.be.an('object').that.is.empty;
console.log('Delete record', await Audit.projectAuditList(project.id, {}));
const rowDeletedAudit = (await Audit.projectAuditList(project.id, {})).find(
(audit) => audit.op_sub_type === 'DELETE'
);
expect(rowDeletedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'DELETE',
description: 'Record with ID 1 has been deleted in Table Table1_Title',
});
});
it('Bulk delete records', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const insertedRows: any[] = await baseModelSql.list();
await baseModelSql.bulkDelete(
insertedRows
.filter((row) => row['Id'] < 5)
.map((row) => ({ id: row['Id'] })),
{ cookie: request }
);
const remainingRows = await baseModelSql.list();
expect(remainingRows).to.length(6);
const rowBulkDeleteAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_DELETE');
expect(rowBulkDeleteAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_DELETE',
status: null,
description: '4 records have been bulk deleted in Table1_Title',
details: null,
});
});
it('Bulk delete all record', async () => {
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
const bulkData = Array(10)
.fill(0)
.map((_, index) => generateDefaultRowAttributes({ columns, index }));
await baseModelSql.bulkInsert(bulkData, { cookie: request });
const idColumn = columns.find((column) => column.title === 'Id')!;
await baseModelSql.bulkDeleteAll(
{
filterArr: [
new Filter({
logical_op: 'and',
fk_column_id: idColumn.id,
comparison_op: 'lt',
value: 5,
}),
],
},
{ cookie: request }
);
const remainingRows = await baseModelSql.list();
expect(remainingRows).to.length(6);
const rowBulkDeleteAudit = (
await Audit.projectAuditList(project.id, {})
).find((audit) => audit.op_sub_type === 'BULK_DELETE');
expect(rowBulkDeleteAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
fk_model_id: table.id,
project_id: project.id,
row_id: null,
op_type: 'DATA',
op_sub_type: 'BULK_DELETE',
status: null,
description: '4 records have been bulk deleted in Table1_Title',
details: null,
});
});
it('Nested insert', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
});
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: 'hm',
});
const childRow = await createRow(context, {
project,
table: childTable,
});
const ltarColOptions =
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
await baseModelSql.nestedInsert(
{
...generateDefaultRowAttributes({ columns }),
[ltarColumn.title]: [{ Id: childRow['Id'] }],
},
undefined,
request
);
const childBaseModel = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view,
});
const insertedChildRow = await childBaseModel.readByPk(childRow['Id']);
expect(insertedChildRow[childCol.column_name]).to.equal(childRow['Id']);
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {}))
.filter((audit) => audit.fk_model_id === table.id)
.find((audit) => audit.op_sub_type === 'INSERT');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'INSERT',
description: 'Record with ID 1 has been inserted into Table Table1_Title',
});
});
it('Link child', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
});
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: 'hm',
});
const insertedChildRow = await createRow(context, {
project,
table: childTable,
});
const ltarColOptions =
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
await baseModelSql.insert(
generateDefaultRowAttributes({ columns }),
undefined,
request
);
const insertedRow = await baseModelSql.readByPk(1);
await baseModelSql.addChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request,
});
const childBaseModel = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view,
});
const updatedChildRow = await childBaseModel.readByPk(
insertedChildRow['Id']
);
expect(updatedChildRow[childCol.column_name]).to.equal(insertedRow['Id']);
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {}))
.filter((audit) => audit.fk_model_id === table.id)
.find((audit) => audit.op_sub_type === 'LINK_RECORD');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'LINK_RECORD',
description:
'Record [id:1] has been linked with record [id:1] in Table1_Title',
});
});
it('Unlink child', async () => {
const childTable = await createTable(context, project, {
title: 'Child Table',
table_name: 'child_table',
});
const ltarColumn = await createLtarColumn(context, {
title: 'Ltar Column',
parentTable: table,
childTable,
type: 'hm',
});
const insertedChildRow = await createRow(context, {
project,
table: childTable,
});
const ltarColOptions =
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>();
const childCol = await ltarColOptions.getChildColumn();
const columns = await table.getColumns();
const request = {
clientIp: '::ffff:192.0.0.1',
user: { email: 'test@example.com' },
};
await baseModelSql.insert(
generateDefaultRowAttributes({ columns }),
undefined,
request
);
const insertedRow = await baseModelSql.readByPk(1);
await baseModelSql.addChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request,
});
await baseModelSql.removeChild({
colId: ltarColumn.id,
rowId: insertedRow['Id'],
childId: insertedChildRow['Id'],
cookie: request,
});
const childBaseModel = new BaseModelSqlv2({
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)),
model: childTable,
view,
});
const updatedChildRow = await childBaseModel.readByPk(
insertedChildRow['Id']
);
expect(updatedChildRow[childCol.column_name]).to.be.null;
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {}))
.filter((audit) => audit.fk_model_id === table.id)
.find((audit) => audit.op_sub_type === 'UNLINK_RECORD');
expect(rowInsertedAudit).to.include({
user: 'test@example.com',
ip: '::ffff:192.0.0.1',
base_id: null,
project_id: project.id,
fk_model_id: table.id,
row_id: '1',
op_type: 'DATA',
op_sub_type: 'UNLINK_RECORD',
description:
'Record [id:1] has been unlinked with record [id:1] in Table1_Title',
});
});
}
export default function () {
describe('BaseModelSql', baseModelSqlTests);
}

26
packages/nocodb-nest/tests/unit/rest/index.test.ts

@ -0,0 +1,26 @@
import 'mocha';
import authTests from './tests/auth.test';
import orgTests from './tests/org.test';
import projectTests from './tests/project.test';
import columnTypeSpecificTests from './tests/columnTypeSpecific.test';
import tableTests from './tests/table.test';
import tableRowTests from './tests/tableRow.test';
import viewRowTests from './tests/viewRow.test';
import attachmentTests from './tests/attachment.test';
import filterTest from './tests/filter.test';
function restTests() {
authTests();
orgTests();
projectTests();
tableTests();
tableRowTests();
viewRowTests();
columnTypeSpecificTests();
attachmentTests();
filterTest();
}
export default function () {
describe('Rest', restTests);
}

172
packages/nocodb-nest/tests/unit/rest/tests/attachment.test.ts

@ -0,0 +1,172 @@
import { expect } from 'chai';
import fs from 'fs';
import { OrgUserRoles, ProjectRoles } from 'nocodb-sdk';
import path from 'path';
import 'mocha';
import request from 'supertest';
import { createProject } from '../../factory/project';
import init from '../../init';
const FILE_PATH = path.join(__dirname, 'test.txt');
// Test case list
// 1. Upload file - Super admin
// 2. Upload file - Without token
// 3. Upload file - Org level viewer
// 4. Upload file - Org level creator
// 5. Upload file - Org level viewer with editor role in a project
function attachmentTests() {
let context;
beforeEach(async function () {
context = await init();
fs.writeFileSync(FILE_PATH, 'test', `utf-8`);
context = await init();
});
afterEach(function () {
fs.unlinkSync(FILE_PATH);
});
it('Upload file - Super admin', async () => {
const response = await request(context.app)
.post('/api/v1/db/storage/upload')
.attach('files', FILE_PATH)
.set('xc-auth', context.token)
.expect(200);
const attachments = response.body;
expect(attachments).to.be.an('array');
expect(attachments[0].title).to.be.eq(path.basename(FILE_PATH));
});
it('Upload file - Without token', async () => {
const response = await request(context.app)
.post('/api/v1/db/storage/upload')
.attach('files', FILE_PATH)
.expect(401);
const msg = response.body.msg;
expect(msg).to.be.eq('Unauthorized');
});
it('Upload file - Org level viewer', async () => {
// signup a user
const args = {
email: 'dummyuser@example.com',
password: 'A1234abh2@dsad',
};
const signupResponse = await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args)
.expect(200);
const response = await request(context.app)
.post('/api/v1/db/storage/upload')
.attach('files', FILE_PATH)
.set('xc-auth', signupResponse.body.token)
.expect(400);
const msg = response.body.msg;
expect(msg).to.be.eq('Upload not allowed');
});
it('Upload file - Org level creator', async () => {
// signup a user
const args = {
email: 'dummyuser@example.com',
password: 'A1234abh2@dsad',
};
await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args)
.expect(200);
// update user role to creator
const usersListResponse = await request(context.app)
.get('/api/v1/users')
.set('xc-auth', context.token)
.expect(200);
const user = usersListResponse.body.list.find(
(u) => u.email === args.email
);
expect(user).to.have.property('roles').to.be.equal(OrgUserRoles.VIEWER);
await request(context.app)
.patch('/api/v1/users/' + user.id)
.set('xc-auth', context.token)
.send({ roles: OrgUserRoles.CREATOR })
.expect(200);
const signinResponse = await request(context.app)
.post('/api/v1/auth/user/signin')
// pass empty data in await request
.send(args)
.expect(200);
const response = await request(context.app)
.post('/api/v1/db/storage/upload')
.attach('files', FILE_PATH)
.set('xc-auth', signinResponse.body.token)
.expect(200);
const attachments = response.body;
expect(attachments).to.be.an('array');
expect(attachments[0].title).to.be.eq(path.basename(FILE_PATH));
});
it('Upload file - Org level viewer with editor role in a project', async () => {
// signup a new user
const args = {
email: 'dummyuser@example.com',
password: 'A1234abh2@dsad',
};
await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args)
.expect(200);
const newProject = await createProject(context, {
title: 'NewTitle1',
});
// invite user to project with editor role
await request(context.app)
.post(`/api/v1/db/meta/projects/${newProject.id}/users`)
.set('xc-auth', context.token)
.send({
roles: ProjectRoles.EDITOR,
email: args.email,
project_id: newProject.id,
projectName: newProject.title,
})
.expect(200);
// signin to get user token
const signinResponse = await request(context.app)
.post('/api/v1/auth/user/signin')
// pass empty data in await request
.send(args)
.expect(200);
const response = await request(context.app)
.post('/api/v1/db/storage/upload')
.attach('files', FILE_PATH)
.set('xc-auth', signinResponse.body.token)
.expect(200);
const attachments = response.body;
expect(attachments).to.be.an('array');
expect(attachments[0].title).to.be.eq(path.basename(FILE_PATH));
});
}
export default function () {
describe('Attachment', attachmentTests);
}

190
packages/nocodb-nest/tests/unit/rest/tests/auth.test.ts

@ -0,0 +1,190 @@
import { expect } from 'chai';
import 'mocha';
import request from 'supertest';
import init from '../../init';
import { defaultUserArgs } from '../../factory/user';
// Test case list
// 1. Signup with valid email
// 2. Signup with invalid email
// 3. Signup with invalid password
// 4. Signin with valid credentials
// 5. Signin without email and password
// 6. Signin with invalid credentials
// 7. Signin with invalid password
// 8. me without token
// 9. me with token
// 10. forgot password with non-existing email id
// 11. TBD: forgot password with existing email id
// 12. Change password
// 13. Change password - after logout
// 14. TBD: Reset Password with an invalid token
// 15. TBD: Email validate with an invalid token
// 16. TBD: Email validate with a valid token
// 17. TBD: Forgot password validate with a valid token
// 18. TBD: Reset Password with an valid token
// 19. TBD: refresh token api
function authTests() {
let context;
beforeEach(async function () {
context = await init();
});
it('Signup with valid email', async () => {
const response = await request(context.app)
.post('/api/v1/auth/user/signup')
.send({ email: 'new@example.com', password: defaultUserArgs.password })
.expect(200);
const token = response.body.token;
expect(token).to.be.a('string');
});
it('Signup with invalid email', async () => {
await request(context.app)
.post('/api/v1/auth/user/signup')
.send({ email: 'test', password: defaultUserArgs.password })
.expect(400);
});
it('Signup with invalid passsword', async () => {
await request(context.app)
.post('/api/v1/auth/user/signup')
.send({ email: defaultUserArgs.email, password: 'weakpass' })
.expect(400);
});
it('Signin with valid credentials', async () => {
const response = await request(context.app)
.post('/api/v1/auth/user/signin')
.send({
email: defaultUserArgs.email,
password: defaultUserArgs.password,
})
.expect(200);
const token = response.body.token;
expect(token).to.be.a('string');
});
it('Signup without email and password', async () => {
await request(context.app)
.post('/api/v1/auth/user/signin')
// pass empty data in await request
.send({})
.expect(400);
});
it('Signin with invalid credentials', async () => {
await request(context.app)
.post('/api/v1/auth/user/signin')
.send({ email: 'abc@abc.com', password: defaultUserArgs.password })
.expect(400);
});
it('Signin with invalid password', async () => {
await request(context.app)
.post('/api/v1/auth/user/signin')
.send({ email: defaultUserArgs.email, password: 'wrongPassword' })
.expect(400);
});
it('me without token', async () => {
const response = await request(context.app)
.get('/api/v1/auth/user/me')
.unset('xc-auth')
.expect(200);
if (!response.body?.roles?.guest) {
return new Error('User should be guest');
}
});
it('me with token', async () => {
const response = await request(context.app)
.get('/api/v1/auth/user/me')
.set('xc-auth', context.token)
.expect(200);
const email = response.body.email;
expect(email).to.equal(defaultUserArgs.email);
});
it('Forgot password with a non-existing email id', async () => {
await request(context.app)
.post('/api/v1/auth/password/forgot')
.send({ email: 'nonexisting@email.com' })
.expect(400);
});
// todo: fix mailer issues
// it('Forgot password with an existing email id', function () {});
it('Change password', async () => {
await request(context.app)
.post('/api/v1/auth/password/change')
.set('xc-auth', context.token)
.send({
currentPassword: defaultUserArgs.password,
newPassword: 'NEW' + defaultUserArgs.password,
})
.expect(200);
});
it('Change password - after logout', async () => {
await request(context.app)
.post('/api/v1/auth/password/change')
.unset('xc-auth')
.send({
currentPassword: defaultUserArgs.password,
newPassword: 'NEW' + defaultUserArgs.password,
})
.expect(401);
});
// todo:
it('Reset Password with an invalid token', async () => {
await request(context.app)
.post('/api/v1/auth/password/reset/someRandomValue')
.send({ email: defaultUserArgs.email })
.expect(400);
});
it('Email validate with an invalid token', async () => {
await request(context.app)
.post('/api/v1/auth/email/validate/someRandomValue')
.send({ email: defaultUserArgs.email })
.expect(400);
});
// todo:
// it('Email validate with a valid token', async () => {
// // await request(context.app)
// // .post('/auth/email/validate/someRandomValue')
// // .send({email: EMAIL_ID})
// // .expect(500, done);
// });
// todo:
// it('Forgot password validate with a valid token', async () => {
// // await request(context.app)
// // .post('/auth/token/validate/someRandomValue')
// // .send({email: EMAIL_ID})
// // .expect(500, done);
// });
// todo:
// it('Reset Password with an valid token', async () => {
// // await request(context.app)
// // .post('/auth/password/reset/someRandomValue')
// // .send({password: 'anewpassword'})
// // .expect(500, done);
// });
// todo: refresh token api
}
export default function () {
describe('Auth', authTests);
}

102
packages/nocodb-nest/tests/unit/rest/tests/columnTypeSpecific.test.ts

@ -0,0 +1,102 @@
import 'mocha';
import { title } from 'process';
import request from 'supertest';
import { UITypes } from 'nocodb-sdk';
import { expect } from 'chai';
import init from '../../init';
import { createProject, createSakilaProject } from '../../factory/project';
import { createColumn, createQrCodeColumn } from '../../factory/column';
import { getTable } from '../../factory/table';
import type Model from '../../../../src/models/Model';
import type Project from '../../../../src/models/Project';
import type Column from '../../../../src/models/Column';
// Test case list
// 1. Qr Code Column
// a. adding a QR code column which references another column
// - delivers the same cell values as the referenced column
// - gets deleted if the referenced column gets deleted
function columnTypeSpecificTests() {
let context;
let project: Project;
let sakilaProject: Project;
let customerTable: Model;
let qrValueReferenceColumn: Column;
const qrValueReferenceColumnTitle = 'Qr Value Column';
const qrCodeReferenceColumnTitle = 'Qr Code Column';
beforeEach(async function () {
context = await init();
sakilaProject = await createSakilaProject(context);
project = await createProject(context);
customerTable = await getTable({
project: sakilaProject,
name: 'customer',
});
});
describe('Qr Code Column', () => {
beforeEach(async function () {
qrValueReferenceColumn = await createColumn(context, customerTable, {
title: qrValueReferenceColumnTitle,
uidt: UITypes.SingleLineText,
table_name: customerTable.table_name,
column_name: title,
});
});
describe('adding a QR code column which references another column ', async () => {
beforeEach(async function () {
await createQrCodeColumn(context, {
title: qrCodeReferenceColumnTitle,
table: customerTable,
referencedQrValueTableColumnTitle: qrValueReferenceColumnTitle,
});
});
it('delivers the same cell values as the referenced column', async () => {
const resp = await request(context.app)
.get(`/api/v1/db/data/noco/${sakilaProject.id}/${customerTable.id}`)
.set('xc-auth', context.token)
.expect(200);
expect(resp.body.list[0][qrValueReferenceColumnTitle]).to.eql(
resp.body.list[0][qrCodeReferenceColumnTitle],
);
expect(
resp.body.list.map((row) => row[qrValueReferenceColumnTitle]),
).to.eql(resp.body.list.map((row) => row[qrCodeReferenceColumnTitle]));
});
it('gets deleted if the referenced column gets deleted', async () => {
// delete referenced value column
const columnsBeforeReferencedColumnDeleted =
await customerTable.getColumns();
expect(
columnsBeforeReferencedColumnDeleted.some(
(col) => col['title'] === qrCodeReferenceColumnTitle,
),
).to.eq(true);
const response = await request(context.app)
.delete(`/api/v1/db/meta/columns/${qrValueReferenceColumn.id}`)
.set('xc-auth', context.token)
.send({});
const columnsAfterReferencedColumnDeleted =
await customerTable.getColumns();
expect(
columnsAfterReferencedColumnDeleted.some(
(col) => col['title'] === qrCodeReferenceColumnTitle,
),
).to.eq(false);
});
});
});
}
export default function () {
describe('Column types specific behavior', columnTypeSpecificTests);
}

962
packages/nocodb-nest/tests/unit/rest/tests/filter.test.ts

@ -0,0 +1,962 @@
import 'mocha';
import init from '../../init';
import { createProject } from '../../factory/project';
import Project from '../../../../src/models/Project';
import { createTable } from '../../factory/table';
import { UITypes } from 'nocodb-sdk';
import { createBulkRows, rowMixedValue, listRow } from '../../factory/row';
import Model from '../../../../src/models/Model';
import { expect } from 'chai';
import request from 'supertest';
const debugMode = true;
// Test case list
async function retrieveRecordsAndValidate(
filter: {
comparison_op: string;
value: string;
fk_column_id: any;
status: string;
logical_op: string;
},
title: string
) {
let expectedRecords = [];
let toFloat = false;
if (
['Number', 'Decimal', 'Currency', 'Percent', 'Duration', 'Rating'].includes(
title
)
) {
toFloat = true;
}
// case for all comparison operators
switch (filter.comparison_op) {
case 'eq':
expectedRecords = unfilteredRecords.filter(
(record) =>
(toFloat ? parseFloat(record[title]) : record[title]) ===
(toFloat ? parseFloat(filter.value) : filter.value)
);
break;
case 'neq':
expectedRecords = unfilteredRecords.filter(
(record) =>
(toFloat ? parseFloat(record[title]) : record[title]) !==
(toFloat ? parseFloat(filter.value) : filter.value)
);
break;
case 'null':
expectedRecords = unfilteredRecords.filter(
(record) => record[title] === null
);
break;
case 'notnull':
expectedRecords = unfilteredRecords.filter(
(record) => record[title] !== null
);
break;
case 'empty':
expectedRecords = unfilteredRecords.filter(
(record) => record[title] === ''
);
break;
case 'notempty':
expectedRecords = unfilteredRecords.filter(
(record) => record[title] !== ''
);
break;
case 'like':
expectedRecords = unfilteredRecords.filter((record) =>
record[title]?.includes(filter.value)
);
break;
case 'nlike':
expectedRecords = unfilteredRecords.filter(
(record) => !record[title]?.includes(filter.value)
);
break;
case 'gt':
expectedRecords = unfilteredRecords.filter(
(record) =>
(toFloat ? parseFloat(record[title]) : record[title]) >
(toFloat ? parseFloat(filter.value) : filter.value) &&
record[title] !== null
);
break;
case 'gte':
expectedRecords = unfilteredRecords.filter(
(record) =>
(toFloat ? parseFloat(record[title]) : record[title]) >=
(toFloat ? parseFloat(filter.value) : filter.value) &&
record[title] !== null
);
break;
case 'lt':
expectedRecords = unfilteredRecords.filter((record) =>
title === 'Rating'
? (toFloat ? parseFloat(record[title]) : record[title]) <
(toFloat ? parseFloat(filter.value) : filter.value) ||
record[title] === null
: (toFloat ? parseFloat(record[title]) : record[title]) <
(toFloat ? parseFloat(filter.value) : filter.value) &&
record[title] !== null
);
break;
case 'lte':
expectedRecords = unfilteredRecords.filter((record) =>
title === 'Rating'
? (toFloat ? parseFloat(record[title]) : record[title]) <=
(toFloat ? parseFloat(filter.value) : filter.value) ||
record[title] === null
: (toFloat ? parseFloat(record[title]) : record[title]) <=
(toFloat ? parseFloat(filter.value) : filter.value) &&
record[title] !== null
);
break;
case 'anyof':
expectedRecords = unfilteredRecords.filter((record) => {
const values = filter.value.split(',');
const recordValue = record[title]?.split(',');
return values.some((value) => recordValue?.includes(value));
});
break;
case 'nanyof':
expectedRecords = unfilteredRecords.filter((record) => {
const values = filter.value.split(',');
const recordValue = record[title]?.split(',');
return !values.some((value) => recordValue?.includes(value));
});
break;
case 'allof':
expectedRecords = unfilteredRecords.filter((record) => {
const values = filter.value.split(',');
return values.every((value) => record[title]?.includes(value));
});
break;
case 'nallof':
expectedRecords = unfilteredRecords.filter((record) => {
const values = filter.value.split(',');
return !values.every((value) => record[title]?.includes(value));
});
break;
}
// retrieve filtered records
const response = await request(context.app)
.get(`/api/v1/db/data/noco/${project.id}/${table.id}`)
.set('xc-auth', context.token)
.query({
filterArrJson: JSON.stringify([filter]),
})
.expect(200);
// validate
if (debugMode) {
if (response.body.pageInfo.totalRows !== expectedRecords.length) {
console.log(`Failed for filter: ${JSON.stringify(filter)}`);
console.log(`Expected: ${expectedRecords.length}`);
console.log(`Actual: ${response.body.pageInfo.totalRows}`);
throw new Error('fix me!');
}
response.body.list.forEach((row, index) => {
if (row[title] !== expectedRecords[index][title]) {
console.log(`Failed for filter: ${JSON.stringify(filter)}`);
console.log(`Expected: ${expectedRecords[index][title]}`);
console.log(`Actual: ${row[title]}`);
throw new Error('fix me!');
}
});
} else {
expect(response.body.pageInfo.totalRows).to.equal(expectedRecords.length);
response.body.list.forEach((row, index) => {
expect(row[title] !== expectedRecords[index][title]);
});
}
}
let context;
let project: Project;
let table: Model;
let columns: any[];
let unfilteredRecords: any[] = [];
async function verifyFilters(dataType, columnId, filterList) {
const filter = {
fk_column_id: columnId,
status: 'create',
logical_op: 'and',
comparison_op: '',
value: '',
};
for (let i = 0; i < filterList.length; i++) {
filter.comparison_op = filterList[i].comparison_op;
filter.value = filterList[i].value;
await retrieveRecordsAndValidate(filter, dataType);
}
}
function filterTextBased() {
// prepare data for test cases
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project, {
table_name: 'textBased',
title: 'TextBased',
columns: [
{
column_name: 'Id',
title: 'Id',
uidt: UITypes.ID,
},
{
column_name: 'SingleLineText',
title: 'SingleLineText',
uidt: UITypes.SingleLineText,
},
{
column_name: 'MultiLineText',
title: 'MultiLineText',
uidt: UITypes.LongText,
},
{
column_name: 'Email',
title: 'Email',
uidt: UITypes.Email,
},
{
column_name: 'Phone',
title: 'Phone',
uidt: UITypes.PhoneNumber,
},
{
column_name: 'Url',
title: 'Url',
uidt: UITypes.URL,
},
],
});
columns = await table.getColumns();
let rowAttributes = [];
for (let i = 0; i < 400; i++) {
let row = {
SingleLineText: rowMixedValue(columns[1], i),
MultiLineText: rowMixedValue(columns[2], i),
Email: rowMixedValue(columns[3], i),
Phone: rowMixedValue(columns[4], i),
Url: rowMixedValue(columns[5], i),
};
rowAttributes.push(row);
}
await createBulkRows(context, {
project,
table,
values: rowAttributes,
});
unfilteredRecords = await listRow({ project, table });
// verify length of unfiltered records to be 400
expect(unfilteredRecords.length).to.equal(400);
});
it('Type: Single Line Text', async () => {
let filterList = [
{ comparison_op: 'eq', value: 'Afghanistan' },
{ comparison_op: 'neq', value: 'Afghanistan' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'empty', value: '' },
// { comparison_op: 'notempty', value: '' },
{ comparison_op: 'like', value: 'Au' },
{ comparison_op: 'nlike', value: 'Au' },
];
await verifyFilters('SingleLineText', columns[1].id, filterList);
});
it('Type: Multi Line Text', async () => {
let filterList = [
{ comparison_op: 'eq', value: 'Aberdeen, United Kingdom' },
{ comparison_op: 'neq', value: 'Aberdeen, United Kingdom' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'empty', value: '' },
// { comparison_op: 'notempty', value: '' },
{ comparison_op: 'like', value: 'abad' },
{ comparison_op: 'nlike', value: 'abad' },
];
await verifyFilters('MultiLineText', columns[2].id, filterList);
});
it('Type: Email', async () => {
let filterList = [
{ comparison_op: 'eq', value: 'leota@hotmail.com' },
{ comparison_op: 'neq', value: 'leota@hotmail.com' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'empty', value: '' },
// { comparison_op: 'notempty', value: '' },
{ comparison_op: 'like', value: 'cox.net' },
{ comparison_op: 'nlike', value: 'cox.net' },
];
await verifyFilters('Email', columns[3].id, filterList);
});
it('Type: Phone', async () => {
let filterList = [
{ comparison_op: 'eq', value: '504-621-8927' },
{ comparison_op: 'neq', value: '504-621-8927' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'empty', value: '' },
// { comparison_op: 'notempty', value: '' },
{ comparison_op: 'like', value: '504' },
{ comparison_op: 'nlike', value: '504' },
];
await verifyFilters('Phone', columns[4].id, filterList);
});
it('Type: Url', async () => {
let filterList = [
{ comparison_op: 'eq', value: 'https://www.youtube.com' },
{ comparison_op: 'neq', value: 'https://www.youtube.com' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'empty', value: '' },
// { comparison_op: 'notempty', value: '' },
{ comparison_op: 'like', value: 'e.com' },
{ comparison_op: 'nlike', value: 'e.com' },
];
await verifyFilters('Url', columns[5].id, filterList);
});
}
function filterNumberBased() {
// prepare data for test cases
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project, {
table_name: 'numberBased',
title: 'numberBased',
columns: [
{
column_name: 'Id',
title: 'Id',
uidt: UITypes.ID,
},
{
column_name: 'Number',
title: 'Number',
uidt: UITypes.Number,
},
{
column_name: 'Decimal',
title: 'Decimal',
uidt: UITypes.Decimal,
},
{
column_name: 'Currency',
title: 'Currency',
uidt: UITypes.Currency,
},
{
column_name: 'Percent',
title: 'Percent',
uidt: UITypes.Percent,
},
{
column_name: 'Duration',
title: 'Duration',
uidt: UITypes.Duration,
},
{
column_name: 'Rating',
title: 'Rating',
uidt: UITypes.Rating,
},
],
});
columns = await table.getColumns();
let rowAttributes = [];
for (let i = 0; i < 400; i++) {
let row = {
Number: rowMixedValue(columns[1], i),
Decimal: rowMixedValue(columns[2], i),
Currency: rowMixedValue(columns[3], i),
Percent: rowMixedValue(columns[4], i),
Duration: rowMixedValue(columns[5], i),
Rating: rowMixedValue(columns[6], i),
};
rowAttributes.push(row);
}
await createBulkRows(context, {
project,
table,
values: rowAttributes,
});
unfilteredRecords = await listRow({ project, table });
// verify length of unfiltered records to be 400
expect(unfilteredRecords.length).to.equal(400);
});
it('Type: Number', async () => {
let filterList = [
{ comparison_op: 'eq', value: '33' },
{ comparison_op: 'neq', value: '33' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'gt', value: '44' },
{ comparison_op: 'gte', value: '44' },
{ comparison_op: 'lt', value: '44' },
{ comparison_op: 'lte', value: '44' },
];
await verifyFilters('Number', columns[1].id, filterList);
});
it('Type: Decimal', async () => {
let filterList = [
{ comparison_op: 'eq', value: '33.3' },
{ comparison_op: 'neq', value: '33.3' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'gt', value: '44.26' },
{ comparison_op: 'gte', value: '44.26' },
{ comparison_op: 'lt', value: '44.26' },
{ comparison_op: 'lte', value: '44.26' },
];
await verifyFilters('Decimal', columns[2].id, filterList);
});
it('Type: Currency', async () => {
let filterList = [
{ comparison_op: 'eq', value: '33.3' },
{ comparison_op: 'neq', value: '33.3' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'gt', value: '44.26' },
{ comparison_op: 'gte', value: '44.26' },
{ comparison_op: 'lt', value: '44.26' },
{ comparison_op: 'lte', value: '44.26' },
];
await verifyFilters('Decimal', columns[3].id, filterList);
});
it('Type: Percent', async () => {
let filterList = [
{ comparison_op: 'eq', value: '33' },
{ comparison_op: 'neq', value: '33' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'gt', value: '44' },
{ comparison_op: 'gte', value: '44' },
{ comparison_op: 'lt', value: '44' },
{ comparison_op: 'lte', value: '44' },
];
await verifyFilters('Percent', columns[4].id, filterList);
});
it('Type: Duration', async () => {
let filterList = [
{ comparison_op: 'eq', value: '10' },
{ comparison_op: 'neq', value: '10' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'gt', value: '50' },
{ comparison_op: 'gte', value: '50' },
{ comparison_op: 'lt', value: '50' },
{ comparison_op: 'lte', value: '50' },
];
await verifyFilters('Duration', columns[5].id, filterList);
});
it('Type: Rating', async () => {
let filterList = [
{ comparison_op: 'eq', value: '3' },
{ comparison_op: 'neq', value: '3' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'gt', value: '2' },
{ comparison_op: 'gte', value: '2' },
{ comparison_op: 'lt', value: '2' },
{ comparison_op: 'lte', value: '2' },
];
await verifyFilters('Rating', columns[6].id, filterList);
});
}
function filterSelectBased() {
// prepare data for test cases
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project, {
table_name: 'selectBased',
title: 'selectBased',
columns: [
{
column_name: 'Id',
title: 'Id',
uidt: UITypes.ID,
},
{
column_name: 'SingleSelect',
title: 'SingleSelect',
uidt: UITypes.SingleSelect,
dtxp: "'jan','feb','mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'",
},
{
column_name: 'MultiSelect',
title: 'MultiSelect',
uidt: UITypes.MultiSelect,
dtxp: "'jan','feb','mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'",
},
],
});
columns = await table.getColumns();
let rowAttributes = [];
for (let i = 0; i < 400; i++) {
let row = {
SingleSelect: rowMixedValue(columns[1], i),
MultiSelect: rowMixedValue(columns[2], i),
};
rowAttributes.push(row);
}
await createBulkRows(context, {
project,
table,
values: rowAttributes,
});
unfilteredRecords = await listRow({ project, table });
// verify length of unfiltered records to be 400
expect(unfilteredRecords.length).to.equal(400);
});
it('Type: Single select', async () => {
let filterList = [
{ comparison_op: 'eq', value: 'jan' },
{ comparison_op: 'neq', value: 'jan' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'like', value: 'j' },
{ comparison_op: 'nlike', value: 'j' },
{ comparison_op: 'anyof', value: 'jan,feb,mar' },
{ comparison_op: 'nanyof', value: 'jan,feb,mar' },
];
await verifyFilters('SingleSelect', columns[1].id, filterList);
});
it('Type: Multi select', async () => {
let filterList = [
{ comparison_op: 'eq', value: 'jan,feb,mar' },
{ comparison_op: 'neq', value: 'jan,feb,mar' },
{ comparison_op: 'null', value: '' },
{ comparison_op: 'notnull', value: '' },
{ comparison_op: 'like', value: 'jan' },
{ comparison_op: 'nlike', value: 'jan' },
{ comparison_op: 'anyof', value: 'jan,feb,mar' },
{ comparison_op: 'nanyof', value: 'jan,feb,mar' },
{ comparison_op: 'allof', value: 'jan,feb,mar' },
{ comparison_op: 'nallof', value: 'jan,feb,mar' },
];
await verifyFilters('MultiSelect', columns[2].id, filterList);
});
}
async function applyDateFilter(filterParams, expectedRecords) {
const response = await request(context.app)
.get(`/api/v1/db/data/noco/${project.id}/${table.id}`)
.set('xc-auth', context.token)
.query({
filterArrJson: JSON.stringify([filterParams]),
})
.expect(200);
// expect(response.body.pageInfo.totalRows).to.equal(expectedRecords);
if (response.body.pageInfo.totalRows !== expectedRecords) {
console.log('filterParams', filterParams);
console.log(
'response.body.pageInfo.totalRows',
response.body.pageInfo.totalRows
);
console.log('expectedRecords', expectedRecords);
}
return response.body.list;
}
function filterDateBased() {
// prepare data for test cases
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project, {
table_name: 'dateBased',
title: 'dateBased',
columns: [
{
column_name: 'Id',
title: 'Id',
uidt: UITypes.ID,
},
{
column_name: 'Date',
title: 'Date',
uidt: UITypes.Date,
},
],
});
columns = await table.getColumns();
let rowAttributes = [];
for (let i = 0; i < 800; i++) {
let row = {
Date: rowMixedValue(columns[1], i),
};
rowAttributes.push(row);
}
await createBulkRows(context, {
project,
table,
values: rowAttributes,
});
unfilteredRecords = await listRow({ project, table });
// verify length of unfiltered records to be 800
expect(unfilteredRecords.length).to.equal(800);
});
it('Type: Date ', async () => {
const today = new Date().setHours(0, 0, 0, 0);
const tomorrow = new Date(
new Date().setDate(new Date().getDate() + 1)
).setHours(0, 0, 0, 0);
const yesterday = new Date(
new Date().setDate(new Date().getDate() - 1)
).setHours(0, 0, 0, 0);
const oneWeekAgo = new Date(
new Date().setDate(new Date().getDate() - 7)
).setHours(0, 0, 0, 0);
const oneWeekFromNow = new Date(
new Date().setDate(new Date().getDate() + 7)
).setHours(0, 0, 0, 0);
const oneMonthAgo = new Date(
new Date().setMonth(new Date().getMonth() - 1)
).setHours(0, 0, 0, 0);
const oneMonthFromNow = new Date(
new Date().setMonth(new Date().getMonth() + 1)
).setHours(0, 0, 0, 0);
const daysAgo45 = new Date(
new Date().setDate(new Date().getDate() - 45)
).setHours(0, 0, 0, 0);
const daysFromNow45 = new Date(
new Date().setDate(new Date().getDate() + 45)
).setHours(0, 0, 0, 0);
const thisMonth15 = new Date(new Date().setDate(15)).setHours(0, 0, 0, 0);
const oneYearAgo = new Date(
new Date().setFullYear(new Date().getFullYear() - 1)
).setHours(0, 0, 0, 0);
const oneYearFromNow = new Date(
new Date().setFullYear(new Date().getFullYear() + 1)
).setHours(0, 0, 0, 0);
// records array with time set to 00:00:00; store time in unix epoch
const recordsTimeSetToZero = unfilteredRecords.map((r) => {
const date = new Date(r['Date']);
date.setHours(0, 0, 0, 0);
return date.getTime();
});
const isFilterList = [
{
opSub: 'today',
rowCount: recordsTimeSetToZero.filter((r) => r === today).length,
},
{
opSub: 'tomorrow',
rowCount: recordsTimeSetToZero.filter((r) => r === tomorrow).length,
},
{
opSub: 'yesterday',
rowCount: recordsTimeSetToZero.filter((r) => r === yesterday).length,
},
{
opSub: 'oneWeekAgo',
rowCount: recordsTimeSetToZero.filter((r) => r === oneWeekAgo).length,
},
{
opSub: 'oneWeekFromNow',
rowCount: recordsTimeSetToZero.filter((r) => r === oneWeekFromNow)
.length,
},
{
opSub: 'oneMonthAgo',
rowCount: recordsTimeSetToZero.filter((r) => r === oneMonthAgo).length,
},
{
opSub: 'oneMonthFromNow',
rowCount: recordsTimeSetToZero.filter((r) => r === oneMonthFromNow)
.length,
},
{
opSub: 'daysAgo',
value: 45,
rowCount: recordsTimeSetToZero.filter((r) => r === daysAgo45).length,
},
{
opSub: 'daysFromNow',
value: 45,
rowCount: recordsTimeSetToZero.filter((r) => r === daysFromNow45)
.length,
},
{
opSub: 'exactDate',
value: new Date(thisMonth15).toISOString().split('T')[0],
rowCount: recordsTimeSetToZero.filter((r) => r === thisMonth15).length,
},
];
// "is after" filter list
const isAfterFilterList = [
{
opSub: 'today',
rowCount: recordsTimeSetToZero.filter((r) => r > today).length,
},
{
opSub: 'tomorrow',
rowCount: recordsTimeSetToZero.filter((r) => r > tomorrow).length,
},
{
opSub: 'yesterday',
rowCount: recordsTimeSetToZero.filter((r) => r > yesterday).length,
},
{
opSub: 'oneWeekAgo',
rowCount: recordsTimeSetToZero.filter((r) => r > oneWeekAgo).length,
},
{
opSub: 'oneWeekFromNow',
rowCount: recordsTimeSetToZero.filter((r) => r > oneWeekFromNow).length,
},
{
opSub: 'oneMonthAgo',
rowCount: recordsTimeSetToZero.filter((r) => r > oneMonthAgo).length,
},
{
opSub: 'oneMonthFromNow',
rowCount: recordsTimeSetToZero.filter((r) => r > oneMonthFromNow)
.length,
},
{
opSub: 'daysAgo',
value: 45,
rowCount: recordsTimeSetToZero.filter((r) => r > daysAgo45).length,
},
{
opSub: 'daysFromNow',
value: 45,
rowCount: recordsTimeSetToZero.filter((r) => r > daysFromNow45).length,
},
{
opSub: 'exactDate',
value: new Date().toISOString().split('T')[0],
rowCount: recordsTimeSetToZero.filter((r) => r > today).length,
},
];
// "is within" filter list
const isWithinFilterList = [
{
opSub: 'pastWeek',
rowCount: recordsTimeSetToZero.filter(
(r) => r >= oneWeekAgo && r <= today
).length,
},
{
opSub: 'pastMonth',
rowCount: recordsTimeSetToZero.filter(
(r) => r >= oneMonthAgo && r <= today
).length,
},
{
opSub: 'pastYear',
rowCount: recordsTimeSetToZero.filter(
(r) => r >= oneYearAgo && r <= today
).length,
},
{
opSub: 'nextWeek',
rowCount: recordsTimeSetToZero.filter(
(r) => r >= today && r <= oneWeekFromNow
).length,
},
{
opSub: 'nextMonth',
rowCount: recordsTimeSetToZero.filter(
(r) => r >= today && r <= oneMonthFromNow
).length,
},
{
opSub: 'nextYear',
rowCount: recordsTimeSetToZero.filter(
(r) => r >= today && r <= oneYearFromNow
).length,
},
{
opSub: 'nextNumberOfDays',
value: 45,
rowCount: recordsTimeSetToZero.filter(
(r) => r >= today && r <= daysFromNow45
).length,
},
{
opSub: 'pastNumberOfDays',
value: 45,
rowCount: recordsTimeSetToZero.filter(
(r) => r >= daysAgo45 && r <= today
).length,
},
];
// rest of the filters (without subop type)
const filterList = [
{
opType: 'blank',
rowCount: unfilteredRecords.filter(
(r) => r['Date'] === null || r['Date'] === ''
).length,
},
{
opType: 'notblank',
rowCount: unfilteredRecords.filter(
(r) => r['Date'] !== null && r['Date'] !== ''
).length,
},
];
// is
for (let i = 0; i < isFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'eq',
comparison_sub_op: isFilterList[i].opSub,
value: isFilterList[i].value,
};
await applyDateFilter(filter, isFilterList[i].rowCount);
}
// is not
for (let i = 0; i < isFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'neq',
comparison_sub_op: isFilterList[i].opSub,
value: isFilterList[i].value,
};
await applyDateFilter(filter, 800 - isFilterList[i].rowCount);
}
// is before
for (let i = 0; i < isAfterFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'gt',
comparison_sub_op: isAfterFilterList[i].opSub,
value: isAfterFilterList[i].value,
};
await applyDateFilter(filter, isAfterFilterList[i].rowCount);
}
// is before or on
for (let i = 0; i < isAfterFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'gte',
comparison_sub_op: isAfterFilterList[i].opSub,
value: isAfterFilterList[i].value,
};
await applyDateFilter(filter, isAfterFilterList[i].rowCount + 1);
}
// is after
for (let i = 0; i < isAfterFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'lt',
comparison_sub_op: isAfterFilterList[i].opSub,
value: isAfterFilterList[i].value,
};
await applyDateFilter(filter, 800 - isAfterFilterList[i].rowCount - 1);
}
// is after or on
for (let i = 0; i < isAfterFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'lte',
comparison_sub_op: isAfterFilterList[i].opSub,
value: isAfterFilterList[i].value,
};
await applyDateFilter(filter, 800 - isAfterFilterList[i].rowCount);
}
// is within
for (let i = 0; i < isWithinFilterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: 'isWithin',
comparison_sub_op: isWithinFilterList[i].opSub,
value: isWithinFilterList[i].value,
};
await applyDateFilter(filter, isWithinFilterList[i].rowCount);
}
// rest of the filters (without subop type)
for (let i = 0; i < filterList.length; i++) {
const filter = {
fk_column_id: columns[1].id,
status: 'create',
logical_op: 'and',
comparison_op: filterList[i].opType,
value: '',
};
await applyDateFilter(filter, filterList[i].rowCount);
}
});
}
export default function () {
describe('Filter: Text based', filterTextBased);
describe('Filter: Numerical', filterNumberBased);
describe('Filter: Select based', filterSelectBased);
describe('Filter: Date based', filterDateBased);
}

223
packages/nocodb-nest/tests/unit/rest/tests/org.test.ts

@ -0,0 +1,223 @@
import { expect } from 'chai';
import 'mocha';
import request from 'supertest';
import { OrgUserRoles } from 'nocodb-sdk';
import init from '../../init';
// Test case list in this file
// 1. Get users list
// 2. Invite a new user
// 3. Update user role
// 4. Remove user
// 5. Get token list
// 6. Generate token
// 7. Delete token
// 8. Disable/Enable signup
function authTests() {
let context;
beforeEach(async function () {
context = await init();
});
it('Get users list', async () => {
const response = await request(context.app)
.get('/api/v1/users')
.set('xc-auth', context.token)
.expect(200);
expect(response.body).to.have.keys(['list', 'pageInfo']);
expect(response.body.list).to.have.length(1);
});
it('Invite a new user', async () => {
const response = await request(context.app)
.post('/api/v1/users')
.set('xc-auth', context.token)
.send({ email: 'a@nocodb.com' })
.expect(200);
console.log(response.body);
expect(response.body).to.have.property('invite_token').to.be.a('string');
// todo: verify invite token
});
it('Update user role', async () => {
const email = 'a@nocodb.com';
// invite a user
await request(context.app)
.post('/api/v1/users')
.set('xc-auth', context.token)
.send({ email })
.expect(200);
const response = await request(context.app)
.get('/api/v1/users')
.set('xc-auth', context.token)
.expect(200);
expect(response.body.list).to.have.length(2);
const user = response.body.list.find((u) => u.email === email);
expect(user).to.have.property('roles').to.be.equal(OrgUserRoles.VIEWER);
await request(context.app)
.patch('/api/v1/users/' + user.id)
.set('xc-auth', context.token)
.send({ roles: OrgUserRoles.CREATOR })
.expect(200);
const response2 = await request(context.app)
.get('/api/v1/users')
.set('xc-auth', context.token)
.expect(200);
expect(response2.body.list).to.have.length(2);
const user2 = response2.body.list.find((u) => u.email === email);
expect(user2).to.have.property('roles').to.be.equal(OrgUserRoles.CREATOR);
});
it('Remove user', async () => {
const email = 'a@nocodb.com';
// invite a user
await request(context.app)
.post('/api/v1/users')
.set('xc-auth', context.token)
.send({ email })
.expect(200);
const response = await request(context.app)
.get('/api/v1/users')
.set('xc-auth', context.token)
.expect(200);
expect(response.body.list).to.have.length(2);
const user = response.body.list.find((u) => u.email === email);
expect(user).to.have.property('roles').to.be.equal(OrgUserRoles.VIEWER);
await request(context.app)
.delete('/api/v1/users/' + user.id)
.set('xc-auth', context.token)
.expect(200);
const response2 = await request(context.app)
.get('/api/v1/users')
.set('xc-auth', context.token)
.expect(200);
expect(response2.body.list).to.have.length(1);
});
it('Get token list', async () => {
const response = await request(context.app)
.get('/api/v1/tokens')
.set('xc-auth', context.token)
.expect(200);
expect(response.body).to.have.keys(['list', 'pageInfo']);
expect(response.body.list).to.have.length(0);
});
it('Generate token', async () => {
const r = await request(context.app)
.post('/api/v1/tokens')
.set('xc-auth', context.token)
.send({ description: 'test' })
.expect(200);
const response = await request(context.app)
.get('/api/v1/tokens')
.set('xc-auth', context.token)
.expect(200);
expect(response.body).to.have.keys(['list', 'pageInfo']);
expect(response.body.list).to.have.length(1);
expect(response.body.list[0]).to.have.property('token').to.be.a('string');
expect(response.body.list[0])
.to.have.property('description')
.to.be.a('string')
.to.be.eq('test');
});
it('Delete token', async () => {
const r = await request(context.app)
.post('/api/v1/tokens')
.set('xc-auth', context.token)
.send({ description: 'test' })
.expect(200);
let response = await request(context.app)
.get('/api/v1/tokens')
.set('xc-auth', context.token)
.expect(200);
expect(response.body).to.have.keys(['list', 'pageInfo']);
expect(response.body.list).to.have.length(1);
await request(context.app)
.delete('/api/v1/tokens/' + r.body.token)
.set('xc-auth', context.token)
.expect(200);
response = await request(context.app)
.get('/api/v1/tokens')
.set('xc-auth', context.token)
.expect(200);
expect(response.body).to.have.keys(['list', 'pageInfo']);
expect(response.body.list).to.have.length(0);
});
it.only('Disable/Enable signup', async () => {
const args = {
email: 'dummyuser@example.com',
password: 'A1234abh2@dsad',
};
await request(context.app)
.post('/api/v1/app-settings')
.set('xc-auth', context.token)
.send({ invite_only_signup: true })
.expect(200);
const failedRes = await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args)
.expect(400);
expect(failedRes.body)
.to.be.an('object')
.to.have.property('msg')
.to.be.equal('Not allowed to signup, contact super admin.');
await request(context.app)
.post('/api/v1/app-settings')
.set('xc-auth', context.token)
.send({ invite_only_signup: false })
.expect(200);
const successRes = await request(context.app)
.post('/api/v1/auth/user/signup')
.send(args)
.expect(200);
expect(successRes.body)
.to.be.an('object')
.to.have.property('token')
.to.be.a('string');
const userMeRes = await request(context.app)
.get('/api/v1/auth/user/me')
.set('xc-auth', successRes.body.token)
.expect(200);
expect(userMeRes.body)
.to.be.an('object')
.to.have.property('email')
.to.be.eq(args.email);
});
}
export default function () {}

360
packages/nocodb-nest/tests/unit/rest/tests/project.test.ts

@ -0,0 +1,360 @@
import 'mocha';
import request from 'supertest';
import { Project } from '../../../../src/models'
import { createTable } from '../../factory/table';
import init from '../../init';
import { createProject, createSharedBase } from '../../factory/project';
import { beforeEach } from 'mocha';
import { Exception } from 'handlebars';
import { expect } from 'chai';
// Test case list
// 1. Get project info
// 2. UI ACL
// 3. Create project
// 4. Create project with existing title
// 5. Update project
// 6. Update project with existing title
// 7. Create project shared base
// 8. Created project shared base should have only editor or viewer role
// 9. Updated project shared base should have only editor or viewer role
// 10. Updated project shared base
// 11. Get project shared base
// 12. Delete project shared base
// 13. Meta diff sync
// 14. Meta diff sync
// 15. Meta diff sync
// 16. Get all projects meta
function projectTest() {
let context;
let project;
beforeEach(async function () {
context = await init();
project = await createProject(context);
});
it('Get project info', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/info`)
.set('xc-auth', context.token)
.send({})
.expect(200);
});
// todo: Test by creating models under project and check if the UCL is working
it('UI ACL', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/visibility-rules`)
.set('xc-auth', context.token)
.send({})
.expect(200);
});
// todo: Test creating visibility set
it('List projects', async () => {
const response = await request(context.app)
.get('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send({})
.expect(200);
if (response.body.list.length !== 1)
new Error('Should list only 1 project');
if (!response.body.pageInfo) new Error('Should have pagination info');
});
it('Create project', async () => {
const response = await request(context.app)
.post('/api/v1/db/meta/projects/')
.set('xc-auth', context.token)
.send({
title: 'Title1',
})
.expect(200);
const newProject = await Project.getByTitleOrId(response.body.id);
if (!newProject) return new Error('Project not created');
});
it('Create projects with existing title', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/`)
.set('xc-auth', context.token)
.send({
title: project.title,
})
.expect(400);
});
// todo: fix passport user role popluation bug
// it('Delete project', async async () => {
// const toBeDeletedProject = await createProject(app, token, {
// title: 'deletedTitle',
// });
// await request(app)
// .delete('/api/v1/db/meta/projects/${toBeDeletedProject.id}')
// .set('xc-auth', token)
// .send({
// title: 'Title1',
// })
// .expect(200, async (err) => {
// // console.log(res);
//
// const deletedProject = await Project.getByTitleOrId(
// toBeDeletedProject.id
// );
// if (deletedProject) return new Error('Project not delete');
// new Error();
// });
// });
it('Read project', async () => {
const response = await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}`)
.set('xc-auth', context.token)
.send()
.expect(200);
if (response.body.id !== project.id)
return new Error('Got the wrong project');
});
it('Update projects', async () => {
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}`)
.set('xc-auth', context.token)
.send({
title: 'NewTitle',
})
.expect(200);
const newProject = await Project.getByTitleOrId(project.id);
if (newProject.title !== 'NewTitle') {
return new Error('Project not updated');
}
});
it('Update projects with existing title', async function () {
const newProject = await createProject(context, {
title: 'NewTitle1',
});
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}`)
.set('xc-auth', context.token)
.send({
title: newProject.title,
})
.expect(400);
});
it('Create project shared base', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'viewer',
password: 'password123',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (
!updatedProject.uuid ||
updatedProject.roles !== 'viewer' ||
updatedProject.password !== 'password123'
) {
return new Error('Shared base not configured properly');
}
});
it('Created project shared base should have only editor or viewer role', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'commenter',
password: 'password123',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.roles === 'commenter') {
return new Error('Shared base not configured properly');
}
});
it('Updated project shared base should have only editor or viewer role', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'commenter',
password: 'password123',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.roles === 'commenter') {
throw new Exception('Shared base not updated properly');
}
});
it('Updated project shared base', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.patch(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send({
roles: 'editor',
password: 'password123',
})
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.roles !== 'editor') {
throw new Exception('Shared base not updated properly');
}
});
it('Get project shared base', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send()
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (!updatedProject.uuid) {
throw new Exception('Shared base not created');
}
});
it('Delete project shared base', async () => {
await createSharedBase(context.app, context.token, project);
await request(context.app)
.delete(`/api/v1/db/meta/projects/${project.id}/shared`)
.set('xc-auth', context.token)
.send()
.expect(200);
const updatedProject = await Project.getByTitleOrId(project.id);
if (updatedProject.uuid) {
throw new Exception('Shared base not deleted');
}
});
// todo: Do compare api test
it('Meta diff sync', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/meta-diff`)
.set('xc-auth', context.token)
.send()
.expect(200);
});
it('Meta diff sync', async () => {
await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/meta-diff`)
.set('xc-auth', context.token)
.send()
.expect(200);
});
// todo: improve test. Check whether the all the actions are present in the response and correct as well
it('Meta diff sync', async () => {
await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/audits`)
.set('xc-auth', context.token)
.send()
.expect(200);
});
it('Get all projects meta', async () => {
await createTable(context, project, {
table_name: 'table1',
title: 'table1',
});
await createTable(context, project, {
table_name: 'table2',
title: 'table2',
});
await createTable(context, project, {
table_name: 'table3',
title: 'table3',
});
await request(context.app)
.get(`/api/v1/aggregated-meta-info`)
.set('xc-auth', context.token)
.send({})
.expect(200)
.then((res) => {
expect(res.body).to.have.all.keys(
'userCount',
'sharedBaseCount',
'projectCount',
'projects'
);
expect(res.body).to.have.property('projectCount').to.eq(1);
expect(res.body).to.have.property('projects').to.be.an('array');
expect(res.body.projects[0].tableCount.table).to.be.eq(3);
expect(res.body)
.to.have.nested.property('projects[0].tableCount.table')
.to.be.a('number');
expect(res.body)
.to.have.nested.property('projects[0].tableCount.view')
.to.be.a('number');
expect(res.body)
.to.have.nested.property('projects[0].viewCount')
.to.be.an('object')
.have.keys(
'formCount',
'gridCount',
'galleryCount',
'kanbanCount',
'total',
'sharedFormCount',
'sharedGridCount',
'sharedGalleryCount',
'sharedKanbanCount',
'sharedTotal',
'sharedLockedCount'
);
expect(res.body.projects[0]).have.keys(
'external',
'webhookCount',
'filterCount',
'sortCount',
'userCount',
'rowCount',
'tableCount',
'viewCount'
);
expect(res.body)
.to.have.nested.property('projects[0].rowCount')
.to.be.an('array');
expect(res.body)
.to.have.nested.property('projects[0].external')
.to.be.an('boolean');
});
});
}
export default function () {
describe('Project', projectTest);
}

264
packages/nocodb-nest/tests/unit/rest/tests/table.test.ts

@ -0,0 +1,264 @@
import 'mocha';
import request from 'supertest';
import init from '../../init';
import { createTable, getAllTables } from '../../factory/table';
import { createProject } from '../../factory/project';
import { defaultColumns } from '../../factory/column';
import Model from '../../../../src/models/Model';
import { expect } from 'chai';
// Test case list
// 1. Get table list
// 2. Create table
// 3. Create table with same table name
// 4. Create table with same title
// 5. Create table with title length more than the limit
// 6. Create table with title having leading white space
// 7. Update table
// 8. Delete table
// 9. Get table
// 10. Reorder table
function tableTest() {
let context;
let project;
let table;
beforeEach(async function () {
context = await init();
project = await createProject(context);
table = await createTable(context, project);
});
it('Get table list', async function () {
const response = await request(context.app)
.get(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({})
.expect(200);
expect(response.body.list).to.be.an('array').not.empty;
});
it('Create table', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'table2',
title: 'new_title_2',
columns: defaultColumns(context),
})
.expect(200);
const tables = await getAllTables({ project });
if (tables.length !== 2) {
return new Error('Tables is not be created');
}
if (response.body.columns.length !== defaultColumns(context)) {
return new Error('Columns not saved properly');
}
if (
!(
response.body.table_name.startsWith(project.prefix) &&
response.body.table_name.endsWith('table2')
)
) {
return new Error('table name not configured properly');
}
});
it('Create table with no table name', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: undefined,
title: 'new_title',
columns: defaultColumns(context),
})
.expect(400);
if (
!response.text.includes(
'Missing table name `table_name` property in request body'
)
) {
console.error(response.text);
return new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
console.log(tables);
return new Error(
`Tables should not be created, tables.length:${tables.length}`
);
}
});
it('Create table with same table name', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: table.table_name,
title: 'New_title',
columns: defaultColumns(context),
})
.expect(400);
if (!response.text.includes('Duplicate table name')) {
console.error(response.text);
return new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
return new Error('Tables should not be created');
}
});
it('Create table with same title', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'New_table_name',
title: table.title,
columns: defaultColumns(context),
})
.expect(400);
if (!response.text.includes('Duplicate table alias')) {
console.error(response.text);
return new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
return new Error('Tables should not be created');
}
});
it('Create table with title length more than the limit', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'a'.repeat(256),
title: 'new_title',
columns: defaultColumns(context),
})
.expect(400);
if (!response.text.includes('Table name exceeds ')) {
console.error(response.text);
return new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
return new Error('Tables should not be created');
}
});
it('Create table with title having leading white space', async function () {
const response = await request(context.app)
.post(`/api/v1/db/meta/projects/${project.id}/tables`)
.set('xc-auth', context.token)
.send({
table_name: 'table_name_with_whitespace ',
title: 'new_title',
columns: defaultColumns(context),
})
.expect(400);
if (
!response.text.includes(
'Leading or trailing whitespace not allowed in table names'
)
) {
console.error(response.text);
return new Error('Wrong api response');
}
const tables = await getAllTables({ project });
if (tables.length !== 1) {
return new Error('Tables should not be created');
}
});
it('Update table', async function () {
const response = await request(context.app)
.patch(`/api/v1/db/meta/tables/${table.id}`)
.set('xc-auth', context.token)
.send({
project_id: project.id,
table_name: 'new_title',
})
.expect(200);
const updatedTable = await Model.get(table.id);
if (!updatedTable.table_name.endsWith('new_title')) {
return new Error('Table was not updated');
}
});
it('Delete table', async function () {
const response = await request(context.app)
.delete(`/api/v1/db/meta/tables/${table.id}`)
.set('xc-auth', context.token)
.send({})
.expect(200);
const tables = await getAllTables({ project });
if (tables.length !== 0) {
return new Error('Table is not deleted');
}
});
// todo: Check the condtion where the table being deleted is being refered by multiple tables
// todo: Check the if views are also deleted
it('Get table', async function () {
const response = await request(context.app)
.get(`/api/v1/db/meta/tables/${table.id}`)
.set('xc-auth', context.token)
.send({})
.expect(200);
if (response.body.id !== table.id) new Error('Wrong table');
});
// todo: flaky test, order condition is sometimes not met
it('Reorder table', async function () {
const newOrder = table.order === 0 ? 1 : 0;
const response = await request(context.app)
.post(`/api/v1/db/meta/tables/${table.id}/reorder`)
.set('xc-auth', context.token)
.send({
order: newOrder,
})
.expect(200);
// .expect(200, async (err) => {
// if (err) return new Error(err);
// const updatedTable = await Model.get(table.id);
// console.log(Number(updatedTable.order), newOrder);
// if (Number(updatedTable.order) !== newOrder) {
// return new Error('Reordering failed');
// }
// new Error();
// });
});
}
export default async function () {
describe('Table', tableTest);
}

2412
packages/nocodb-nest/tests/unit/rest/tests/tableRow.test.ts

File diff suppressed because it is too large Load Diff

1521
packages/nocodb-nest/tests/unit/rest/tests/viewRow.test.ts

File diff suppressed because it is too large Load Diff

73
packages/nocodb-nest/tests/unit/tsconfig.json

@ -0,0 +1,73 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"skipLibCheck": true,
"composite": true,
"target": "es2017",
"outDir": "build/main",
"rootDir": "src",
"moduleResolution": "node",
"module": "commonjs",
"declaration": true,
"inlineSourceMap": true,
"esModuleInterop": true
/* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
"allowJs": false,
// "strict": true /* Enable all strict type-checking options. */,
/* Strict Type-Checking Options */
// "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
// "strictNullChecks": true /* Enable strict null checks. */,
// "strictFunctionTypes": true /* Enable strict checking of function types. */,
// "strictPropertyInitialization": true /* Enable strict checking of property initialization in classes. */,
// "noImplicitThis": true /* Raise error on 'this' expressions with an implied 'any' type. */,
// "alwaysStrict": true /* Parse in strict mode and emit "use strict" for each source file. */,
"resolveJsonModule": true,
/* Additional Checks */
"noUnusedLocals": false
/* Report errors on unused locals. */,
"noUnusedParameters": false
/* Report errors on unused parameters. */,
"noImplicitReturns": false
/* Report error when not all code paths in function return a value. */,
"noFallthroughCasesInSwitch": false
/* Report errors for fallthrough cases in switch statement. */,
/* Debugging Options */
"traceResolution": false
/* Report module resolution log messages. */,
"listEmittedFiles": false
/* Print names of generated files part of the compilation. */,
"listFiles": false
/* Print names of files part of the compilation. */,
"pretty": true
/* Stylize errors and messages using color and context. */,
/* Experimental Options */
// "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */,
// "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */,
"lib": [
"es2017"
],
"types": [
"mocha", "node"
],
"typeRoots": [
"node_modules/@types",
"src/types"
]
},
"parserOptions": {
"sourceType": "module",
"tsconfigRootDir": "./",
"project": "./tsconfig.json"
},
"include": [
"./tests/**/**/**.ts",
"./tests/**/**.ts"
// "**/*.ts",
// "**/*.json"
],
"exclude": [
],
"compileOnSave": false
}
Loading…
Cancel
Save