mirror of https://github.com/nocodb/nocodb
Pranav C
1 year ago
60 changed files with 1273057 additions and 1 deletions
@ -0,0 +1,16 @@
|
||||
import { Global, Module } from '@nestjs/common' |
||||
import { Connection } from '../../connection/connection' |
||||
import { MetaService } from '../../meta/meta.service' |
||||
|
||||
@Global() |
||||
@Module({ |
||||
providers:[ |
||||
Connection, |
||||
MetaService |
||||
], |
||||
exports: [ |
||||
Connection, |
||||
MetaService |
||||
] |
||||
}) |
||||
export class GlobalModule {} |
@ -0,0 +1,14 @@
|
||||
# Install wget so dockerize can be fetched below.
apt-get update && apt-get install -y wget

# Fetch dockerize (a wait-for-dependency helper) into /usr/local/bin.
# NOTE(review): DOCKERIZE_VERSION is assumed to be set by the caller/CI env.
wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz

echo "waiting for MSSQL........... "

# Block until the DB container responds on port 80, retrying every 5s,
# giving up after 20 minutes.
dockerize -wait http://$DOCKER_DB_HOST:80 -wait-retry-interval 5s -timeout 20m

echo "MSSQL is UP >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
@ -0,0 +1,23 @@
|
||||
## config.json |
||||
{ |
||||
"srcProject": "sample", |
||||
"dstProject": "sample-copy", |
||||
"baseURL": "http://localhost:8080", |
||||
"xc-auth": "Copy Auth Token" |
||||
} |
||||
- baseURL & xc-auth are common configurations for both import & export |
||||
|
||||
## Export |
||||
- `srcProject`: specify source project name to be exported. |
||||
- Export JSON file will be created as `srcProject.json` |
||||
- execute |
||||
`cd packages/nocodb/tests/export-import` |
||||
`node exportSchema.js` |
||||
|
||||
## Import |
||||
- `srcProject`: specify JSON file name to be imported (sans .JSON suffix) |
||||
- `dstProject`: new project name to be imported as |
||||
- Data will also be imported if `srcProject` exists in NocoDB. Note that data is copied live from the source project, not from the exported JSON
||||
- execute |
||||
`cd packages/nocodb/tests/export-import` |
||||
`node importSchema.js` |
@ -0,0 +1,7 @@
|
||||
{ |
||||
"srcProject": "sample", |
||||
"dstProject": "sample-copy", |
||||
"excludeDt": true, |
||||
"baseURL": "http://localhost:8080", |
||||
"xc-auth": "Copy Auth Token" |
||||
} |
@ -0,0 +1,276 @@
|
||||
// NocoDB SDK client and JSON file helpers.
const Api = require('nocodb-sdk').Api;
const { UITypes } = require('nocodb-sdk');
const jsonfile = require('jsonfile');

// Numeric view-type codes as used by the NocoDB API.
const GRID = 3, GALLERY = 2, FORM = 1;

// id -> human-readable name, populated by generateMapTbl()
let ncMap = { /* id: name <string> */ };
// accumulated per-table schemas; serialised at the end of the run
let tblSchema = [];
let api = {};
// per-view cache of columns/ sort/ filter details, keyed by view id
let viewStore = { columns: {}, sort: {}, filter: {} };

// config.json supplies the source project name, server URL and auth token
let inputConfig = jsonfile.readFileSync(`config.json`)
let ncConfig = {
  projectName: inputConfig.srcProject,
  baseURL: inputConfig.baseURL,
  headers: {
    'xc-auth': `${inputConfig["xc-auth"]}`
  }
};
||||
|
||||
|
||||
// helper routines
// Recursively drop entries whose value is null/ undefined/ 0/ false/ ''
// (loose != comparison, so '' == 0 is also filtered out).
// fixme: how to handle when cdf (default value) is configured as 0/ null/ false
function removeEmpty(obj) {
  return Object.fromEntries(
    Object.entries(obj)
      .filter(([_, v]) => v != null && v != 0 && v != false)
      // recurse only into nested plain objects; arrays are kept as-is
      // (Object.fromEntries would otherwise turn them into index-keyed objects)
      .map(([k, v]) => [
        k,
        v === Object(v) && !Array.isArray(v) ? removeEmpty(v) : v
      ])
  );
}
||||
|
||||
|
||||
// Reduce an API column object to the exportable subset and attach
// type-specific metadata for virtual columns (formula/ link/ lookup/ rollup).
function addColumnSpecificData(c) {
  // Fields to retain; the physical data type (dt) is included unless the
  // config asks for it to be excluded.
  const keep = inputConfig.excludeDt
    ? ['id', 'title', 'column_name', 'uidt', 'pk', 'pv', 'rqd', 'dtxp', 'system', 'ai']
    : ['id', 'title', 'column_name', 'uidt', 'dt', 'pk', 'pv', 'rqd', 'dtxp', 'system', 'ai'];

  const subset = {};
  for (const key of keep) subset[key] = c[key];
  const col = removeEmpty(subset);

  // Virtual column types carry extra relation/ formula configuration.
  switch (c.uidt) {
    case UITypes.Formula:
      col.formula = c.colOptions.formula;
      col.formula_raw = c.colOptions.formula_raw;
      break;
    case UITypes.LinkToAnotherRecord:
      col.colOptions = {
        fk_model_id: c.fk_model_id,
        fk_related_model_id: c.colOptions.fk_related_model_id,
        fk_child_column_id: c.colOptions.fk_child_column_id,
        fk_parent_column_id: c.colOptions.fk_parent_column_id,
        type: c.colOptions.type
      };
      break;
    case UITypes.Lookup:
      col.colOptions = {
        fk_model_id: c.fk_model_id,
        fk_relation_column_id: c.colOptions.fk_relation_column_id,
        fk_lookup_column_id: c.colOptions.fk_lookup_column_id
      };
      break;
    case UITypes.Rollup:
      col.colOptions = {
        fk_model_id: c.fk_model_id,
        fk_relation_column_id: c.colOptions.fk_relation_column_id,
        fk_rollup_column_id: c.colOptions.fk_rollup_column_id,
        rollup_function: c.colOptions.rollup_function
      };
      break;
  }

  return col;
}
||||
|
||||
// Reduce an API view object to the exportable subset: base properties,
// view-type specific settings, column layout, and sort/ filter config.
function addViewDetails(v) {
  // pick required fields to proceed further
  let view = (({ id, title, type, show_system_fields, lock_type, order }) => ({
    id,
    title,
    type,
    show_system_fields,
    lock_type,
    order
  }))(v);

  // form view: capture form-specific presentation properties
  if (v.type === FORM) {
    view.property = (({
      heading,
      subheading,
      success_msg,
      redirect_after_secs,
      email,
      submit_another_form,
      show_blank_form
    }) => ({
      heading,
      subheading,
      success_msg,
      redirect_after_secs,
      email,
      submit_another_form,
      show_blank_form
    }))(v.view);
  }

  // gallery view: record the cover image column by name (via ncMap)
  else if (v.type === GALLERY) {
    view.property = {
      fk_cover_image_col_id: ncMap[v.view.fk_cover_image_col_id]
    };
  }

  // gallery view doesn't share column information in api yet
  if (v.type !== GALLERY) {
    if (v.type === GRID)
      view.columns = viewStore.columns[v.id].map(a =>
        (({ id, width, order, show }) => ({ id, width, order, show }))(a)
      );
    if (v.type === FORM)
      view.columns = viewStore.columns[v.id].map(a =>
        (({ id, order, show, label, help, description, required }) => ({
          id,
          order,
          show,
          label,
          help,
          description,
          required
        }))(a)
      );

    // resolve each view-column id to its column title via ncMap
    for (let i = 0; i < view.columns?.length; i++)
      view.columns[i].title = ncMap[viewStore.columns[v.id][i].id];

    // skip hm & mm columns
    // (columns whose title could not be resolved are dropped too, since
    // undefined === false fails the filter predicate)
    view.columns = view.columns
      ?.filter(a => a.title?.includes('_nc_m2m_') === false)
      .filter(a => a.title?.includes('nc_') === false);
  }

  // filter & sort configurations (forms have neither)
  if (v.type !== FORM) {
    view.sort = viewStore.sort[v.id].map(a =>
      (({ fk_column_id, direction, order }) => ({
        fk_column_id,
        direction,
        order
      }))(a)
    );
    view.filter = viewStore.filter[v.id].map(a =>
      (({ fk_column_id, logical_op, comparison_op, value, order }) => ({
        fk_column_id,
        logical_op,
        comparison_op,
        value,
        order
      }))(a)
    );
  }
  return view;
}
||||
|
||||
// view data stored as is for quick access
// Populate viewStore.{columns, sort, filter} for every view of a table.
async function storeViewDetails(tableId) {
  // read view data for each table
  let viewList = await api.dbView.list(tableId);
  for (let j = 0; j < viewList.list.length; j++) {
    let v = viewList.list[j];
    let viewDetails = [];

    // invoke view specific read to populate columns information
    if (v.type === FORM) viewDetails = (await api.dbView.formRead(v.id)).columns;
    else if (v.type === GALLERY) viewDetails = await api.dbView.galleryRead(v.id);
    else if (v.type === GRID) viewDetails = await api.dbView.gridColumnsList(v.id);
    viewStore.columns[v.id] = viewDetails;

    // populate sort information
    let vSort = await api.dbTableSort.list(v.id);
    viewStore.sort[v.id] = vSort.sorts.list;

    // filter list is stored verbatim
    let vFilter = await api.dbTableFilter.read(v.id);
    viewStore.filter[v.id] = vFilter;
  }
}
||||
|
||||
// mapping table for quick information access
// store maps for tableId, columnId, viewColumnId & viewId to their names
async function generateMapTbl(pId) {
  const tblList = await api.dbTable.list(pId);

  for (let i = 0; i < tblList.list.length; i++) {
    let tblId = tblList.list[i].id;
    let tbl = await api.dbTable.read(tblId);

    // table ID <> name
    ncMap[tblId] = tbl.title;

    // column ID <> name
    tbl.columns.map(x => (ncMap[x.id] = x.title));

    // view ID <> name
    tbl.views.map(x => (ncMap[x.id] = x.tn));

    // fixed: inner loop previously reused `i`, shadowing the outer table index
    for (let j = 0; j < tbl.views.length; j++) {
      let x = tbl.views[j];
      let viewColumns = [];
      if (x.type === FORM) viewColumns = (await api.dbView.formRead(x.id)).columns;
      else if (x.type === GALLERY)
        viewColumns = (await api.dbView.galleryRead(x.id)).columns;
      else if (x.type === GRID) viewColumns = await api.dbView.gridColumnsList(x.id);

      // view column ID <> name (resolved through the column's own mapping)
      viewColumns?.map(a => (ncMap[a.id] = ncMap[a.fk_column_id]));
    }
  }
}
||||
|
||||
// main
|
||||
//
|
||||
// Walk every table in the source project and collect its exportable
// schema (columns + views) into the module-level tblSchema array.
async function exportSchema() {
  api = new Api(ncConfig);

  // fetch project details (id et.al)
  const x = await api.project.list();
  const p = x.list.find(a => a.title === ncConfig.projectName);

  await generateMapTbl(p.id);

  // read project
  const tblList = await api.dbTable.list(p.id);

  // for each table
  for (let i = 0; i < tblList.list.length; i++) {
    let tblId = tblList.list[i].id;
    await storeViewDetails(tblId);

    let tbl = await api.dbTable.read(tblId);

    // prepare schema
    let tSchema = {
      id: tbl.id,
      title: tbl.title,
      table_name: tbl?.table_name,
      columns: [...tbl.columns.map(c => addColumnSpecificData(c))]
        .filter(a => a.title.includes('_nc_m2m_') === false) // mm
        // NOTE(review): if p.prefix is undefined, includes(undefined)
        // searches for the literal string "undefined" — confirm the
        // project prefix is always set
        .filter(a => a.title.includes(p.prefix) === false) // hm
        .filter(
          a => !(a?.system === 1 && a.uidt === UITypes.LinkToAnotherRecord)
        ),
      views: [...tbl.views.map(v => addViewDetails(v))]
    };
    tblSchema.push(tSchema);
  }
}
||||
|
||||
// entry point: export the schema, then persist it as <project>.json
// (spaces in the project name are replaced with underscores)
(async () => {
  await exportSchema();
  const outFile = `${ncConfig.projectName.replace(/ /g, '_')}.json`;
  jsonfile.writeFileSync(outFile, tblSchema, { spaces: 2 });
})().catch(e => {
  console.log(e);
});
@ -0,0 +1,515 @@
|
||||
// tbd
// - formula dependency list
// - nested lookup/ rollup

const Api = require('nocodb-sdk').Api;
const { UITypes } = require('nocodb-sdk');
const jsonfile = require('jsonfile');

// config.json: srcProject = exported JSON (sans suffix), dstProject = project to create
let inputConfig = jsonfile.readFileSync(`config.json`)
let ncConfig = {
  srcProject: inputConfig.srcProject,
  projectName: inputConfig.dstProject,
  baseURL: inputConfig.baseURL,
  headers: {
    'xc-auth': `${inputConfig["xc-auth"]}`
  }
};
// schema previously produced by exportSchema.js
let ncIn = jsonfile.readFileSync(`${ncConfig.srcProject}.json`);

let api = {};
let ncProject = {};
// virtual columns queued by createBaseTables(), created in later passes
let link = [];
let lookup = [];
let rollup = [];
let formula = [];

// link columns created, kept for the data-restore phase
let rootLinks = [];

// maps v1 table ID, v2 table ID & table title to table schema
let ncTables = {};
||||
|
||||
|
||||
// Create every table with only its concrete (non-virtual) columns.
// Virtual columns (links/ lookups/ rollups/ formulas) are queued into
// the module-level arrays and created by later passes.
async function createBaseTables() {
  console.log(`createBaseTables`);
  for (let i = 0; i < ncIn.length; i++) {
    let tblSchema = ncIn[i];
    // concrete columns only — virtual types need their targets to exist first
    let reducedColumnSet = tblSchema.columns.filter(
      a =>
        a.uidt !== UITypes.LinkToAnotherRecord &&
        a.uidt !== UITypes.Lookup &&
        a.uidt !== UITypes.Rollup &&
        a.uidt !== UITypes.Formula
    );
    link.push(
      ...tblSchema.columns.filter(a => a.uidt === UITypes.LinkToAnotherRecord)
    );
    lookup.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Lookup));
    rollup.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Rollup));
    formula.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Formula));
    // stamp the owning table id so createFormula() knows where each goes
    formula.map(a => (a['table_id'] = tblSchema.id));

    // create the table; exported column ids are dropped so the server
    // assigns fresh ones
    let tbl = await api.dbTable.create(ncProject.id, {
      title: tblSchema.title,
      table_name: tblSchema.title,
      columns: reducedColumnSet.map(({ id, ...rest }) => ({ ...rest }))
    });
    // index the created table by title, new id and old (v1) id
    ncTables[tbl.title] = tbl;
    ncTables[tbl.id] = tbl;
    ncTables[tblSchema.id] = tbl;
  }
}
||||
|
||||
// mm links are symmetric: remember each (parent, child) pair so the
// mirrored column does not get created a second time.
let linksCreated = [];
function isLinkCreated(pId, cId) {
  // a link counts as already created when the swapped pair was recorded
  const seen = linksCreated.some(a => a.cId === pId && a.pId === cId);
  if (!seen) linksCreated.push({ pId, cId });
  return seen;
}
||||
|
||||
// retrieve nc-view column ID from corresponding nc-column ID
// Returns undefined when the column is not part of the view.
async function nc_getViewColumnId(viewId, viewType, ncColumnId) {
  // retrieve view Info (column list shape differs per view type)
  let viewDetails;

  if (viewType === 'form')
    viewDetails = (await api.dbView.formRead(viewId)).columns;
  else if (viewType === 'gallery')
    viewDetails = (await api.dbView.galleryRead(viewId)).columns;
  else viewDetails = await api.dbView.gridColumnsList(viewId);

  return viewDetails.find(x => x.fk_column_id === ncColumnId)?.id;
}
||||
|
||||
// Recreate formula columns in the destination project.
// note: formula dependency ordering is not handled yet (see tbd above)
async function createFormula() {
  for (let i = 0; i < formula.length; i++) {
    // table_id was stamped onto each formula entry by createBaseTables()
    // (previously the result was bound to an unused local)
    await api.dbTableColumn.create(ncTables[formula[i].table_id].id, {
      uidt: UITypes.Formula,
      title: formula[i].title,
      formula_raw: formula[i].formula_raw
    });
  }
}
||||
|
||||
// Recreate LinkToAnotherRecord columns. mm links are created once per
// pair (isLinkCreated guards the mirror); hm links are always created.
// After creation, the auto-generated symmetric column in the related
// table is renamed to the title recorded in the export.
async function createLinks() {
  console.log(`createLinks`);

  for (let i = 0; i < link.length; i++) {
    if (
      (link[i].colOptions.type === 'mm' &&
        false ===
          isLinkCreated(
            link[i].colOptions.fk_parent_column_id,
            link[i].colOptions.fk_child_column_id
          )) ||
      link[i].colOptions.type === 'hm'
    ) {
      let srcTbl = ncTables[link[i].colOptions.fk_model_id];
      let dstTbl = ncTables[link[i].colOptions.fk_related_model_id];

      // create link
      let tbl = await api.dbTableColumn.create(srcTbl.id, {
        uidt: UITypes.LinkToAnotherRecord,
        title: link[i].title,
        parentId: srcTbl.id,
        childId: dstTbl.id,
        type: link[i].colOptions.type
      });
      ncTables[tbl.title] = tbl;
      ncTables[tbl.id] = tbl;
      ncTables[link[i].colOptions.fk_model_id] = tbl;

      // for data-link procedure later
      rootLinks.push({ linkColumn: link[i], linkSrcTbl: srcTbl });

      // symmetry field update
      //
      // locate the auto-created mirror column in the related table:
      // for mm links the parent/ child ids are swapped relative to ours,
      // for hm links the generated bt column carries the same ids
      let v2ColSchema = tbl.columns.find(x => x.title === link[i].title);
      // read related table again after link is created
      dstTbl = await api.dbTable.read(dstTbl.id);
      let v2SymmetricColumn =
        link[i].colOptions.type === 'mm'
          ? dstTbl.columns.find(
              x =>
                x.uidt === UITypes.LinkToAnotherRecord &&
                x?.colOptions.fk_parent_column_id ===
                  v2ColSchema.colOptions.fk_child_column_id &&
                x?.colOptions.fk_child_column_id ===
                  v2ColSchema.colOptions.fk_parent_column_id
            )
          : dstTbl.columns.find(
              x =>
                x.uidt === UITypes.LinkToAnotherRecord &&
                x?.colOptions.fk_parent_column_id ===
                  v2ColSchema.colOptions.fk_parent_column_id &&
                x?.colOptions.fk_child_column_id ===
                  v2ColSchema.colOptions.fk_child_column_id
            );
      // the exported column describing the same mirror; supplies its title
      let v1SymmetricColumn =
        link[i].colOptions.type === 'mm'
          ? link.find(
              x =>
                x.colOptions.fk_parent_column_id ===
                  link[i].colOptions.fk_child_column_id &&
                x.colOptions.fk_child_column_id ===
                  link[i].colOptions.fk_parent_column_id &&
                x.colOptions.type === 'mm'
            )
          : link.find(
              x =>
                x.colOptions.fk_parent_column_id ===
                  link[i].colOptions.fk_parent_column_id &&
                x.colOptions.fk_child_column_id ===
                  link[i].colOptions.fk_child_column_id &&
                x.colOptions.type === 'bt'
            );

      // rename the mirror column to its exported title
      tbl = await api.dbTableColumn.update(v2SymmetricColumn.id, {
        ...v2SymmetricColumn,
        title: v1SymmetricColumn.title,
        column_name: null
      });
      ncTables[tbl.title] = tbl;
      ncTables[tbl.id] = tbl;
      ncTables[v1SymmetricColumn.colOptions.fk_model_id] = tbl;
    }
  }
}
||||
|
||||
// Translate a column id from the exported (v1) schema to its freshly
// created (v2) counterpart, matching by table and column title.
// Returns undefined when the column cannot be resolved.
function get_v2Id(v1ColId) {
  for (const tblSchema of ncIn) {
    const colSchema = tblSchema.columns.find(x => x.id === v1ColId);
    if (colSchema !== undefined) {
      const v2Tbl = ncTables[tblSchema.id];
      return v2Tbl.columns.find(y => y.title === colSchema.title)?.id;
    }
  }
}
||||
|
||||
// Create lookup columns, translating v1 relation/ lookup column ids to
// their v2 equivalents. Entries whose lookup target cannot be resolved
// are skipped silently (nested lookup — see tbd at top of file).
async function createLookup() {
  console.log(`createLookup`);

  for (let i = 0; i < lookup.length; i++) {
    let srcTbl = ncTables[lookup[i].colOptions.fk_model_id];
    let v2_fk_relation_column_id = get_v2Id(
      lookup[i].colOptions.fk_relation_column_id
    );
    let v2_lookup_column_id = get_v2Id(
      lookup[i].colOptions.fk_lookup_column_id
    );

    if (v2_lookup_column_id) {
      let tbl = await api.dbTableColumn.create(srcTbl.id, {
        uidt: UITypes.Lookup,
        title: lookup[i].title,
        fk_relation_column_id: v2_fk_relation_column_id,
        fk_lookup_column_id: v2_lookup_column_id
      });
      // refresh cached table schema under all three keys
      ncTables[tbl.title] = tbl;
      ncTables[tbl.id] = tbl;
      ncTables[lookup[i].colOptions.fk_model_id] = tbl;
    }
  }
}
||||
|
||||
// Create rollup columns, translating v1 relation/ rollup column ids to
// their v2 equivalents. Entries whose rollup target cannot be resolved
// are skipped silently (nested rollup — see tbd at top of file).
async function createRollup() {
  console.log(`createRollup`);

  for (let i = 0; i < rollup.length; i++) {
    let srcTbl = ncTables[rollup[i].colOptions.fk_model_id];
    let v2_fk_relation_column_id = get_v2Id(
      rollup[i].colOptions.fk_relation_column_id
    );
    let v2_rollup_column_id = get_v2Id(
      rollup[i].colOptions.fk_rollup_column_id
    );

    if (v2_rollup_column_id) {
      let tbl = await api.dbTableColumn.create(srcTbl.id, {
        uidt: UITypes.Rollup,
        title: rollup[i].title,
        column_name: rollup[i].title,
        fk_relation_column_id: v2_fk_relation_column_id,
        fk_rollup_column_id: v2_rollup_column_id,
        rollup_function: rollup[i].colOptions.rollup_function
      });
      // refresh cached table schema under all three keys
      ncTables[tbl.title] = tbl;
      ncTables[tbl.id] = tbl;
      ncTables[rollup[i].colOptions.fk_model_id] = tbl;
    }
  }
}
||||
|
||||
// Recreate grid views (type 3): view titles, column order/ visibility/
// width, and sort & filter settings.
async function configureGrid() {
  console.log(`configureGrid`);

  for (let i = 0; i < ncIn.length; i++) {
    let tblSchema = ncIn[i];
    let tblId = ncTables[tblSchema.id].id;
    let gridList = tblSchema.views.filter(a => a.type === 3);
    let srcTbl = await api.dbTable.read(tblId);

    const view = await api.dbView.list(tblId);

    // create / rename view
    for (let gridCnt = 0; gridCnt < gridList.length; gridCnt++) {
      let viewCreated = {};
      // rename first view; default view already created
      if (gridCnt === 0) {
        viewCreated = await api.dbView.update(view.list[0].id, {
          title: gridList[gridCnt].title
        });
      }
      // create new views
      else {
        viewCreated = await api.dbView.gridCreate(tblId, {
          title: gridList[gridCnt].title
        });
      }

      // retrieve view Info
      let viewId = viewCreated.id;
      let viewDetails = await api.dbView.gridColumnsList(viewId);

      // column visibility
      for (
        let colCnt = 0;
        colCnt < gridList[gridCnt].columns.length;
        colCnt++
      ) {
        // exported column title -> new column id -> new view-column id
        let ncColumnId = srcTbl.columns.find(
          a => a.title === gridList[gridCnt].columns[colCnt].title
        )?.id;
        let ncViewColumnId = viewDetails.find(
          x => x.fk_column_id === ncColumnId
        )?.id;
        // column order & visibility
        await api.dbViewColumn.update(viewCreated.id, ncViewColumnId, {
          show: gridList[gridCnt].columns[colCnt].show,
          order: gridList[gridCnt].columns[colCnt].order
        });
        await api.dbView.gridColumnUpdate(ncViewColumnId, {
          width: gridList[gridCnt].columns[colCnt].width
        });
      }

      // sort
      for (let sCnt = 0; sCnt < gridList[gridCnt].sort.length; sCnt++) {
        let sColName = tblSchema.columns.find(
          a => gridList[gridCnt].sort[sCnt].fk_column_id === a.id
        ).title;
        await api.dbTableSort.create(viewId, {
          fk_column_id: srcTbl.columns.find(a => a.title === sColName)?.id,
          direction: gridList[gridCnt].sort[sCnt].direction
        });
      }

      // filter
      for (let fCnt = 0; fCnt < gridList[gridCnt].filter.length; fCnt++) {
        // bugfix: previously indexed the .sort array here (copy-paste from
        // the loop above), resolving the wrong column — or throwing when
        // the view has more filters than sorts
        let fColName = tblSchema.columns.find(
          a => gridList[gridCnt].filter[fCnt].fk_column_id === a.id
        ).title;
        await api.dbTableFilter.create(viewId, {
          ...gridList[gridCnt].filter[fCnt],
          fk_column_id: srcTbl.columns.find(a => a.title === fColName)?.id
        });
      }
    }
  }
}
||||
|
||||
// Recreate gallery views (type 2) by title only. Cover image and column
// settings are not restored — the export does not carry gallery column
// information yet (see exporter comments).
async function configureGallery() {
  console.log(`configureGallery`);

  for (let i = 0; i < ncIn.length; i++) {
    let tblSchema = ncIn[i];
    let tblId = ncTables[tblSchema.id].id;
    let galleryList = tblSchema.views.filter(a => a.type === 2);
    for (let cnt = 0; cnt < galleryList.length; cnt++) {
      // (previously the result was bound to an unused local)
      await api.dbView.galleryCreate(tblId, {
        title: galleryList[cnt].title
      });
    }
  }
}
||||
|
||||
// Recreate form views (type 1) together with their per-column
// label/ description/ visibility/ required settings.
async function configureForm() {
  console.log(`configureForm`);

  for (let i = 0; i < ncIn.length; i++) {
    let tblSchema = ncIn[i];
    let tblId = ncTables[tblSchema.id].id;
    let formList = tblSchema.views.filter(a => a.type === 1);
    let srcTbl = await api.dbTable.read(tblId);

    for (let formCnt = 0; formCnt < formList.length; formCnt++) {
      // view title plus the exported form properties (heading, email, ...)
      const formData = {
        title: formList[formCnt].title,
        ...formList[formCnt].property
      };
      const viewCreated = await api.dbView.formCreate(tblId, formData);

      // column visibility
      for (
        let colCnt = 0;
        colCnt < formList[formCnt].columns.length;
        colCnt++
      ) {
        // exported column title -> new column id -> new view-column id
        let ncColumnId = srcTbl.columns.find(
          a => a.title === formList[formCnt].columns[colCnt].title
        )?.id;
        let ncViewColumnId = await nc_getViewColumnId(
          viewCreated.id,
          'form',
          ncColumnId
        );
        // column order & visibility
        await api.dbView.formColumnUpdate(ncViewColumnId, {
          show: formList[formCnt].columns[colCnt].show,
          order: formList[formCnt].columns[colCnt].order,
          label: formList[formCnt].columns[colCnt].label,
          description: formList[formCnt].columns[colCnt].description,
          required: formList[formCnt].columns[colCnt].required
        });
      }
    }
  }
}
||||
|
||||
// Copy rows table-by-table from the source project into the new one,
// paging 25 records at a time. Virtual fields are stripped from each
// record before insertion (links are restored later by restoreLinks()).
async function restoreBaseData() {
  console.log(`restoreBaseData`);

  for (let i = 0; i < ncIn.length; i++) {
    let tblSchema = ncIn[i];
    let tblId = ncTables[tblSchema.id].id;
    // primary-key column title, used to re-read the full record
    let pk = tblSchema.columns.find(a => a.pk).title;

    let moreRecords = true;
    let offset = 0,
      limit = 25;

    while (moreRecords) {
      let recList = await api.dbTableRow.list(
        'nc',
        ncConfig.srcProject,
        tblSchema.title,
        {},
        {
          query: { limit: limit, offset: offset }
        }
      );
      moreRecords = !recList.pageInfo.isLastPage;
      offset += limit;

      for (let recCnt = 0; recCnt < recList.list.length; recCnt++) {
        let record = await api.dbTableRow.read(
          'nc',
          ncConfig.srcProject,
          tblSchema.title,
          recList.list[recCnt][pk]
        );

        // post-processing on the record
        for (const [key, value] of Object.entries(record)) {
          let table = ncTables[tblId];
          // retrieve datatype
          const dt = table.columns.find(x => x.title === key)?.uidt;
          // drop virtual fields — they can't be inserted directly
          if (dt === UITypes.LinkToAnotherRecord) delete record[key];
          if (dt === UITypes.Lookup) delete record[key];
          if (dt === UITypes.Rollup) delete record[key];
        }
        await api.dbTableRow.create(
          'nc',
          ncConfig.projectName,
          tblSchema.title,
          record
        );
      }
    }
  }
}
||||
|
||||
// Re-establish row links for every link column recorded in rootLinks,
// paging through the source table 25 rows at a time.
// NOTE(review): only the first linked record (linkField[0]) is attached
// per row — confirm whether multi-record links are intentionally skipped.
async function restoreLinks() {
  console.log(`restoreLinks`);

  for (let i = 0; i < rootLinks.length; i++) {
    let pk = rootLinks[i].linkSrcTbl.columns.find(a => a.pk).title;
    let moreRecords = true;
    let offset = 0,
      limit = 25;

    while (moreRecords) {
      let recList = await api.dbTableRow.list(
        'nc',
        ncConfig.srcProject,
        rootLinks[i].linkSrcTbl.title,
        {},
        {
          query: { limit: limit, offset: offset }
        }
      );
      moreRecords = !recList.pageInfo.isLastPage;
      offset += limit;

      for (let recCnt = 0; recCnt < recList.list.length; recCnt++) {
        let record = await api.dbTableRow.read(
          'nc',
          ncConfig.srcProject,
          rootLinks[i].linkSrcTbl.title,
          recList.list[recCnt][pk]
        );
        let linkField = record[rootLinks[i].linkColumn.title];
        if (linkField.length) {
          await api.dbTableRow.nestedAdd(
            'nc',
            ncConfig.projectName,
            rootLinks[i].linkSrcTbl.title,
            record[pk],
            rootLinks[i].linkColumn.colOptions.type,
            encodeURIComponent(rootLinks[i].linkColumn.title),
            linkField[0][pk]
          );
        }
      }
    }
  }
}
||||
|
||||
// Orchestrates the import: (re)create the destination project, build
// tables and virtual columns in dependency order, configure views, then
// restore data when the source project is reachable.
async function importSchema() {
  api = new Api(ncConfig);

  // destructive: an existing project with the destination name is deleted
  const x = await api.project.list();
  const p = x.list.find(a => a.title === ncConfig.projectName);
  if (p) await api.project.delete(p.id);
  ncProject = await api.project.create({ title: ncConfig.projectName });

  // schema first: base tables, then virtual columns that depend on them
  await createBaseTables();
  await createLinks();
  await createLookup();
  await createRollup();
  await createFormula();

  // configure views
  await configureGrid();
  await configureGallery();
  await configureForm();

  // restore data only if source project exists
  const p2 = x.list.find(a => a.title === ncConfig.srcProject);
  if (p2 !== undefined) {
    await restoreBaseData();
    await restoreLinks();
  }
}
||||
// entry point: run the import and report completion or the first error
async function main() {
  await importSchema();
  console.log('completed');
}
main().catch(e => console.log(e));
@ -0,0 +1,21 @@
|
||||
-- Delete data
-- Child tables are cleared before their parents so FK constraints are
-- never violated.
ALTER TABLE staff DROP FOREIGN KEY fk_staff_store , DROP FOREIGN KEY fk_staff_address;
DELETE FROM payment ;
DELETE FROM rental ;
DELETE FROM customer ;
DELETE FROM film_category ;
DELETE FROM film_text ;
DELETE FROM film_actor ;
DELETE FROM inventory ;
DELETE FROM film ;
DELETE FROM category ;
-- store <-> staff reference each other: temporarily allow a NULL manager
-- so the cycle can be broken before clearing both tables
ALTER TABLE store CHANGE COLUMN manager_staff_id manager_staff_id TINYINT UNSIGNED NULL;
update store set manager_staff_id=null;
DELETE FROM staff ;
DELETE FROM store ;
DELETE FROM actor ;
DELETE FROM address ;
DELETE FROM city ;
DELETE FROM country ;
DELETE FROM language ;
-- restore the NOT NULL constraint on the manager column
ALTER TABLE store CHANGE COLUMN manager_staff_id manager_staff_id TINYINT UNSIGNED NOT NULL;
@ -0,0 +1,37 @@
|
||||
-- Drop Views
-- Views first: they depend on the base tables dropped below.

DROP VIEW customer_list;
DROP VIEW film_list;
DROP VIEW nicer_but_slower_film_list;
DROP VIEW sales_by_film_category;
DROP VIEW sales_by_store;
DROP VIEW staff_list;

-- Drop Tables
-- Child tables before parents to satisfy FK constraints.

DROP TABLE payment;
DROP TABLE rental;
DROP TABLE inventory;
DROP TABLE film_text;
DROP TABLE film_category;
DROP TABLE film_actor;
DROP TABLE film;
DROP TABLE language;
DROP TABLE customer;
DROP TABLE actor;
DROP TABLE category;
-- break the store <-> staff FK cycle before dropping either table
ALTER TABLE staff DROP FOREIGN KEY fk_staff_store , DROP FOREIGN KEY fk_staff_address;
DROP TABLE store;
DROP TABLE address;
DROP TABLE staff;
DROP TABLE city;
DROP TABLE country;

-- Procedures and views
drop procedure film_in_stock;
drop procedure film_not_in_stock;
drop function get_customer_balance;
drop function inventory_held_by_customer;
drop function inventory_in_stock;
drop procedure rewards_report;
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,643 @@
|
||||
-- Sakila Sample Database Schema |
||||
-- Version 0.8 |
||||
|
||||
-- Copyright (c) 2006, MySQL AB |
||||
-- All rights reserved. |
||||
|
||||
-- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: |
||||
|
||||
-- * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. |
||||
-- * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. |
||||
-- * Neither the name of MySQL AB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. |
||||
|
||||
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
|
||||
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; |
||||
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; |
||||
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL'; |
||||
|
||||
DROP SCHEMA IF EXISTS sakila; |
||||
CREATE SCHEMA sakila; |
||||
USE sakila; |
||||
|
||||
-- |
||||
-- Table structure for table `actor` |
||||
-- |
||||
|
||||
CREATE TABLE actor ( |
||||
actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (actor_id), |
||||
KEY idx_actor_last_name (last_name) |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `address` |
||||
-- |
||||
|
||||
CREATE TABLE address ( |
||||
address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
address VARCHAR(50) NOT NULL, |
||||
address2 VARCHAR(50) DEFAULT NULL, |
||||
district VARCHAR(20) NOT NULL, |
||||
city_id SMALLINT UNSIGNED NOT NULL, |
||||
postal_code VARCHAR(10) DEFAULT NULL, |
||||
phone VARCHAR(20) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (address_id), |
||||
KEY idx_fk_city_id (city_id), |
||||
CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `category` |
||||
-- |
||||
|
||||
CREATE TABLE category ( |
||||
category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
name VARCHAR(25) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (category_id) |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `city` |
||||
-- |
||||
|
||||
CREATE TABLE city ( |
||||
city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
city VARCHAR(50) NOT NULL, |
||||
country_id SMALLINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (city_id), |
||||
KEY idx_fk_country_id (country_id), |
||||
CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `country` |
||||
-- |
||||
|
||||
CREATE TABLE country ( |
||||
country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
country VARCHAR(50) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (country_id) |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `customer` |
||||
-- |
||||
|
||||
CREATE TABLE customer ( |
||||
customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
store_id TINYINT UNSIGNED NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
address_id SMALLINT UNSIGNED NOT NULL, |
||||
active BOOLEAN NOT NULL DEFAULT TRUE, |
||||
create_date DATETIME NOT NULL, |
||||
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (customer_id), |
||||
KEY idx_fk_store_id (store_id), |
||||
KEY idx_fk_address_id (address_id), |
||||
KEY idx_last_name (last_name), |
||||
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `film` |
||||
-- |
||||
|
||||
CREATE TABLE film ( |
||||
film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
title VARCHAR(255) NOT NULL, |
||||
description TEXT DEFAULT NULL, |
||||
release_year YEAR DEFAULT NULL, |
||||
language_id TINYINT UNSIGNED NOT NULL, |
||||
original_language_id TINYINT UNSIGNED DEFAULT NULL, |
||||
rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3, |
||||
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99, |
||||
length SMALLINT UNSIGNED DEFAULT NULL, |
||||
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99, |
||||
rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G', |
||||
special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (film_id), |
||||
KEY idx_title (title), |
||||
KEY idx_fk_language_id (language_id), |
||||
KEY idx_fk_original_language_id (original_language_id), |
||||
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `film_actor` |
||||
-- |
||||
|
||||
CREATE TABLE film_actor ( |
||||
actor_id SMALLINT UNSIGNED NOT NULL, |
||||
film_id SMALLINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (actor_id,film_id), |
||||
KEY idx_fk_film_id (`film_id`), |
||||
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `film_category` |
||||
-- |
||||
|
||||
CREATE TABLE film_category ( |
||||
film_id SMALLINT UNSIGNED NOT NULL, |
||||
category_id TINYINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (film_id, category_id), |
||||
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `film_text` |
||||
-- |
||||
|
||||
CREATE TABLE film_text ( |
||||
film_id SMALLINT NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description TEXT, |
||||
PRIMARY KEY (film_id), |
||||
FULLTEXT KEY idx_title_description (title,description) |
||||
)ENGINE=MyISAM DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Triggers for loading film_text from film |
||||
-- |
||||
|
||||
DELIMITER ;; |
||||
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN |
||||
INSERT INTO film_text (film_id, title, description) |
||||
VALUES (new.film_id, new.title, new.description); |
||||
END;; |
||||
|
||||
|
||||
CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN |
||||
IF (old.title != new.title) or (old.description != new.description) |
||||
THEN |
||||
UPDATE film_text |
||||
SET title=new.title, |
||||
description=new.description, |
||||
film_id=new.film_id |
||||
WHERE film_id=old.film_id; |
||||
END IF; |
||||
END;; |
||||
|
||||
|
||||
CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN |
||||
DELETE FROM film_text WHERE film_id = old.film_id; |
||||
END;; |
||||
|
||||
DELIMITER ; |
||||
|
||||
-- |
||||
-- Table structure for table `inventory` |
||||
-- |
||||
|
||||
CREATE TABLE inventory ( |
||||
inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
film_id SMALLINT UNSIGNED NOT NULL, |
||||
store_id TINYINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (inventory_id), |
||||
KEY idx_fk_film_id (film_id), |
||||
KEY idx_store_id_film_id (store_id,film_id), |
||||
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `language` |
||||
-- |
||||
|
||||
CREATE TABLE language ( |
||||
language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
name CHAR(20) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (language_id) |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `payment` |
||||
-- |
||||
|
||||
CREATE TABLE payment ( |
||||
payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
customer_id SMALLINT UNSIGNED NOT NULL, |
||||
staff_id TINYINT UNSIGNED NOT NULL, |
||||
rental_id INT DEFAULT NULL, |
||||
amount DECIMAL(5,2) NOT NULL, |
||||
payment_date DATETIME NOT NULL, |
||||
last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (payment_id), |
||||
KEY idx_fk_staff_id (staff_id), |
||||
KEY idx_fk_customer_id (customer_id), |
||||
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE, |
||||
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table `rental` |
||||
-- |
||||
|
||||
CREATE TABLE rental ( |
||||
rental_id INT NOT NULL AUTO_INCREMENT, |
||||
rental_date DATETIME NOT NULL, |
||||
inventory_id MEDIUMINT UNSIGNED NOT NULL, |
||||
customer_id SMALLINT UNSIGNED NOT NULL, |
||||
return_date DATETIME DEFAULT NULL, |
||||
staff_id TINYINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (rental_id), |
||||
UNIQUE KEY (rental_date,inventory_id,customer_id), |
||||
KEY idx_fk_inventory_id (inventory_id), |
||||
KEY idx_fk_customer_id (customer_id), |
||||
KEY idx_fk_staff_id (staff_id), |
||||
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `staff` |
||||
-- |
||||
|
||||
CREATE TABLE staff ( |
||||
staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
address_id SMALLINT UNSIGNED NOT NULL, |
||||
picture MEDIUMBLOB DEFAULT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
store_id TINYINT UNSIGNED NOT NULL, |
||||
active BOOLEAN NOT NULL DEFAULT TRUE, |
||||
username VARCHAR(16) NOT NULL, |
||||
password VARCHAR(40) BINARY DEFAULT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (staff_id), |
||||
KEY idx_fk_store_id (store_id), |
||||
KEY idx_fk_address_id (address_id), |
||||
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- Table structure for table `store` |
||||
-- |
||||
|
||||
CREATE TABLE store ( |
||||
store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, |
||||
manager_staff_id TINYINT UNSIGNED NOT NULL, |
||||
address_id SMALLINT UNSIGNED NOT NULL, |
||||
last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, |
||||
PRIMARY KEY (store_id), |
||||
UNIQUE KEY idx_unique_manager (manager_staff_id), |
||||
KEY idx_fk_address_id (address_id), |
||||
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE, |
||||
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE |
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8; |
||||
|
||||
-- |
||||
-- View structure for view `customer_list` |
||||
-- |
||||
|
||||
CREATE VIEW customer_list |
||||
AS |
||||
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, |
||||
a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8'active',_utf8'') AS notes, cu.store_id AS SID |
||||
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id; |
||||
|
||||
-- |
||||
-- View structure for view `film_list` |
||||
-- |
||||
|
||||
CREATE VIEW film_list |
||||
AS |
||||
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price, |
||||
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8' ', actor.last_name) SEPARATOR ', ') AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
GROUP BY film.film_id; |
||||
|
||||
-- |
||||
-- View structure for view `nicer_but_slower_film_list` |
||||
-- |
||||
|
||||
CREATE VIEW nicer_but_slower_film_list |
||||
AS |
||||
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price, |
||||
film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)), |
||||
LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)), |
||||
LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
GROUP BY film.film_id; |
||||
|
||||
-- |
||||
-- View structure for view `staff_list` |
||||
-- |
||||
|
||||
CREATE VIEW staff_list |
||||
AS |
||||
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone, |
||||
city.city AS city, country.country AS country, s.store_id AS SID |
||||
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id; |
||||
|
||||
-- |
||||
-- View structure for view `sales_by_store` |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_store |
||||
AS |
||||
SELECT |
||||
CONCAT(c.city, _utf8',', cy.country) AS store |
||||
, CONCAT(m.first_name, _utf8' ', m.last_name) AS manager |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN store AS s ON i.store_id = s.store_id |
||||
INNER JOIN address AS a ON s.address_id = a.address_id |
||||
INNER JOIN city AS c ON a.city_id = c.city_id |
||||
INNER JOIN country AS cy ON c.country_id = cy.country_id |
||||
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id |
||||
GROUP BY s.store_id |
||||
ORDER BY cy.country, c.city; |
||||
|
||||
-- |
||||
-- View structure for view `sales_by_film_category` |
||||
-- |
||||
-- Note that total sales will add up to >100% because |
||||
-- some titles belong to more than 1 category |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_film_category |
||||
AS |
||||
SELECT |
||||
c.name AS category |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN film AS f ON i.film_id = f.film_id |
||||
INNER JOIN film_category AS fc ON f.film_id = fc.film_id |
||||
INNER JOIN category AS c ON fc.category_id = c.category_id |
||||
GROUP BY c.name |
||||
ORDER BY total_sales DESC; |
||||
|
||||
-- |
||||
-- View structure for view `actor_info` |
||||
-- |
||||
|
||||
CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info |
||||
AS |
||||
SELECT |
||||
a.actor_id, |
||||
a.first_name, |
||||
a.last_name, |
||||
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ', |
||||
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ') |
||||
FROM sakila.film f |
||||
INNER JOIN sakila.film_category fc |
||||
ON f.film_id = fc.film_id |
||||
INNER JOIN sakila.film_actor fa |
||||
ON f.film_id = fa.film_id |
||||
WHERE fc.category_id = c.category_id |
||||
AND fa.actor_id = a.actor_id |
||||
) |
||||
) |
||||
ORDER BY c.name SEPARATOR '; ') |
||||
AS film_info |
||||
FROM sakila.actor a |
||||
LEFT JOIN sakila.film_actor fa |
||||
ON a.actor_id = fa.actor_id |
||||
LEFT JOIN sakila.film_category fc |
||||
ON fa.film_id = fc.film_id |
||||
LEFT JOIN sakila.category c |
||||
ON fc.category_id = c.category_id |
||||
GROUP BY a.actor_id, a.first_name, a.last_name; |
||||
|
||||
-- |
||||
-- Procedure structure for procedure `rewards_report` |
||||
-- |
||||
|
||||
DELIMITER // |
||||
|
||||
CREATE PROCEDURE rewards_report ( |
||||
IN min_monthly_purchases TINYINT UNSIGNED |
||||
, IN min_dollar_amount_purchased DECIMAL(10,2) UNSIGNED |
||||
, OUT count_rewardees INT |
||||
) |
||||
LANGUAGE SQL |
||||
NOT DETERMINISTIC |
||||
READS SQL DATA |
||||
SQL SECURITY DEFINER |
||||
COMMENT 'Provides a customizable report on best customers' |
||||
proc: BEGIN |
||||
|
||||
DECLARE last_month_start DATE; |
||||
DECLARE last_month_end DATE; |
||||
|
||||
/* Some sanity checks... */ |
||||
IF min_monthly_purchases = 0 THEN |
||||
SELECT 'Minimum monthly purchases parameter must be > 0'; |
||||
LEAVE proc; |
||||
END IF; |
||||
IF min_dollar_amount_purchased = 0.00 THEN |
||||
SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00'; |
||||
LEAVE proc; |
||||
END IF; |
||||
|
||||
/* Determine start and end time periods */ |
||||
SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH); |
||||
SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d'); |
||||
SET last_month_end = LAST_DAY(last_month_start); |
||||
|
||||
/* |
||||
Create a temporary storage area for |
||||
Customer IDs. |
||||
*/ |
||||
CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY); |
||||
|
||||
/* |
||||
Find all customers meeting the |
||||
monthly purchase requirements |
||||
*/ |
||||
INSERT INTO tmpCustomer (customer_id) |
||||
SELECT p.customer_id |
||||
FROM payment AS p |
||||
WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end |
||||
GROUP BY customer_id |
||||
HAVING SUM(p.amount) > min_dollar_amount_purchased |
||||
AND COUNT(customer_id) > min_monthly_purchases; |
||||
|
||||
/* Populate OUT parameter with count of found customers */ |
||||
SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees; |
||||
|
||||
/* |
||||
Output ALL customer information of matching rewardees. |
||||
Customize output as needed. |
||||
*/ |
||||
SELECT c.* |
||||
FROM tmpCustomer AS t |
||||
INNER JOIN customer AS c ON t.customer_id = c.customer_id; |
||||
|
||||
/* Clean up */ |
||||
DROP TABLE tmpCustomer; |
||||
END // |
||||
|
||||
DELIMITER ; |
||||
|
||||
DELIMITER $$ |
||||
|
||||
CREATE FUNCTION get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2) |
||||
DETERMINISTIC |
||||
READS SQL DATA |
||||
BEGIN |
||||
|
||||
#OK, WE NEED TO CALCULATE THE CURRENT BALANCE GIVEN A CUSTOMER_ID AND A DATE |
||||
#THAT WE WANT THE BALANCE TO BE EFFECTIVE FOR. THE BALANCE IS: |
||||
# 1) RENTAL FEES FOR ALL PREVIOUS RENTALS |
||||
# 2) ONE DOLLAR FOR EVERY DAY THE PREVIOUS RENTALS ARE OVERDUE |
||||
# 3) IF A FILM IS MORE THAN RENTAL_DURATION * 2 OVERDUE, CHARGE THE REPLACEMENT_COST |
||||
# 4) SUBTRACT ALL PAYMENTS MADE BEFORE THE DATE SPECIFIED |
||||
|
||||
DECLARE v_rentfees DECIMAL(5,2); #FEES PAID TO RENT THE VIDEOS INITIALLY |
||||
DECLARE v_overfees INTEGER; #LATE FEES FOR PRIOR RENTALS |
||||
DECLARE v_payments DECIMAL(5,2); #SUM OF PAYMENTS MADE PREVIOUSLY |
||||
|
||||
SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees |
||||
FROM film, inventory, rental |
||||
WHERE film.film_id = inventory.film_id |
||||
AND inventory.inventory_id = rental.inventory_id |
||||
AND rental.rental_date <= p_effective_date |
||||
AND rental.customer_id = p_customer_id; |
||||
|
||||
SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration, |
||||
((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees |
||||
FROM rental, inventory, film |
||||
WHERE film.film_id = inventory.film_id |
||||
AND inventory.inventory_id = rental.inventory_id |
||||
AND rental.rental_date <= p_effective_date |
||||
AND rental.customer_id = p_customer_id; |
||||
|
||||
|
||||
SELECT IFNULL(SUM(payment.amount),0) INTO v_payments |
||||
FROM payment |
||||
|
||||
WHERE payment.payment_date <= p_effective_date |
||||
AND payment.customer_id = p_customer_id; |
||||
|
||||
RETURN v_rentfees + v_overfees - v_payments; |
||||
END $$ |
||||
|
||||
DELIMITER ; |
||||
|
||||
DELIMITER $$ |
||||
|
||||
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT) |
||||
READS SQL DATA |
||||
BEGIN |
||||
SELECT inventory_id |
||||
FROM inventory |
||||
WHERE film_id = p_film_id |
||||
AND store_id = p_store_id |
||||
AND inventory_in_stock(inventory_id); |
||||
|
||||
SELECT FOUND_ROWS() INTO p_film_count; |
||||
END $$ |
||||
|
||||
DELIMITER ; |
||||
|
||||
DELIMITER $$ |
||||
|
||||
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT) |
||||
READS SQL DATA |
||||
BEGIN |
||||
SELECT inventory_id |
||||
FROM inventory |
||||
WHERE film_id = p_film_id |
||||
AND store_id = p_store_id |
||||
AND NOT inventory_in_stock(inventory_id); |
||||
|
||||
SELECT FOUND_ROWS() INTO p_film_count; |
||||
END $$ |
||||
|
||||
DELIMITER ; |
||||
|
||||
DELIMITER $$ |
||||
|
||||
CREATE FUNCTION inventory_held_by_customer(p_inventory_id INT) RETURNS INT |
||||
READS SQL DATA |
||||
BEGIN |
||||
DECLARE v_customer_id INT; |
||||
DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL; |
||||
|
||||
SELECT customer_id INTO v_customer_id |
||||
FROM rental |
||||
WHERE return_date IS NULL |
||||
AND inventory_id = p_inventory_id; |
||||
|
||||
RETURN v_customer_id; |
||||
END $$ |
||||
|
||||
DELIMITER ; |
||||
|
||||
DELIMITER $$ |
||||
|
||||
CREATE FUNCTION inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN |
||||
READS SQL DATA |
||||
BEGIN |
||||
DECLARE v_rentals INT; |
||||
DECLARE v_out INT; |
||||
|
||||
#AN ITEM IS IN-STOCK IF THERE ARE EITHER NO ROWS IN THE rental TABLE |
||||
#FOR THE ITEM OR ALL ROWS HAVE return_date POPULATED |
||||
|
||||
SELECT COUNT(*) INTO v_rentals |
||||
FROM rental |
||||
WHERE inventory_id = p_inventory_id; |
||||
|
||||
IF v_rentals = 0 THEN |
||||
RETURN TRUE; |
||||
END IF; |
||||
|
||||
SELECT COUNT(rental_id) INTO v_out |
||||
FROM inventory LEFT JOIN rental USING(inventory_id) |
||||
WHERE inventory.inventory_id = p_inventory_id |
||||
AND rental.return_date IS NULL; |
||||
|
||||
IF v_out > 0 THEN |
||||
RETURN FALSE; |
||||
ELSE |
||||
RETURN TRUE; |
||||
END IF; |
||||
END $$ |
||||
|
||||
DELIMITER ; |
||||
|
||||
SET SQL_MODE=@OLD_SQL_MODE; |
||||
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; |
||||
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; |
||||
|
||||
|
@ -0,0 +1,642 @@
|
||||
-- Sakila Sample Database Schema
-- Version 0.8

-- Copyright (c) 2006, MySQL AB
-- All rights reserved.

-- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

-- * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-- * Neither the name of MySQL AB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-- Save the current session settings, then relax uniqueness / foreign-key
-- checks so objects can be created in any order while the schema loads.
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL';

-- Recreate the schema from scratch.
DROP SCHEMA IF EXISTS sakila;
CREATE SCHEMA sakila;
USE sakila;

--
-- Table structure for table `actor`
--

CREATE TABLE actor (
  actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  first_name VARCHAR(45) NOT NULL,
  last_name VARCHAR(45) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (actor_id),
  KEY idx_actor_last_name (last_name)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `address`
--

CREATE TABLE address (
  address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  address VARCHAR(50) NOT NULL,
  address2 VARCHAR(50) DEFAULT NULL,
  district VARCHAR(20) NOT NULL,
  city_id SMALLINT UNSIGNED NOT NULL,
  postal_code VARCHAR(10) DEFAULT NULL,
  phone VARCHAR(20) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (address_id),
  KEY idx_fk_city_id (city_id),
  CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `category`
--

CREATE TABLE category (
  category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  name VARCHAR(25) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (category_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `city`
--

CREATE TABLE city (
  city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  city VARCHAR(50) NOT NULL,
  country_id SMALLINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (city_id),
  KEY idx_fk_country_id (country_id),
  CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

||||
--
-- Table structure for table `country`
--

CREATE TABLE country (
  country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  country VARCHAR(50) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (country_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `customer`
--

CREATE TABLE customer (
  customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  store_id TINYINT UNSIGNED NOT NULL,
  first_name VARCHAR(45) NOT NULL,
  last_name VARCHAR(45) NOT NULL,
  email VARCHAR(50) DEFAULT NULL,
  address_id SMALLINT UNSIGNED NOT NULL,
  active BOOLEAN NOT NULL DEFAULT TRUE,
  create_date DATETIME NOT NULL,
  -- NOTE(review): unlike most tables, last_update here lacks NOT NULL; kept as-is.
  last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (customer_id),
  KEY idx_fk_store_id (store_id),
  KEY idx_fk_address_id (address_id),
  KEY idx_last_name (last_name),
  CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `film`
--

CREATE TABLE film (
  film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  title VARCHAR(255) NOT NULL,
  description TEXT DEFAULT NULL,
  release_year YEAR DEFAULT NULL,
  language_id TINYINT UNSIGNED NOT NULL,
  original_language_id TINYINT UNSIGNED DEFAULT NULL,
  rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3,
  rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
  length SMALLINT UNSIGNED DEFAULT NULL,
  replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
  rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G',
  special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (film_id),
  KEY idx_title (title),
  KEY idx_fk_language_id (language_id),
  KEY idx_fk_original_language_id (original_language_id),
  CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `film_actor` (film <-> actor junction)
--

CREATE TABLE film_actor (
  actor_id SMALLINT UNSIGNED NOT NULL,
  film_id SMALLINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (actor_id,film_id),
  KEY idx_fk_film_id (`film_id`),
  CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `film_category` (film <-> category junction)
--

CREATE TABLE film_category (
  film_id SMALLINT UNSIGNED NOT NULL,
  category_id TINYINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (film_id, category_id),
  CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `film_text`
--
-- MyISAM copy of film title/description kept in sync by triggers so the
-- FULLTEXT index can be used (InnoDB of this era lacked FULLTEXT support).
--

CREATE TABLE film_text (
  film_id SMALLINT NOT NULL,
  title VARCHAR(255) NOT NULL,
  description TEXT,
  PRIMARY KEY (film_id),
  FULLTEXT KEY idx_title_description (title,description)
)ENGINE=MyISAM DEFAULT CHARSET=utf8;

||||
--
-- Triggers for loading film_text from film
--
-- Keep the MyISAM film_text copy in sync with the film table on every
-- insert, update, and delete.
--

DELIMITER ;;
CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN
    INSERT INTO film_text (film_id, title, description)
        VALUES (new.film_id, new.title, new.description);
  END;;

CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN
    -- Only touch film_text when a mirrored column actually changed.
    IF (old.title != new.title) or (old.description != new.description)
    THEN
        UPDATE film_text
            SET title=new.title,
                description=new.description,
                film_id=new.film_id
        WHERE film_id=old.film_id;
    END IF;
  END;;

CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN
    DELETE FROM film_text WHERE film_id = old.film_id;
  END;;

DELIMITER ;

||||
--
-- Table structure for table `inventory`
--

CREATE TABLE inventory (
  inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT,
  film_id SMALLINT UNSIGNED NOT NULL,
  store_id TINYINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (inventory_id),
  KEY idx_fk_film_id (film_id),
  KEY idx_store_id_film_id (store_id,film_id),
  CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `language`
--

CREATE TABLE language (
  language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  name CHAR(20) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (language_id)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

||||
--
-- Table structure for table `payment`
--

CREATE TABLE payment (
  payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  customer_id SMALLINT UNSIGNED NOT NULL,
  staff_id TINYINT UNSIGNED NOT NULL,
  -- rental_id is nullable so the payment survives deletion of its rental
  -- (ON DELETE SET NULL below).
  rental_id INT DEFAULT NULL,
  amount DECIMAL(5,2) NOT NULL,
  payment_date DATETIME NOT NULL,
  last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (payment_id),
  KEY idx_fk_staff_id (staff_id),
  KEY idx_fk_customer_id (customer_id),
  CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
  CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `rental`
--

CREATE TABLE rental (
  rental_id INT NOT NULL AUTO_INCREMENT,
  rental_date DATETIME NOT NULL,
  inventory_id MEDIUMINT UNSIGNED NOT NULL,
  customer_id SMALLINT UNSIGNED NOT NULL,
  -- NULL return_date marks a rental that is still open.
  return_date DATETIME DEFAULT NULL,
  staff_id TINYINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (rental_id),
  UNIQUE KEY (rental_date,inventory_id,customer_id),
  KEY idx_fk_inventory_id (inventory_id),
  KEY idx_fk_customer_id (customer_id),
  KEY idx_fk_staff_id (staff_id),
  CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

||||
--
-- Table structure for table `staff`
--

CREATE TABLE staff (
  staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  first_name VARCHAR(45) NOT NULL,
  last_name VARCHAR(45) NOT NULL,
  address_id SMALLINT UNSIGNED NOT NULL,
  picture BLOB DEFAULT NULL,
  email VARCHAR(50) DEFAULT NULL,
  store_id TINYINT UNSIGNED NOT NULL,
  active BOOLEAN NOT NULL DEFAULT TRUE,
  username VARCHAR(16) NOT NULL,
  -- BINARY collation makes password comparison case-sensitive.
  password VARCHAR(40) BINARY DEFAULT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (staff_id),
  KEY idx_fk_store_id (store_id),
  KEY idx_fk_address_id (address_id),
  CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Table structure for table `store`
--

CREATE TABLE store (
  store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  manager_staff_id TINYINT UNSIGNED NOT NULL,
  address_id SMALLINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (store_id),
  -- A staff member may manage at most one store.
  UNIQUE KEY idx_unique_manager (manager_staff_id),
  KEY idx_fk_address_id (address_id),
  CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
)ENGINE=InnoDB DEFAULT CHARSET=utf8;

||||
--
-- View structure for view `customer_list`
--

CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`,
    a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8'active',_utf8'') AS notes, cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
    JOIN country ON city.country_id = country.country_id;

--
-- View structure for view `film_list`
--

CREATE VIEW film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
    film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8' ', actor.last_name) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
    JOIN film_actor ON film.film_id = film_actor.film_id
    JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id;

--
-- View structure for view `nicer_but_slower_film_list`
--
-- Same as film_list but title-cases each actor name in SQL, which is
-- slower to evaluate.
--

CREATE VIEW nicer_but_slower_film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
    film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)),
    LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)),
    LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
    JOIN film_actor ON film.film_id = film_actor.film_id
    JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id;

--
-- View structure for view `staff_list`
--

CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone,
    city.city AS city, country.country AS country, s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
    JOIN country ON city.country_id = country.country_id;

||||
--
-- View structure for view `sales_by_store`
--

CREATE VIEW sales_by_store
AS
SELECT
    CONCAT(c.city, _utf8',', cy.country) AS store
    , CONCAT(m.first_name, _utf8' ', m.last_name) AS manager
    , SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY s.store_id
ORDER BY cy.country, c.city;

--
-- View structure for view `sales_by_film_category`
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--

CREATE VIEW sales_by_film_category
AS
SELECT
    c.name AS category
    , SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
ORDER BY total_sales DESC;

||||
--
-- View structure for view `actor_info`
--
-- One row per actor with a '; '-separated list of "Category: film, film"
-- entries, built via a correlated subquery per category.
--

CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info
AS
SELECT
    a.actor_id,
    a.first_name,
    a.last_name,
    GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
        (SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
            FROM sakila.film f
            INNER JOIN sakila.film_category fc
                ON f.film_id = fc.film_id
            INNER JOIN sakila.film_actor fa
                ON f.film_id = fa.film_id
            WHERE fc.category_id = c.category_id
            AND fa.actor_id = a.actor_id
        )
    )
    ORDER BY c.name SEPARATOR '; ')
    AS film_info
FROM sakila.actor a
LEFT JOIN sakila.film_actor fa
    ON a.actor_id = fa.actor_id
LEFT JOIN sakila.film_category fc
    ON fa.film_id = fc.film_id
LEFT JOIN sakila.category c
    ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;

||||
--
-- Procedure structure for procedure `rewards_report`
--

DELIMITER //

-- Reports customers from the previous calendar month whose total purchases
-- exceed min_dollar_amount_purchased across more than min_monthly_purchases
-- payments; count_rewardees receives the number of matches.
CREATE PROCEDURE rewards_report (
    IN min_monthly_purchases TINYINT UNSIGNED
    , IN min_dollar_amount_purchased DECIMAL(10,2) UNSIGNED
    , OUT count_rewardees INT
)
LANGUAGE SQL
NOT DETERMINISTIC
READS SQL DATA
SQL SECURITY DEFINER
COMMENT 'Provides a customizable report on best customers'
proc: BEGIN

    DECLARE last_month_start DATE;
    DECLARE last_month_end DATE;

    /* Some sanity checks... */
    IF min_monthly_purchases = 0 THEN
        SELECT 'Minimum monthly purchases parameter must be > 0';
        LEAVE proc;
    END IF;
    IF min_dollar_amount_purchased = 0.00 THEN
        SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00';
        LEAVE proc;
    END IF;

    /* Determine start and end time periods (first/last day of last month) */
    SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH);
    SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d');
    SET last_month_end = LAST_DAY(last_month_start);

    /*
        Create a temporary storage area for
        Customer IDs.
    */
    CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY);

    /*
        Find all customers meeting the
        monthly purchase requirements
    */
    INSERT INTO tmpCustomer (customer_id)
    SELECT p.customer_id
    FROM payment AS p
    WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end
    GROUP BY customer_id
    HAVING SUM(p.amount) > min_dollar_amount_purchased
    AND COUNT(customer_id) > min_monthly_purchases;

    /* Populate OUT parameter with count of found customers */
    SELECT COUNT(*) FROM tmpCustomer INTO count_rewardees;

    /*
        Output ALL customer information of matching rewardees.
        Customize output as needed.
    */
    SELECT c.*
    FROM tmpCustomer AS t
    INNER JOIN customer AS c ON t.customer_id = c.customer_id;

    /* Clean up */
    DROP TABLE tmpCustomer;
END //

DELIMITER ;

||||
DELIMITER $$

-- Returns the customer's outstanding balance as of p_effective_date:
-- rental fees + late fees - payments made (see the inline notes below).
CREATE FUNCTION get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
DETERMINISTIC
READS SQL DATA
BEGIN

    #OK, WE NEED TO CALCULATE THE CURRENT BALANCE GIVEN A CUSTOMER_ID AND A DATE
    #THAT WE WANT THE BALANCE TO BE EFFECTIVE FOR. THE BALANCE IS:
    #   1) RENTAL FEES FOR ALL PREVIOUS RENTALS
    #   2) ONE DOLLAR FOR EVERY DAY THE PREVIOUS RENTALS ARE OVERDUE
    #   3) IF A FILM IS MORE THAN RENTAL_DURATION * 2 OVERDUE, CHARGE THE REPLACEMENT_COST
    #   4) SUBTRACT ALL PAYMENTS MADE BEFORE THE DATE SPECIFIED

    DECLARE v_rentfees DECIMAL(5,2); #FEES PAID TO RENT THE VIDEOS INITIALLY
    DECLARE v_overfees INTEGER;      #LATE FEES FOR PRIOR RENTALS
    DECLARE v_payments DECIMAL(5,2); #SUM OF PAYMENTS MADE PREVIOUSLY

    SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees
    FROM film, inventory, rental
    WHERE film.film_id = inventory.film_id
      AND inventory.inventory_id = rental.inventory_id
      AND rental.rental_date <= p_effective_date
      AND rental.customer_id = p_customer_id;

    -- $1/day late fee for each day a rental ran past its rental_duration.
    SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
        ((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees
    FROM rental, inventory, film
    WHERE film.film_id = inventory.film_id
      AND inventory.inventory_id = rental.inventory_id
      AND rental.rental_date <= p_effective_date
      AND rental.customer_id = p_customer_id;

    SELECT IFNULL(SUM(payment.amount),0) INTO v_payments
    FROM payment
    WHERE payment.payment_date <= p_effective_date
      AND payment.customer_id = p_customer_id;

    RETURN v_rentfees + v_overfees - v_payments;
END $$

DELIMITER ;

||||
DELIMITER $$

-- Lists the inventory copies of film p_film_id at store p_store_id that are
-- available for rental, and returns the number of such rows in p_film_count.
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
     SELECT inventory_id
     FROM inventory
     WHERE film_id = p_film_id
     AND store_id = p_store_id
     AND inventory_in_stock(inventory_id);

     -- FOUND_ROWS() reports the row count of the immediately preceding SELECT.
     SELECT FOUND_ROWS() INTO p_film_count;
END $$

DELIMITER ;

||||
DELIMITER $$

-- Lists the inventory copies of film p_film_id at store p_store_id that are
-- currently rented out, and returns the number of such rows in p_film_count.
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
     SELECT inventory_id
     FROM inventory
     WHERE film_id = p_film_id
     AND store_id = p_store_id
     AND NOT inventory_in_stock(inventory_id);

     -- FOUND_ROWS() reports the row count of the immediately preceding SELECT.
     SELECT FOUND_ROWS() INTO p_film_count;
END $$

DELIMITER ;

||||
DELIMITER $$

-- Returns the customer_id of the customer currently holding inventory item
-- p_inventory_id, or NULL when the item is not out on rental (the EXIT
-- HANDLER converts the NOT FOUND condition into a NULL return).
CREATE FUNCTION inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
  DECLARE v_customer_id INT;
  DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;

  SELECT customer_id INTO v_customer_id
  FROM rental
  WHERE return_date IS NULL
  AND inventory_id = p_inventory_id;

  RETURN v_customer_id;
END $$

DELIMITER ;

||||
DELIMITER $$

-- Returns TRUE when inventory item p_inventory_id is available for rental:
-- either it has no rental rows at all, or none of its rentals is still open
-- (an open rental has return_date IS NULL).
CREATE FUNCTION inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
    DECLARE v_rentals INT;
    DECLARE v_out     INT;

    #AN ITEM IS IN-STOCK IF THERE ARE EITHER NO ROWS IN THE rental TABLE
    #FOR THE ITEM OR ALL ROWS HAVE return_date POPULATED

    SELECT COUNT(*) INTO v_rentals
    FROM rental
    WHERE inventory_id = p_inventory_id;

    IF v_rentals = 0 THEN
      RETURN TRUE;
    END IF;

    SELECT COUNT(rental_id) INTO v_out
    FROM inventory LEFT JOIN rental USING(inventory_id)
    WHERE inventory.inventory_id = p_inventory_id
    AND rental.return_date IS NULL;

    IF v_out > 0 THEN
      RETURN FALSE;
    ELSE
      RETURN TRUE;
    END IF;
END $$

DELIMITER ;

||||
-- Restore the session settings that were saved at the top of this script.
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;

File diff suppressed because one or more lines are too long
@ -0,0 +1,658 @@
|
||||
-- Sakila Sample Database Schema |
||||
-- Version 1.2 |
||||
|
||||
-- Copyright (c) 2006, 2019, Oracle and/or its affiliates. |
||||
|
||||
-- Redistribution and use in source and binary forms, with or without |
||||
-- modification, are permitted provided that the following conditions are |
||||
-- met: |
||||
|
||||
-- * Redistributions of source code must retain the above copyright notice, |
||||
-- this list of conditions and the following disclaimer. |
||||
-- * Redistributions in binary form must reproduce the above copyright |
||||
-- notice, this list of conditions and the following disclaimer in the |
||||
-- documentation and/or other materials provided with the distribution. |
||||
-- * Neither the name of Oracle nor the names of its contributors may be used |
||||
-- to endorse or promote products derived from this software without |
||||
-- specific prior written permission. |
||||
|
||||
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS |
||||
-- IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, |
||||
-- THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
||||
-- PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR |
||||
-- CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
||||
-- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
||||
-- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
||||
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF |
||||
-- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING |
||||
-- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS |
||||
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
-- Session setup: save the current settings (restored at the end of the
-- script) and relax checks so tables can be created in any order.
SET NAMES utf8mb4;
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL';

-- Recreate the test schema from scratch.
DROP SCHEMA IF EXISTS test_sakila;
CREATE SCHEMA test_sakila;
USE test_sakila;
||||
|
||||
-- |
||||
-- Table structure for table `actor` |
||||
-- |
||||
|
||||
--
-- Table structure for table `actor`
--

CREATE TABLE actor (
  actor_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  first_name VARCHAR(45) NOT NULL,
  last_name VARCHAR(45) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (actor_id),
  KEY idx_actor_last_name (last_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `address`
--

CREATE TABLE address (
  address_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  address VARCHAR(50) NOT NULL,
  address2 VARCHAR(50) DEFAULT NULL,
  district VARCHAR(20) NOT NULL,
  city_id SMALLINT UNSIGNED NOT NULL,
  postal_code VARCHAR(10) DEFAULT NULL,
  phone VARCHAR(20) NOT NULL,
  -- Add GEOMETRY column for MySQL 5.7.5 and higher
  -- Also include SRID attribute for MySQL 8.0.3 and higher
  /*!50705 location GEOMETRY */ /*!80003 SRID 0 */ /*!50705 NOT NULL,*/
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (address_id),
  KEY idx_fk_city_id (city_id),
  /*!50705 SPATIAL KEY `idx_location` (location),*/
  CONSTRAINT `fk_address_city` FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `category`
--

CREATE TABLE category (
  category_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  name VARCHAR(25) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (category_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `city`
--

CREATE TABLE city (
  city_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  city VARCHAR(50) NOT NULL,
  country_id SMALLINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (city_id),
  KEY idx_fk_country_id (country_id),
  CONSTRAINT `fk_city_country` FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `country`
--

CREATE TABLE country (
  country_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  country VARCHAR(50) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (country_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `customer`
--

CREATE TABLE customer (
  customer_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  store_id TINYINT UNSIGNED NOT NULL,
  first_name VARCHAR(45) NOT NULL,
  last_name VARCHAR(45) NOT NULL,
  email VARCHAR(50) DEFAULT NULL,
  address_id SMALLINT UNSIGNED NOT NULL,
  active BOOLEAN NOT NULL DEFAULT TRUE,
  create_date DATETIME NOT NULL,
  last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (customer_id),
  KEY idx_fk_store_id (store_id),
  KEY idx_fk_address_id (address_id),
  KEY idx_last_name (last_name),
  CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
||||
|
||||
-- |
||||
-- Table structure for table `film` |
||||
-- |
||||
|
||||
CREATE TABLE film (
  film_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  title VARCHAR(128) NOT NULL,
  description TEXT DEFAULT NULL,
  release_year YEAR DEFAULT NULL,
  language_id TINYINT UNSIGNED NOT NULL,
  original_language_id TINYINT UNSIGNED DEFAULT NULL,
  rental_duration TINYINT UNSIGNED NOT NULL DEFAULT 3,
  rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99,
  length SMALLINT UNSIGNED DEFAULT NULL,
  replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99,
  rating ENUM('G','PG','PG-13','R','NC-17') DEFAULT 'G',
  special_features SET('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') DEFAULT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (film_id),
  KEY idx_title (title),
  KEY idx_fk_language_id (language_id),
  KEY idx_fk_original_language_id (original_language_id),
  CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `film_actor` (junction: film <-> actor)
--

CREATE TABLE film_actor (
  actor_id SMALLINT UNSIGNED NOT NULL,
  film_id SMALLINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (actor_id,film_id),
  KEY idx_fk_film_id (`film_id`),
  CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `film_category` (junction: film <-> category)
--

CREATE TABLE film_category (
  film_id SMALLINT UNSIGNED NOT NULL,
  category_id TINYINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (film_id, category_id),
  CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
||||
|
||||
-- |
||||
-- Table structure for table `film_text` |
||||
-- |
||||
-- InnoDB added FULLTEXT support in 5.6.10. If you use an |
||||
-- earlier version, then consider upgrading (recommended) or |
||||
-- changing InnoDB to MyISAM as the film_text engine |
||||
-- |
||||
|
||||
-- Use InnoDB for film_text as of 5.6.10, MyISAM prior to 5.6.10
-- (InnoDB gained FULLTEXT support in 5.6.10; the /*!50610 ... */ version
-- comment only takes effect on servers >= 5.6.10).
SET @old_default_storage_engine = @@default_storage_engine;
SET @@default_storage_engine = 'MyISAM';
/*!50610 SET @@default_storage_engine = 'InnoDB'*/;

CREATE TABLE film_text (
  film_id SMALLINT NOT NULL,
  title VARCHAR(255) NOT NULL,
  description TEXT,
  PRIMARY KEY (film_id),
  FULLTEXT KEY idx_title_description (title,description)
) DEFAULT CHARSET=utf8mb4;

SET @@default_storage_engine = @old_default_storage_engine;

--
-- Triggers that keep film_text in sync with film
--

CREATE TRIGGER `ins_film` AFTER INSERT ON `film` FOR EACH ROW BEGIN
  INSERT INTO film_text (film_id, title, description)
  VALUES (new.film_id, new.title, new.description);
END;

CREATE TRIGGER `upd_film` AFTER UPDATE ON `film` FOR EACH ROW BEGIN
  -- Only touch film_text when a mirrored column actually changed.
  IF (old.title != new.title) OR (old.description != new.description) OR (old.film_id != new.film_id)
  THEN
    UPDATE film_text
      SET title=new.title,
          description=new.description,
          film_id=new.film_id
    WHERE film_id=old.film_id;
  END IF;
END;

CREATE TRIGGER `del_film` AFTER DELETE ON `film` FOR EACH ROW BEGIN
  DELETE FROM film_text WHERE film_id = old.film_id;
END;
||||
|
||||
-- |
||||
-- Table structure for table `inventory` |
||||
-- |
||||
|
||||
CREATE TABLE inventory (
  inventory_id MEDIUMINT UNSIGNED NOT NULL AUTO_INCREMENT,
  film_id SMALLINT UNSIGNED NOT NULL,
  store_id TINYINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (inventory_id),
  KEY idx_fk_film_id (film_id),
  KEY idx_store_id_film_id (store_id,film_id),
  CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `language`
--

CREATE TABLE language (
  language_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  name CHAR(20) NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (language_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `payment`
--

CREATE TABLE payment (
  payment_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
  customer_id SMALLINT UNSIGNED NOT NULL,
  staff_id TINYINT UNSIGNED NOT NULL,
  rental_id INT DEFAULT NULL,
  amount DECIMAL(5,2) NOT NULL,
  payment_date DATETIME NOT NULL,
  last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (payment_id),
  KEY idx_fk_staff_id (staff_id),
  KEY idx_fk_customer_id (customer_id),
  -- A deleted rental nulls out the reference instead of blocking the delete.
  CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
  CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `rental`
--

CREATE TABLE rental (
  rental_id INT NOT NULL AUTO_INCREMENT,
  rental_date DATETIME NOT NULL,
  inventory_id MEDIUMINT UNSIGNED NOT NULL,
  customer_id SMALLINT UNSIGNED NOT NULL,
  return_date DATETIME DEFAULT NULL,
  staff_id TINYINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (rental_id),
  UNIQUE KEY (rental_date,inventory_id,customer_id),
  KEY idx_fk_inventory_id (inventory_id),
  KEY idx_fk_customer_id (customer_id),
  KEY idx_fk_staff_id (staff_id),
  CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `staff`
--

CREATE TABLE staff (
  staff_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  first_name VARCHAR(45) NOT NULL,
  last_name VARCHAR(45) NOT NULL,
  address_id SMALLINT UNSIGNED NOT NULL,
  picture BLOB DEFAULT NULL,
  email VARCHAR(50) DEFAULT NULL,
  store_id TINYINT UNSIGNED NOT NULL,
  active BOOLEAN NOT NULL DEFAULT TRUE,
  username VARCHAR(16) NOT NULL,
  -- Binary collation so password comparison is case-sensitive.
  password VARCHAR(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (staff_id),
  KEY idx_fk_store_id (store_id),
  KEY idx_fk_address_id (address_id),
  CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Table structure for table `store`
--

CREATE TABLE store (
  store_id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
  manager_staff_id TINYINT UNSIGNED NOT NULL,
  address_id SMALLINT UNSIGNED NOT NULL,
  last_update TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (store_id),
  -- A staff member may manage at most one store.
  UNIQUE KEY idx_unique_manager (manager_staff_id),
  KEY idx_fk_address_id (address_id),
  CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) ON DELETE RESTRICT ON UPDATE CASCADE,
  CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
||||
|
||||
-- |
||||
-- View structure for view `customer_list` |
||||
-- |
||||
|
||||
CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID, CONCAT(cu.first_name, _utf8mb4' ', cu.last_name) AS name, a.address AS address, a.postal_code AS `zip code`,
  a.phone AS phone, city.city AS city, country.country AS country, IF(cu.active, _utf8mb4'active',_utf8mb4'') AS notes, cu.store_id AS SID
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id
  JOIN country ON city.country_id = country.country_id;

--
-- View structure for view `film_list`
--

CREATE VIEW film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
  film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(actor.first_name, _utf8mb4' ', actor.last_name) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
  JOIN film_actor ON film.film_id = film_actor.film_id
  JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id, category.name;

--
-- View structure for view `nicer_but_slower_film_list`
-- (same as film_list but title-cases actor names, which costs more per row)
--

CREATE VIEW nicer_but_slower_film_list
AS
SELECT film.film_id AS FID, film.title AS title, film.description AS description, category.name AS category, film.rental_rate AS price,
  film.length AS length, film.rating AS rating, GROUP_CONCAT(CONCAT(CONCAT(UCASE(SUBSTR(actor.first_name,1,1)),
  LCASE(SUBSTR(actor.first_name,2,LENGTH(actor.first_name))),_utf8mb4' ',CONCAT(UCASE(SUBSTR(actor.last_name,1,1)),
  LCASE(SUBSTR(actor.last_name,2,LENGTH(actor.last_name)))))) SEPARATOR ', ') AS actors
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id
  JOIN film_actor ON film.film_id = film_actor.film_id
  JOIN actor ON film_actor.actor_id = actor.actor_id
GROUP BY film.film_id, category.name;

--
-- View structure for view `staff_list`
--

CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID, CONCAT(s.first_name, _utf8mb4' ', s.last_name) AS name, a.address AS address, a.postal_code AS `zip code`, a.phone AS phone,
  city.city AS city, country.country AS country, s.store_id AS SID
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id
  JOIN country ON city.country_id = country.country_id;

--
-- View structure for view `sales_by_store`
--

CREATE VIEW sales_by_store
AS
SELECT
  CONCAT(c.city, _utf8mb4',', cy.country) AS store
  , CONCAT(m.first_name, _utf8mb4' ', m.last_name) AS manager
  , SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY s.store_id
ORDER BY cy.country, c.city;

--
-- View structure for view `sales_by_film_category`
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--

CREATE VIEW sales_by_film_category
AS
SELECT
  c.name AS category
  , SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name
ORDER BY total_sales DESC;

--
-- View structure for view `actor_info`
-- (per actor: a "category: film, film, ..." summary across all their films)
--

CREATE DEFINER=CURRENT_USER SQL SECURITY INVOKER VIEW actor_info
AS
SELECT
  a.actor_id,
  a.first_name,
  a.last_name,
  GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
    (SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
     FROM test_sakila.film f
     INNER JOIN test_sakila.film_category fc
       ON f.film_id = fc.film_id
     INNER JOIN test_sakila.film_actor fa
       ON f.film_id = fa.film_id
     WHERE fc.category_id = c.category_id
       AND fa.actor_id = a.actor_id
    )
  )
  ORDER BY c.name SEPARATOR '; ')
  AS film_info
FROM test_sakila.actor a
LEFT JOIN test_sakila.film_actor fa
  ON a.actor_id = fa.actor_id
LEFT JOIN test_sakila.film_category fc
  ON fa.film_id = fc.film_id
LEFT JOIN test_sakila.category c
  ON fc.category_id = c.category_id
GROUP BY a.actor_id, a.first_name, a.last_name;
||||
|
||||
-- |
||||
-- Procedure structure for procedure `rewards_report` |
||||
-- |
||||
|
||||
-- rewards_report: result set of last month's best customers, i.e. those with
-- more than min_monthly_purchases payments totalling more than
-- min_dollar_amount_purchased; count_rewardees receives the match count.
CREATE PROCEDURE rewards_report (
    IN min_monthly_purchases TINYINT UNSIGNED
    , IN min_dollar_amount_purchased DECIMAL(10,2)
    , OUT count_rewardees INT
)
LANGUAGE SQL
NOT DETERMINISTIC
READS SQL DATA
SQL SECURITY DEFINER
COMMENT 'Provides a customizable report on best customers'
proc: BEGIN

    DECLARE last_month_start DATE;
    DECLARE last_month_end DATE;

    /* Some sanity checks... */
    IF min_monthly_purchases = 0 THEN
        SELECT 'Minimum monthly purchases parameter must be > 0';
        LEAVE proc;
    END IF;
    IF min_dollar_amount_purchased = 0.00 THEN
        SELECT 'Minimum monthly dollar amount purchased parameter must be > $0.00';
        LEAVE proc;
    END IF;

    /* Determine the first and last day of the previous calendar month. */
    SET last_month_start = DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH);
    SET last_month_start = STR_TO_DATE(CONCAT(YEAR(last_month_start),'-',MONTH(last_month_start),'-01'),'%Y-%m-%d');
    SET last_month_end = LAST_DAY(last_month_start);

    /* Temporary staging area for qualifying customer ids. */
    CREATE TEMPORARY TABLE tmpCustomer (customer_id SMALLINT UNSIGNED NOT NULL PRIMARY KEY);

    /* Find all customers meeting the monthly purchase requirements. */
    INSERT INTO tmpCustomer (customer_id)
    SELECT p.customer_id
    FROM payment AS p
    WHERE DATE(p.payment_date) BETWEEN last_month_start AND last_month_end
    GROUP BY customer_id
    HAVING SUM(p.amount) > min_dollar_amount_purchased
    AND COUNT(customer_id) > min_monthly_purchases;

    /* Populate the OUT parameter with the count of found customers. */
    SELECT COUNT(*) INTO count_rewardees FROM tmpCustomer;

    /* Output ALL customer information of matching rewardees. */
    SELECT c.*
    FROM tmpCustomer AS t
    INNER JOIN customer AS c ON t.customer_id = c.customer_id;

    /* Clean up. */
    DROP TABLE tmpCustomer;
END;
||||
|
||||
-- get_customer_balance: balance owed by a customer as of a given date.
-- The balance is: rental fees for all prior rentals, plus one dollar per
-- day each prior rental was overdue, minus all payments made before the
-- effective date.
CREATE FUNCTION get_customer_balance(p_customer_id INT, p_effective_date DATETIME) RETURNS DECIMAL(5,2)
DETERMINISTIC
READS SQL DATA
BEGIN

    DECLARE v_rentfees DECIMAL(5,2); -- fees paid to rent the videos initially
    DECLARE v_overfees INTEGER;      -- late fees for prior rentals
    DECLARE v_payments DECIMAL(5,2); -- sum of payments made previously

    -- Base rental fees for every rental started on or before the date.
    SELECT IFNULL(SUM(film.rental_rate),0) INTO v_rentfees
    FROM film, inventory, rental
    WHERE film.film_id = inventory.film_id
      AND inventory.inventory_id = rental.inventory_id
      AND rental.rental_date <= p_effective_date
      AND rental.customer_id = p_customer_id;

    -- One dollar per day beyond the allowed rental_duration.
    SELECT IFNULL(SUM(IF((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) > film.rental_duration,
          ((TO_DAYS(rental.return_date) - TO_DAYS(rental.rental_date)) - film.rental_duration),0)),0) INTO v_overfees
    FROM rental, inventory, film
    WHERE film.film_id = inventory.film_id
      AND inventory.inventory_id = rental.inventory_id
      AND rental.rental_date <= p_effective_date
      AND rental.customer_id = p_customer_id;

    -- Payments already made on or before the date.
    SELECT IFNULL(SUM(payment.amount),0) INTO v_payments
    FROM payment
    WHERE payment.payment_date <= p_effective_date
      AND payment.customer_id = p_customer_id;

    RETURN v_rentfees + v_overfees - v_payments;
END;
||||
|
||||
-- film_in_stock: result set of inventory ids for copies of p_film_id at
-- p_store_id that are currently available; p_film_count gets their count.
CREATE PROCEDURE film_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
    -- Result set: the available inventory ids.
    SELECT inventory_id
    FROM inventory
    WHERE film_id = p_film_id
      AND store_id = p_store_id
      AND inventory_in_stock(inventory_id);

    -- Same predicate again, counted into the OUT parameter.
    SELECT COUNT(*) INTO p_film_count
    FROM inventory
    WHERE film_id = p_film_id
      AND store_id = p_store_id
      AND inventory_in_stock(inventory_id);
END;
||||
|
||||
-- film_not_in_stock: result set of inventory ids for copies of p_film_id at
-- p_store_id that are currently rented out; p_film_count gets their count.
CREATE PROCEDURE film_not_in_stock(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
READS SQL DATA
BEGIN
    -- Result set: the unavailable inventory ids.
    SELECT inventory_id
    FROM inventory
    WHERE film_id = p_film_id
      AND store_id = p_store_id
      AND NOT inventory_in_stock(inventory_id);

    -- Same predicate again, counted into the OUT parameter.
    SELECT COUNT(*) INTO p_film_count
    FROM inventory
    WHERE film_id = p_film_id
      AND store_id = p_store_id
      AND NOT inventory_in_stock(inventory_id);
END;
||||
|
||||
|
||||
-- inventory_held_by_customer: id of the customer with an open rental on the
-- given inventory item, or NULL when it is not checked out.
CREATE FUNCTION inventory_held_by_customer(p_inventory_id INT) RETURNS INT
READS SQL DATA
BEGIN
    DECLARE v_customer_id INT;
    -- No open rental row -> nobody holds the item; report NULL.
    DECLARE EXIT HANDLER FOR NOT FOUND RETURN NULL;

    SELECT customer_id INTO v_customer_id
    FROM rental
    WHERE return_date IS NULL
      AND inventory_id = p_inventory_id;

    RETURN v_customer_id;
END;
||||
|
||||
-- inventory_in_stock: TRUE when the item is available for rent. An item is
-- in stock if it has never been rented, or if every rental row for it has
-- return_date populated.
CREATE FUNCTION inventory_in_stock(p_inventory_id INT) RETURNS BOOLEAN
READS SQL DATA
BEGIN
    DECLARE v_rentals INT;  -- total rentals ever recorded for the item
    DECLARE v_out INT;      -- rentals still open (no return_date)

    SELECT COUNT(*) INTO v_rentals
    FROM rental
    WHERE inventory_id = p_inventory_id;

    -- Never rented: trivially in stock.
    IF v_rentals = 0 THEN
        RETURN TRUE;
    END IF;

    SELECT COUNT(rental_id) INTO v_out
    FROM inventory LEFT JOIN rental USING(inventory_id)
    WHERE inventory.inventory_id = p_inventory_id
      AND rental.return_date IS NULL;

    -- Any open rental means the copy is out.
    IF v_out > 0 THEN
        RETURN FALSE;
    ELSE
        RETURN TRUE;
    END IF;
END;
||||
|
||||
-- Restore the session settings that were saved at the top of this script.
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
||||
|
||||
|
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,504 @@
|
||||
/* |
||||
|
||||
Sakila for Microsoft SQL Server is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team. |
||||
This project is designed to help database administrators to decide which database to use for development of new products |
||||
The user can run the same SQL against different kind of databases and compare the performance |
||||
|
||||
License: BSD |
||||
Copyright DB Software Laboratory |
||||
http://www.etl-tools.com |
||||
|
||||
*/ |
||||
|
||||
CREATE DATABASE sakila; |
||||
GO |
||||
USE sakila; |
||||
|
||||
-- |
||||
-- Table structure for table actor |
||||
-- |
||||
|
||||
CREATE TABLE actor ( |
||||
actor_id int NOT NULL IDENTITY , |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (actor_id) |
||||
) |
||||
GO |
||||
ALTER TABLE actor ADD CONSTRAINT [DF_actor_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_actor_last_name ON actor(last_name) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table country |
||||
-- |
||||
|
||||
|
||||
CREATE TABLE country ( |
||||
country_id SMALLINT NOT NULL IDENTITY , |
||||
country VARCHAR(50) NOT NULL, |
||||
last_update DATETIME, |
||||
PRIMARY KEY NONCLUSTERED (country_id) |
||||
) |
||||
GO |
||||
ALTER TABLE country ADD CONSTRAINT [DF_country_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table city |
||||
-- |
||||
|
||||
CREATE TABLE city ( |
||||
city_id int NOT NULL IDENTITY , |
||||
city VARCHAR(50) NOT NULL, |
||||
country_id SMALLINT NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (city_id), |
||||
CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
GO |
||||
ALTER TABLE city ADD CONSTRAINT [DF_city_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_country_id ON city(country_id) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table address |
||||
-- |
||||
|
||||
CREATE TABLE address ( |
||||
address_id int NOT NULL IDENTITY , |
||||
address VARCHAR(50) NOT NULL, |
||||
address2 VARCHAR(50) DEFAULT NULL, |
||||
district VARCHAR(20) NOT NULL, |
||||
city_id INT NOT NULL, |
||||
postal_code VARCHAR(10) DEFAULT NULL, |
||||
phone VARCHAR(20) NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (address_id) |
||||
) |
||||
GO |
||||
ALTER TABLE address ADD CONSTRAINT [DF_address_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_city_id ON address(city_id) |
||||
GO |
||||
ALTER TABLE address ADD CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table language |
||||
-- |
||||
|
||||
CREATE TABLE language ( |
||||
language_id TINYINT NOT NULL IDENTITY, |
||||
name CHAR(20) NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (language_id) |
||||
) |
||||
GO |
||||
ALTER TABLE language ADD CONSTRAINT [DF_language_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table category |
||||
-- |
||||
|
||||
CREATE TABLE category ( |
||||
category_id TINYINT NOT NULL IDENTITY, |
||||
name VARCHAR(25) NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (category_id) |
||||
) |
||||
GO |
||||
ALTER TABLE category ADD CONSTRAINT [DF_category_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table customer |
||||
-- |
||||
|
||||
CREATE TABLE customer ( |
||||
customer_id INT NOT NULL IDENTITY , |
||||
store_id INT NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
address_id INT NOT NULL, |
||||
active CHAR(1) NOT NULL DEFAULT 'Y', |
||||
create_date DATETIME NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (customer_id), |
||||
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
GO |
||||
ALTER TABLE customer ADD CONSTRAINT [DF_customer_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
ALTER TABLE customer ADD CONSTRAINT [DF_customer_create_date] DEFAULT (getdate()) FOR create_date |
||||
GO |
||||
CREATE INDEX idx_fk_store_id ON customer(store_id) |
||||
GO |
||||
CREATE INDEX idx_fk_address_id ON customer(address_id) |
||||
GO |
||||
CREATE INDEX idx_last_name ON customer(last_name) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table film |
||||
-- |
||||
|
||||
CREATE TABLE film ( |
||||
film_id int NOT NULL IDENTITY , |
||||
title VARCHAR(255) NOT NULL, |
||||
description TEXT DEFAULT NULL, |
||||
release_year VARCHAR(4) NULL, |
||||
language_id TINYINT NOT NULL, |
||||
original_language_id TINYINT DEFAULT NULL, |
||||
rental_duration TINYINT NOT NULL DEFAULT 3, |
||||
rental_rate DECIMAL(4,2) NOT NULL DEFAULT 4.99, |
||||
length SMALLINT DEFAULT NULL, |
||||
replacement_cost DECIMAL(5,2) NOT NULL DEFAULT 19.99, |
||||
rating VARCHAR(10) DEFAULT 'G', |
||||
special_features VARCHAR(255) DEFAULT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (film_id), |
||||
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) , |
||||
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) |
||||
) |
||||
GO |
||||
ALTER TABLE film ADD CONSTRAINT CHECK_special_features CHECK(special_features is null or |
||||
special_features like '%Trailers%' or |
||||
special_features like '%Commentaries%' or |
||||
special_features like '%Deleted Scenes%' or |
||||
special_features like '%Behind the Scenes%') |
||||
GO |
||||
ALTER TABLE film ADD CONSTRAINT CHECK_special_rating CHECK(rating in ('G','PG','PG-13','R','NC-17')) |
||||
GO |
||||
ALTER TABLE film ADD CONSTRAINT [DF_film_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_language_id ON film(language_id) |
||||
GO |
||||
CREATE INDEX idx_fk_original_language_id ON film(original_language_id) |
||||
GO |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table film_actor |
||||
-- |
||||
|
||||
CREATE TABLE film_actor ( |
||||
actor_id INT NOT NULL, |
||||
film_id INT NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (actor_id,film_id), |
||||
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
GO |
||||
ALTER TABLE film_actor ADD CONSTRAINT [DF_film_actor_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_film_actor_film ON film_actor(film_id) |
||||
GO |
||||
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table film_category |
||||
-- |
||||
|
||||
CREATE TABLE film_category ( |
||||
film_id INT NOT NULL, |
||||
category_id TINYINT NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (film_id, category_id), |
||||
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
GO |
||||
ALTER TABLE film_category ADD CONSTRAINT [DF_film_category_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_film_category_film ON film_category(film_id) |
||||
GO |
||||
CREATE INDEX idx_fk_film_category_category ON film_category(category_id) |
||||
GO |
||||
-- |
||||
-- Table structure for table film_text |
||||
-- |
||||
|
||||
CREATE TABLE film_text ( |
||||
film_id SMALLINT NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description TEXT, |
||||
PRIMARY KEY NONCLUSTERED (film_id), |
||||
) |
||||
|
||||
-- |
||||
-- Table structure for table inventory |
||||
-- |
||||
|
||||
CREATE TABLE inventory ( |
||||
inventory_id INT NOT NULL IDENTITY, |
||||
film_id INT NOT NULL, |
||||
store_id INT NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (inventory_id), |
||||
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
GO |
||||
ALTER TABLE inventory ADD CONSTRAINT [DF_inventory_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_film_id ON inventory(film_id) |
||||
GO |
||||
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id,film_id) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table staff |
||||
-- |
||||
|
||||
CREATE TABLE staff ( |
||||
staff_id TINYINT NOT NULL IDENTITY, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
address_id INT NOT NULL, |
||||
picture IMAGE DEFAULT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
store_id INT NOT NULL, |
||||
active BIT NOT NULL DEFAULT 1, |
||||
username VARCHAR(16) NOT NULL, |
||||
password VARCHAR(40) DEFAULT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (staff_id), |
||||
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
GO |
||||
ALTER TABLE staff ADD CONSTRAINT [DF_staff_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_store_id ON staff(store_id) |
||||
GO |
||||
CREATE INDEX idx_fk_address_id ON staff(address_id) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table store |
||||
-- |
||||
|
||||
CREATE TABLE store ( |
||||
store_id INT NOT NULL IDENTITY, |
||||
manager_staff_id TINYINT NOT NULL, |
||||
address_id INT NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (store_id), |
||||
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) , |
||||
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) |
||||
) |
||||
|
||||
GO |
||||
ALTER TABLE store ADD CONSTRAINT [DF_store_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE UNIQUE NONCLUSTERED INDEX idx_fk_address_id ON store(manager_staff_id) |
||||
GO |
||||
CREATE INDEX idx_fk_store_address ON store(address_id) |
||||
GO |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table payment |
||||
-- |
||||
|
||||
CREATE TABLE payment ( |
||||
payment_id int NOT NULL IDENTITY , |
||||
customer_id INT NOT NULL, |
||||
staff_id TINYINT NOT NULL, |
||||
rental_id INT DEFAULT NULL, |
||||
amount DECIMAL(5,2) NOT NULL, |
||||
payment_date DATETIME NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (payment_id), |
||||
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) , |
||||
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) |
||||
) |
||||
GO |
||||
ALTER TABLE payment ADD CONSTRAINT [DF_payment_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_staff_id ON payment(staff_id) |
||||
GO |
||||
CREATE INDEX idx_fk_customer_id ON payment(customer_id) |
||||
GO |
||||
|
||||
-- |
||||
-- Table structure for table rental |
||||
-- |
||||
|
||||
CREATE TABLE rental ( |
||||
rental_id INT NOT NULL IDENTITY, |
||||
rental_date DATETIME NOT NULL, |
||||
inventory_id INT NOT NULL, |
||||
customer_id INT NOT NULL, |
||||
return_date DATETIME DEFAULT NULL, |
||||
staff_id TINYINT NOT NULL, |
||||
last_update DATETIME NOT NULL, |
||||
PRIMARY KEY NONCLUSTERED (rental_id), |
||||
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) , |
||||
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) , |
||||
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) |
||||
) |
||||
GO |
||||
ALTER TABLE rental ADD CONSTRAINT [DF_rental_last_update] DEFAULT (getdate()) FOR last_update |
||||
GO |
||||
CREATE INDEX idx_fk_inventory_id ON rental(inventory_id) |
||||
GO |
||||
CREATE INDEX idx_fk_customer_id ON rental(customer_id) |
||||
GO |
||||
CREATE INDEX idx_fk_staff_id ON rental(staff_id) |
||||
GO |
||||
CREATE UNIQUE INDEX idx_uq ON rental (rental_date,inventory_id,customer_id) |
||||
GO |
||||
|
||||
-- FK CONSTRAINTS |
||||
ALTER TABLE customer ADD CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
GO |
||||
ALTER TABLE inventory ADD CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE; |
||||
GO |
||||
ALTER TABLE staff ADD CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE; |
||||
GO |
||||
ALTER TABLE payment ADD CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE; |
||||
GO |
||||
|
||||
-- |
||||
-- View structure for view customer_list |
||||
-- |
||||
|
||||
CREATE VIEW customer_list |
||||
AS |
||||
SELECT cu.customer_id AS ID, |
||||
cu.first_name + ' ' + cu.last_name AS name, |
||||
a.address AS address, |
||||
a.postal_code AS zip_code, |
||||
a.phone AS phone, |
||||
city.city AS city, |
||||
country.country AS country, |
||||
case when cu.active=1 then 'active' else '' end AS notes, |
||||
cu.store_id AS SID |
||||
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id |
||||
GO |
||||
-- |
||||
-- View structure for view film_list |
||||
-- |
||||
|
||||
CREATE VIEW film_list |
||||
AS |
||||
SELECT film.film_id AS FID, |
||||
film.title AS title, |
||||
film.description AS description, |
||||
category.name AS category, |
||||
film.rental_rate AS price, |
||||
film.length AS length, |
||||
film.rating AS rating, |
||||
actor.first_name+' '+actor.last_name AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
GO |
||||
|
||||
-- |
||||
-- View structure for view staff_list |
||||
-- |
||||
|
||||
CREATE VIEW staff_list |
||||
AS |
||||
SELECT s.staff_id AS ID, |
||||
s.first_name+' '+s.last_name AS name, |
||||
a.address AS address, |
||||
a.postal_code AS zip_code, |
||||
a.phone AS phone, |
||||
city.city AS city, |
||||
country.country AS country, |
||||
s.store_id AS SID |
||||
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id |
||||
GO |
||||
-- |
||||
-- View structure for view sales_by_store |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_store |
||||
AS |
||||
SELECT |
||||
s.store_id |
||||
,c.city+','+cy.country AS store |
||||
,m.first_name+' '+ m.last_name AS manager |
||||
,SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN store AS s ON i.store_id = s.store_id |
||||
INNER JOIN address AS a ON s.address_id = a.address_id |
||||
INNER JOIN city AS c ON a.city_id = c.city_id |
||||
INNER JOIN country AS cy ON c.country_id = cy.country_id |
||||
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id |
||||
GROUP BY |
||||
s.store_id |
||||
, c.city+ ','+cy.country |
||||
, m.first_name+' '+ m.last_name |
||||
GO |
||||
-- |
||||
-- View structure for view sales_by_film_category |
||||
-- |
||||
-- Note that total sales will add up to >100% because |
||||
-- some titles belong to more than 1 category |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_film_category |
||||
AS |
||||
SELECT |
||||
c.name AS category |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN film AS f ON i.film_id = f.film_id |
||||
INNER JOIN film_category AS fc ON f.film_id = fc.film_id |
||||
INNER JOIN category AS c ON fc.category_id = c.category_id |
||||
GROUP BY c.name |
||||
GO |
||||
|
||||
-- |
||||
-- View structure for view actor_info |
||||
-- |
||||
|
||||
/* |
||||
CREATE VIEW actor_info |
||||
AS |
||||
SELECT |
||||
a.actor_id, |
||||
a.first_name, |
||||
a.last_name, |
||||
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ', |
||||
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ') |
||||
FROM sakila.film f |
||||
INNER JOIN sakila.film_category fc |
||||
ON f.film_id = fc.film_id |
||||
INNER JOIN sakila.film_actor fa |
||||
ON f.film_id = fa.film_id |
||||
WHERE fc.category_id = c.category_id |
||||
AND fa.actor_id = a.actor_id |
||||
) |
||||
) |
||||
ORDER BY c.name SEPARATOR '; ') |
||||
AS film_info |
||||
FROM sakila.actor a |
||||
LEFT JOIN sakila.film_actor fa |
||||
ON a.actor_id = fa.actor_id |
||||
LEFT JOIN sakila.film_category fc |
||||
ON fa.film_id = fc.film_id |
||||
LEFT JOIN sakila.category c |
||||
ON fc.category_id = c.category_id |
||||
GROUP BY a.actor_id, a.first_name, a.last_name; |
||||
*/ |
||||
|
||||
-- TO DO PROCEDURES |
||||
-- TO DO TRIGGERS |
||||
|
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -0,0 +1,45 @@
|
||||
/* |
||||
|
||||
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team. |
||||
This project is designed to help database administrators to decide which database to use for development of new products |
||||
The user can run the same SQL against different kind of databases and compare the performance |
||||
|
||||
License: BSD |
||||
Copyright DB Software Laboratory |
||||
http://www.etl-tools.com |
||||
|
||||
*/ |
||||
|
||||
-- Delete data |
||||
DELETE FROM payment |
||||
; |
||||
DELETE FROM rental |
||||
; |
||||
DELETE FROM customer |
||||
; |
||||
DELETE FROM film_category |
||||
; |
||||
DELETE FROM film_text |
||||
; |
||||
DELETE FROM film_actor |
||||
; |
||||
DELETE FROM inventory |
||||
; |
||||
DELETE FROM film |
||||
; |
||||
DELETE FROM category |
||||
; |
||||
DELETE FROM staff |
||||
; |
||||
DELETE FROM store |
||||
; |
||||
DELETE FROM actor |
||||
; |
||||
DELETE FROM address |
||||
; |
||||
DELETE FROM city |
||||
; |
||||
DELETE FROM country |
||||
; |
||||
DELETE FROM language |
||||
; |
@ -0,0 +1,70 @@
|
||||
/* |
||||
|
||||
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team. |
||||
This project is designed to help database administrators to decide which database to use for development of new products |
||||
The user can run the same SQL against different kind of databases and compare the performance |
||||
|
||||
License: BSD |
||||
Copyright DB Software Laboratory |
||||
http://www.etl-tools.com |
||||
|
||||
*/ |
||||
|
||||
-- Drop Views |
||||
|
||||
DROP VIEW customer_list |
||||
; |
||||
DROP VIEW film_list |
||||
; |
||||
--DROP VIEW nicer_but_slower_film_list; |
||||
DROP VIEW sales_by_film_category |
||||
; |
||||
DROP VIEW sales_by_store |
||||
; |
||||
DROP VIEW staff_list |
||||
; |
||||
|
||||
-- Drop Tables |
||||
|
||||
DROP TABLE payment |
||||
; |
||||
DROP TABLE rental |
||||
; |
||||
DROP TABLE inventory |
||||
; |
||||
DROP TABLE film_text |
||||
; |
||||
DROP TABLE film_category |
||||
; |
||||
DROP TABLE film_actor |
||||
; |
||||
DROP TABLE film |
||||
; |
||||
DROP TABLE language |
||||
; |
||||
DROP TABLE customer |
||||
; |
||||
DROP TABLE actor |
||||
; |
||||
DROP TABLE category |
||||
; |
||||
DROP TABLE store |
||||
; |
||||
DROP TABLE address |
||||
; |
||||
DROP TABLE staff |
||||
; |
||||
DROP TABLE city |
||||
; |
||||
DROP TABLE country |
||||
; |
||||
|
||||
-- Procedures and views |
||||
--drop procedure film_in_stock; |
||||
--drop procedure film_not_in_stock; |
||||
--drop function get_customer_balance; |
||||
--drop function inventory_held_by_customer; |
||||
--drop function inventory_in_stock; |
||||
--drop procedure rewards_report; |
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,645 @@
|
||||
/* |
||||
|
||||
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team. |
||||
This project is designed to help database administrators to decide which database to use for development of new products |
||||
The user can run the same SQL against different kind of databases and compare the performance |
||||
|
||||
License: BSD |
||||
Copyright DB Software Laboratory |
||||
http://www.etl-tools.com |
||||
|
||||
*/ |
||||
|
||||
-- |
||||
-- Table structure for table actor |
||||
-- |
||||
--DROP TABLE actor; |
||||
|
||||
CREATE TABLE actor ( |
||||
actor_id numeric NOT NULL , |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (actor_id) |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_actor_last_name ON actor(last_name) |
||||
; |
||||
|
||||
CREATE TRIGGER actor_trigger_ai AFTER INSERT ON actor |
||||
BEGIN |
||||
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER actor_trigger_au AFTER UPDATE ON actor |
||||
BEGIN |
||||
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table country |
||||
-- |
||||
|
||||
CREATE TABLE country ( |
||||
country_id SMALLINT NOT NULL, |
||||
country VARCHAR(50) NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (country_id) |
||||
) |
||||
; |
||||
|
||||
CREATE TRIGGER country_trigger_ai AFTER INSERT ON country |
||||
BEGIN |
||||
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER country_trigger_au AFTER UPDATE ON country |
||||
BEGIN |
||||
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table city |
||||
-- |
||||
|
||||
CREATE TABLE city ( |
||||
city_id int NOT NULL, |
||||
city VARCHAR(50) NOT NULL, |
||||
country_id SMALLINT NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (city_id), |
||||
CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_country_id ON city(country_id) |
||||
; |
||||
|
||||
CREATE TRIGGER city_trigger_ai AFTER INSERT ON city |
||||
BEGIN |
||||
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER city_trigger_au AFTER UPDATE ON city |
||||
BEGIN |
||||
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table address |
||||
-- |
||||
|
||||
CREATE TABLE address ( |
||||
address_id int NOT NULL, |
||||
address VARCHAR(50) NOT NULL, |
||||
address2 VARCHAR(50) DEFAULT NULL, |
||||
district VARCHAR(20) NOT NULL, |
||||
city_id INT NOT NULL, |
||||
postal_code VARCHAR(10) DEFAULT NULL, |
||||
phone VARCHAR(20) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (address_id), |
||||
CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_city_id ON address(city_id) |
||||
; |
||||
|
||||
CREATE TRIGGER address_trigger_ai AFTER INSERT ON address |
||||
BEGIN |
||||
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER address_trigger_au AFTER UPDATE ON address |
||||
BEGIN |
||||
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table language |
||||
-- |
||||
|
||||
CREATE TABLE language ( |
||||
language_id SMALLINT NOT NULL , |
||||
name CHAR(20) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (language_id) |
||||
) |
||||
; |
||||
|
||||
CREATE TRIGGER language_trigger_ai AFTER INSERT ON language |
||||
BEGIN |
||||
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER language_trigger_au AFTER UPDATE ON language |
||||
BEGIN |
||||
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table category |
||||
-- |
||||
|
||||
CREATE TABLE category ( |
||||
category_id SMALLINT NOT NULL, |
||||
name VARCHAR(25) NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (category_id) |
||||
); |
||||
|
||||
CREATE TRIGGER category_trigger_ai AFTER INSERT ON category |
||||
BEGIN |
||||
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER category_trigger_au AFTER UPDATE ON category |
||||
BEGIN |
||||
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table customer |
||||
-- |
||||
|
||||
CREATE TABLE customer ( |
||||
customer_id INT NOT NULL, |
||||
store_id INT NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
address_id INT NOT NULL, |
||||
active CHAR(1) DEFAULT 'Y' NOT NULL, |
||||
create_date TIMESTAMP NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (customer_id), |
||||
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_customer_fk_store_id ON customer(store_id) |
||||
; |
||||
CREATE INDEX idx_customer_fk_address_id ON customer(address_id) |
||||
; |
||||
CREATE INDEX idx_customer_last_name ON customer(last_name) |
||||
; |
||||
|
||||
CREATE TRIGGER customer_trigger_ai AFTER INSERT ON customer |
||||
BEGIN |
||||
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER customer_trigger_au AFTER UPDATE ON customer |
||||
BEGIN |
||||
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table film |
||||
-- |
||||
|
||||
CREATE TABLE film ( |
||||
film_id int NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description BLOB SUB_TYPE TEXT DEFAULT NULL, |
||||
release_year VARCHAR(4) DEFAULT NULL, |
||||
language_id SMALLINT NOT NULL, |
||||
original_language_id SMALLINT DEFAULT NULL, |
||||
rental_duration SMALLINT DEFAULT 3 NOT NULL, |
||||
rental_rate DECIMAL(4,2) DEFAULT 4.99 NOT NULL, |
||||
length SMALLINT DEFAULT NULL, |
||||
replacement_cost DECIMAL(5,2) DEFAULT 19.99 NOT NULL, |
||||
rating VARCHAR(10) DEFAULT 'G', |
||||
special_features VARCHAR(100) DEFAULT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (film_id), |
||||
CONSTRAINT CHECK_special_features CHECK(special_features is null or |
||||
special_features like '%Trailers%' or |
||||
special_features like '%Commentaries%' or |
||||
special_features like '%Deleted Scenes%' or |
||||
special_features like '%Behind the Scenes%'), |
||||
CONSTRAINT CHECK_special_rating CHECK(rating in ('G','PG','PG-13','R','NC-17')), |
||||
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) , |
||||
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_language_id ON film(language_id) |
||||
; |
||||
CREATE INDEX idx_fk_original_language_id ON film(original_language_id) |
||||
; |
||||
|
||||
CREATE TRIGGER film_trigger_ai AFTER INSERT ON film |
||||
BEGIN |
||||
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_trigger_au AFTER UPDATE ON film |
||||
BEGIN |
||||
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table film_actor |
||||
-- |
||||
|
||||
CREATE TABLE film_actor ( |
||||
actor_id INT NOT NULL, |
||||
film_id INT NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (actor_id,film_id), |
||||
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_actor_film ON film_actor(film_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id) |
||||
; |
||||
|
||||
CREATE TRIGGER film_actor_trigger_ai AFTER INSERT ON film_actor |
||||
BEGIN |
||||
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_actor_trigger_au AFTER UPDATE ON film_actor |
||||
BEGIN |
||||
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table film_category |
||||
-- |
||||
|
||||
CREATE TABLE film_category ( |
||||
film_id INT NOT NULL, |
||||
category_id SMALLINT NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (film_id, category_id), |
||||
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_category_film ON film_category(film_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_category_category ON film_category(category_id) |
||||
; |
||||
|
||||
CREATE TRIGGER film_category_trigger_ai AFTER INSERT ON film_category |
||||
BEGIN |
||||
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_category_trigger_au AFTER UPDATE ON film_category |
||||
BEGIN |
||||
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table film_text |
||||
-- |
||||
|
||||
CREATE TABLE film_text ( |
||||
film_id SMALLINT NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description BLOB SUB_TYPE TEXT, |
||||
PRIMARY KEY (film_id) |
||||
) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table inventory |
||||
-- |
||||
|
||||
CREATE TABLE inventory ( |
||||
inventory_id INT NOT NULL, |
||||
film_id INT NOT NULL, |
||||
store_id INT NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (inventory_id), |
||||
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_id ON inventory(film_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id,film_id) |
||||
; |
||||
|
||||
CREATE TRIGGER inventory_trigger_ai AFTER INSERT ON inventory |
||||
BEGIN |
||||
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER inventory_trigger_au AFTER UPDATE ON inventory |
||||
BEGIN |
||||
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table staff |
||||
-- |
||||
|
||||
CREATE TABLE staff ( |
||||
staff_id SMALLINT NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
address_id INT NOT NULL, |
||||
picture BLOB DEFAULT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
store_id INT NOT NULL, |
||||
active SMALLINT DEFAULT 1 NOT NULL, |
||||
username VARCHAR(16) NOT NULL, |
||||
password VARCHAR(40) DEFAULT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (staff_id), |
||||
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_staff_store_id ON staff(store_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_staff_address_id ON staff(address_id) |
||||
; |
||||
|
||||
CREATE TRIGGER staff_trigger_ai AFTER INSERT ON staff |
||||
BEGIN |
||||
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER staff_trigger_au AFTER UPDATE ON staff |
||||
BEGIN |
||||
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table store |
||||
-- |
||||
|
||||
CREATE TABLE store ( |
||||
store_id INT NOT NULL, |
||||
manager_staff_id SMALLINT NOT NULL, |
||||
address_id INT NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (store_id), |
||||
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) , |
||||
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_store_fk_manager_staff_id ON store(manager_staff_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_store_address ON store(address_id) |
||||
; |
||||
|
||||
CREATE TRIGGER store_trigger_ai AFTER INSERT ON store |
||||
BEGIN |
||||
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER store_trigger_au AFTER UPDATE ON store |
||||
BEGIN |
||||
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table payment |
||||
-- |
||||
|
||||
CREATE TABLE payment ( |
||||
payment_id int NOT NULL, |
||||
customer_id INT NOT NULL, |
||||
staff_id SMALLINT NOT NULL, |
||||
rental_id INT DEFAULT NULL, |
||||
amount DECIMAL(5,2) NOT NULL, |
||||
payment_date TIMESTAMP NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (payment_id), |
||||
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE, |
||||
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) , |
||||
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_staff_id ON payment(staff_id) |
||||
; |
||||
CREATE INDEX idx_fk_customer_id ON payment(customer_id) |
||||
; |
||||
|
||||
CREATE TRIGGER payment_trigger_ai AFTER INSERT ON payment |
||||
BEGIN |
||||
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER payment_trigger_au AFTER UPDATE ON payment |
||||
BEGIN |
||||
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TABLE rental ( |
||||
rental_id INT NOT NULL, |
||||
rental_date TIMESTAMP NOT NULL, |
||||
inventory_id INT NOT NULL, |
||||
customer_id INT NOT NULL, |
||||
return_date TIMESTAMP DEFAULT NULL, |
||||
staff_id SMALLINT NOT NULL, |
||||
last_update TIMESTAMP NOT NULL, |
||||
PRIMARY KEY (rental_id), |
||||
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) , |
||||
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) , |
||||
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) |
||||
) |
||||
; |
||||
CREATE INDEX idx_rental_fk_inventory_id ON rental(inventory_id) |
||||
; |
||||
CREATE INDEX idx_rental_fk_customer_id ON rental(customer_id) |
||||
; |
||||
CREATE INDEX idx_rental_fk_staff_id ON rental(staff_id) |
||||
; |
||||
CREATE UNIQUE INDEX idx_rental_uq ON rental (rental_date,inventory_id,customer_id) |
||||
; |
||||
|
||||
CREATE TRIGGER rental_trigger_ai AFTER INSERT ON rental |
||||
BEGIN |
||||
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER rental_trigger_au AFTER UPDATE ON rental |
||||
BEGIN |
||||
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
-- |
||||
-- View structure for view customer_list |
||||
-- |
||||
|
||||
CREATE VIEW customer_list |
||||
AS |
||||
SELECT cu.customer_id AS ID, |
||||
cu.first_name||' '||cu.last_name AS name, |
||||
a.address AS address, |
||||
a.postal_code AS zip_code, |
||||
a.phone AS phone, |
||||
city.city AS city, |
||||
country.country AS country, |
||||
case when cu.active=1 then 'active' else '' end AS notes, |
||||
cu.store_id AS SID |
||||
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id |
||||
; |
||||
-- |
||||
-- View structure for view film_list |
||||
-- |
||||
|
||||
CREATE VIEW film_list |
||||
AS |
||||
SELECT film.film_id AS FID, |
||||
film.title AS title, |
||||
film.description AS description, |
||||
category.name AS category, |
||||
film.rental_rate AS price, |
||||
film.length AS length, |
||||
film.rating AS rating, |
||||
actor.first_name||' '||actor.last_name AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
; |
||||
|
||||
-- |
||||
-- View structure for view staff_list |
||||
-- |
||||
|
||||
CREATE VIEW staff_list |
||||
AS |
||||
SELECT s.staff_id AS ID, |
||||
s.first_name||' '||s.last_name AS name, |
||||
a.address AS address, |
||||
a.postal_code AS zip_code, |
||||
a.phone AS phone, |
||||
city.city AS city, |
||||
country.country AS country, |
||||
s.store_id AS SID |
||||
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id |
||||
; |
||||
-- |
||||
-- View structure for view sales_by_store |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_store |
||||
AS |
||||
SELECT |
||||
s.store_id |
||||
,c.city||','||cy.country AS store |
||||
,m.first_name||' '||m.last_name AS manager |
||||
,SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN store AS s ON i.store_id = s.store_id |
||||
INNER JOIN address AS a ON s.address_id = a.address_id |
||||
INNER JOIN city AS c ON a.city_id = c.city_id |
||||
INNER JOIN country AS cy ON c.country_id = cy.country_id |
||||
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id |
||||
GROUP BY |
||||
s.store_id |
||||
, c.city||','||cy.country |
||||
, m.first_name||' '||m.last_name |
||||
; |
||||
-- |
||||
-- View structure for view sales_by_film_category |
||||
-- |
||||
-- Note that total sales will add up to >100% because |
||||
-- some titles belong to more than 1 category |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_film_category |
||||
AS |
||||
SELECT |
||||
c.name AS category |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN film AS f ON i.film_id = f.film_id |
||||
INNER JOIN film_category AS fc ON f.film_id = fc.film_id |
||||
INNER JOIN category AS c ON fc.category_id = c.category_id |
||||
GROUP BY c.name |
||||
; |
||||
|
||||
-- |
||||
-- View structure for view actor_info |
||||
-- |
||||
|
||||
/* |
||||
CREATE VIEW actor_info |
||||
AS |
||||
SELECT |
||||
a.actor_id, |
||||
a.first_name, |
||||
a.last_name, |
||||
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ', |
||||
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ') |
||||
FROM sakila.film f |
||||
INNER JOIN sakila.film_category fc |
||||
ON f.film_id = fc.film_id |
||||
INNER JOIN sakila.film_actor fa |
||||
ON f.film_id = fa.film_id |
||||
WHERE fc.category_id = c.category_id |
||||
AND fa.actor_id = a.actor_id |
||||
) |
||||
) |
||||
ORDER BY c.name SEPARATOR '; ') |
||||
AS film_info |
||||
FROM sakila.actor a |
||||
LEFT JOIN sakila.film_actor fa |
||||
ON a.actor_id = fa.actor_id |
||||
LEFT JOIN sakila.film_category fc |
||||
ON fa.film_id = fc.film_id |
||||
LEFT JOIN sakila.category c |
||||
ON fc.category_id = c.category_id |
||||
GROUP BY a.actor_id, a.first_name, a.last_name; |
||||
*/ |
||||
|
||||
-- TO DO PROCEDURES |
||||
-- TO DO TRIGGERS |
||||
|
@ -0,0 +1,467 @@
|
||||
/* |
||||
|
||||
Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team. |
||||
This project is designed to help database administrators to decide which database to use for development of new products |
||||
The user can run the same SQL against different kind of databases and compare the performance |
||||
|
||||
License: BSD |
||||
Copyright DB Software Laboratory |
||||
http://www.etl-tools.com |
||||
|
||||
*/ |
||||
|
||||
-- |
||||
-- Table structure for table actor |
||||
-- |
||||
--DROP TABLE actor; |
||||
|
||||
CREATE TABLE actor ( |
||||
actor_id INTEGER NOT NULL , |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (actor_id) |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_actor_last_name ON actor(last_name) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table country |
||||
-- |
||||
|
||||
CREATE TABLE country ( |
||||
country_id INTEGER NOT NULL, |
||||
country VARCHAR(50) NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (country_id) |
||||
) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table city |
||||
-- |
||||
|
||||
CREATE TABLE city ( |
||||
city_id INTEGER NOT NULL, |
||||
city VARCHAR(50) NOT NULL, |
||||
country_id INTEGER NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (city_id), |
||||
CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_country_id ON city(country_id) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table address |
||||
-- |
||||
|
||||
CREATE TABLE address ( |
||||
address_id INTEGER NOT NULL, |
||||
address VARCHAR(50) NOT NULL, |
||||
address2 VARCHAR(50) DEFAULT NULL, |
||||
district VARCHAR(20) NOT NULL, |
||||
city_id INT NOT NULL, |
||||
postal_code VARCHAR(10) DEFAULT NULL, |
||||
phone VARCHAR(20) NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (address_id), |
||||
CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_city_id ON address(city_id) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table language |
||||
-- |
||||
|
||||
CREATE TABLE language ( |
||||
language_id INTEGER NOT NULL , |
||||
name CHAR(20) NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (language_id) |
||||
) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table category |
||||
-- |
||||
|
||||
CREATE TABLE category ( |
||||
category_id INTEGER NOT NULL, |
||||
name VARCHAR(25) NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (category_id) |
||||
); |
||||
|
||||
-- |
||||
-- Table structure for table customer |
||||
-- |
||||
|
||||
CREATE TABLE customer ( |
||||
customer_id INTEGER NOT NULL, |
||||
store_id INT NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
address_id INT NOT NULL, |
||||
active CHAR(1) DEFAULT 'Y' NOT NULL, |
||||
create_date TIMESTAMP NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (customer_id), |
||||
CONSTRAINT fk_customer_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_customer_fk_store_id ON customer(store_id) |
||||
; |
||||
CREATE INDEX idx_customer_fk_address_id ON customer(address_id) |
||||
; |
||||
CREATE INDEX idx_customer_last_name ON customer(last_name) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table film |
||||
-- |
||||
|
||||
CREATE TABLE film ( |
||||
film_id INTEGER NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description BLOB SUB_TYPE TEXT DEFAULT NULL, |
||||
release_year VARCHAR(4) DEFAULT NULL, |
||||
language_id INTEGER NOT NULL, |
||||
original_language_id INTEGER DEFAULT NULL, |
||||
rental_duration INTEGER DEFAULT 3 NOT NULL, |
||||
rental_rate DECIMAL(4,2) DEFAULT 4.99 NOT NULL, |
||||
length INTEGER DEFAULT NULL, |
||||
replacement_cost DECIMAL(5,2) DEFAULT 19.99 NOT NULL, |
||||
rating VARCHAR(10) DEFAULT 'G', |
||||
special_features VARCHAR(100) DEFAULT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (film_id), |
||||
CONSTRAINT CHECK_special_features CHECK(special_features is null or |
||||
special_features like '%Trailers%' or |
||||
special_features like '%Commentaries%' or |
||||
special_features like '%Deleted Scenes%' or |
||||
special_features like '%Behind the Scenes%'), |
||||
CONSTRAINT fk_film_language FOREIGN KEY (language_id) REFERENCES language (language_id) , |
||||
CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id) |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_language_id ON film(language_id) |
||||
; |
||||
CREATE INDEX idx_fk_original_language_id ON film(original_language_id) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table film_actor |
||||
-- |
||||
|
||||
CREATE TABLE film_actor ( |
||||
actor_id INT NOT NULL, |
||||
film_id INT NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (actor_id,film_id), |
||||
CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_actor_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_actor_film ON film_actor(film_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id) |
||||
; |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table film_category |
||||
-- |
||||
|
||||
CREATE TABLE film_category ( |
||||
film_id INT NOT NULL, |
||||
category_id INTEGER NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (film_id, category_id), |
||||
CONSTRAINT fk_film_category_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_category_film ON film_category(film_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_category_category ON film_category(category_id) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table film_text |
||||
-- |
||||
|
||||
CREATE TABLE film_text ( |
||||
film_id INTEGER NOT NULL, |
||||
title VARCHAR(255) NOT NULL, |
||||
description BLOB SUB_TYPE TEXT, |
||||
PRIMARY KEY (film_id) |
||||
) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table inventory |
||||
-- |
||||
|
||||
CREATE TABLE inventory ( |
||||
inventory_id INTEGER NOT NULL, |
||||
film_id INT NOT NULL, |
||||
store_id INT NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (inventory_id), |
||||
CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_inventory_film FOREIGN KEY (film_id) REFERENCES film (film_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_id ON inventory(film_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id,film_id) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table staff |
||||
-- |
||||
|
||||
CREATE TABLE staff ( |
||||
staff_id INTEGER NOT NULL, |
||||
first_name VARCHAR(45) NOT NULL, |
||||
last_name VARCHAR(45) NOT NULL, |
||||
address_id INT NOT NULL, |
||||
picture BLOB DEFAULT NULL, |
||||
email VARCHAR(50) DEFAULT NULL, |
||||
store_id INT NOT NULL, |
||||
active INTEGER DEFAULT 1 NOT NULL, |
||||
username VARCHAR(16) NOT NULL, |
||||
password VARCHAR(40) DEFAULT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (staff_id), |
||||
CONSTRAINT fk_staff_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE, |
||||
CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_staff_store_id ON staff(store_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_staff_address_id ON staff(address_id) |
||||
; |
||||
|
||||
-- |
||||
-- Table structure for table store |
||||
-- |
||||
|
||||
CREATE TABLE store ( |
||||
store_id INTEGER NOT NULL, |
||||
manager_staff_id INTEGER NOT NULL, |
||||
address_id INT NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (store_id), |
||||
CONSTRAINT fk_store_staff FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id) , |
||||
CONSTRAINT fk_store_address FOREIGN KEY (address_id) REFERENCES address (address_id) |
||||
) |
||||
; |
||||
|
||||
CREATE INDEX idx_store_fk_manager_staff_id ON store(manager_staff_id) |
||||
; |
||||
|
||||
CREATE INDEX idx_fk_store_address ON store(address_id) |
||||
; |
||||
|
||||
|
||||
-- |
||||
-- Table structure for table payment |
||||
-- |
||||
|
||||
CREATE TABLE payment ( |
||||
payment_id INTEGER NOT NULL, |
||||
customer_id INT NOT NULL, |
||||
staff_id INTEGER NOT NULL, |
||||
rental_id INT DEFAULT NULL, |
||||
amount DECIMAL(5,2) NOT NULL, |
||||
payment_date TIMESTAMP NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (payment_id), |
||||
CONSTRAINT fk_payment_rental FOREIGN KEY (rental_id) REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE, |
||||
CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) , |
||||
CONSTRAINT fk_payment_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) |
||||
) |
||||
; |
||||
CREATE INDEX idx_fk_staff_id ON payment(staff_id) |
||||
; |
||||
CREATE INDEX idx_fk_customer_id ON payment(customer_id) |
||||
; |
||||
|
||||
|
||||
CREATE TABLE rental ( |
||||
rental_id INTEGER NOT NULL, |
||||
rental_date TIMESTAMP NOT NULL, |
||||
inventory_id INT NOT NULL, |
||||
customer_id INT NOT NULL, |
||||
return_date TIMESTAMP DEFAULT NULL, |
||||
staff_id INTEGER NOT NULL, |
||||
last_update TIMESTAMP, |
||||
PRIMARY KEY (rental_id), |
||||
CONSTRAINT fk_rental_staff FOREIGN KEY (staff_id) REFERENCES staff (staff_id) , |
||||
CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id) , |
||||
CONSTRAINT fk_rental_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id) |
||||
) |
||||
; |
||||
CREATE INDEX idx_rental_fk_inventory_id ON rental(inventory_id) |
||||
; |
||||
CREATE INDEX idx_rental_fk_customer_id ON rental(customer_id) |
||||
; |
||||
CREATE INDEX idx_rental_fk_staff_id ON rental(staff_id) |
||||
; |
||||
CREATE UNIQUE INDEX idx_rental_uq ON rental (rental_date,inventory_id,customer_id) |
||||
; |
||||
|
||||
-- |
||||
-- View structure for view customer_list |
||||
-- |
||||
|
||||
CREATE VIEW customer_list |
||||
AS |
||||
SELECT cu.customer_id AS ID, |
||||
cu.first_name||' '||cu.last_name AS name, |
||||
a.address AS address, |
||||
a.postal_code AS zip_code, |
||||
a.phone AS phone, |
||||
city.city AS city, |
||||
country.country AS country, |
||||
case when cu.active=1 then 'active' else '' end AS notes, |
||||
cu.store_id AS SID |
||||
FROM customer AS cu JOIN address AS a ON cu.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id |
||||
; |
||||
-- |
||||
-- View structure for view film_list |
||||
-- |
||||
|
||||
CREATE VIEW film_list |
||||
AS |
||||
SELECT film.film_id AS FID, |
||||
film.title AS title, |
||||
film.description AS description, |
||||
category.name AS category, |
||||
film.rental_rate AS price, |
||||
film.length AS length, |
||||
film.rating AS rating, |
||||
actor.first_name||' '||actor.last_name AS actors |
||||
FROM category LEFT JOIN film_category ON category.category_id = film_category.category_id LEFT JOIN film ON film_category.film_id = film.film_id |
||||
JOIN film_actor ON film.film_id = film_actor.film_id |
||||
JOIN actor ON film_actor.actor_id = actor.actor_id |
||||
; |
||||
|
||||
-- |
||||
-- View structure for view staff_list |
||||
-- |
||||
|
||||
CREATE VIEW staff_list |
||||
AS |
||||
SELECT s.staff_id AS ID, |
||||
s.first_name||' '||s.last_name AS name, |
||||
a.address AS address, |
||||
a.postal_code AS zip_code, |
||||
a.phone AS phone, |
||||
city.city AS city, |
||||
country.country AS country, |
||||
s.store_id AS SID |
||||
FROM staff AS s JOIN address AS a ON s.address_id = a.address_id JOIN city ON a.city_id = city.city_id |
||||
JOIN country ON city.country_id = country.country_id |
||||
; |
||||
-- |
||||
-- View structure for view sales_by_store |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_store |
||||
AS |
||||
SELECT |
||||
s.store_id |
||||
,c.city||','||cy.country AS store |
||||
,m.first_name||' '||m.last_name AS manager |
||||
,SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN store AS s ON i.store_id = s.store_id |
||||
INNER JOIN address AS a ON s.address_id = a.address_id |
||||
INNER JOIN city AS c ON a.city_id = c.city_id |
||||
INNER JOIN country AS cy ON c.country_id = cy.country_id |
||||
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id |
||||
GROUP BY |
||||
s.store_id |
||||
, c.city||','||cy.country |
||||
, m.first_name||' '||m.last_name |
||||
; |
||||
-- |
||||
-- View structure for view sales_by_film_category |
||||
-- |
||||
-- Note that total sales will add up to >100% because |
||||
-- some titles belong to more than 1 category |
||||
-- |
||||
|
||||
CREATE VIEW sales_by_film_category |
||||
AS |
||||
SELECT |
||||
c.name AS category |
||||
, SUM(p.amount) AS total_sales |
||||
FROM payment AS p |
||||
INNER JOIN rental AS r ON p.rental_id = r.rental_id |
||||
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id |
||||
INNER JOIN film AS f ON i.film_id = f.film_id |
||||
INNER JOIN film_category AS fc ON f.film_id = fc.film_id |
||||
INNER JOIN category AS c ON fc.category_id = c.category_id |
||||
GROUP BY c.name |
||||
; |
||||
|
||||
-- |
||||
-- View structure for view actor_info |
||||
-- |
||||
|
||||
/* |
||||
CREATE VIEW actor_info |
||||
AS |
||||
SELECT |
||||
a.actor_id, |
||||
a.first_name, |
||||
a.last_name, |
||||
GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ', |
||||
(SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ') |
||||
FROM sakila.film f |
||||
INNER JOIN sakila.film_category fc |
||||
ON f.film_id = fc.film_id |
||||
INNER JOIN sakila.film_actor fa |
||||
ON f.film_id = fa.film_id |
||||
WHERE fc.category_id = c.category_id |
||||
AND fa.actor_id = a.actor_id |
||||
) |
||||
) |
||||
ORDER BY c.name SEPARATOR '; ') |
||||
AS film_info |
||||
FROM sakila.actor a |
||||
LEFT JOIN sakila.film_actor fa |
||||
ON a.actor_id = fa.actor_id |
||||
LEFT JOIN sakila.film_category fc |
||||
ON fa.film_id = fc.film_id |
||||
LEFT JOIN sakila.category c |
||||
ON fc.category_id = c.category_id |
||||
GROUP BY a.actor_id, a.first_name, a.last_name; |
||||
*/ |
||||
|
||||
-- TO DO PROCEDURES |
||||
-- TO DO TRIGGERS |
||||
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,180 @@
|
||||
|
||||
CREATE TRIGGER actor_trigger_ai AFTER INSERT ON actor |
||||
BEGIN |
||||
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER actor_trigger_au AFTER UPDATE ON actor |
||||
BEGIN |
||||
UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER country_trigger_ai AFTER INSERT ON country |
||||
BEGIN |
||||
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER country_trigger_au AFTER UPDATE ON country |
||||
BEGIN |
||||
UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER city_trigger_ai AFTER INSERT ON city |
||||
BEGIN |
||||
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER city_trigger_au AFTER UPDATE ON city |
||||
BEGIN |
||||
UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER address_trigger_ai AFTER INSERT ON address |
||||
BEGIN |
||||
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER address_trigger_au AFTER UPDATE ON address |
||||
BEGIN |
||||
UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER language_trigger_ai AFTER INSERT ON language |
||||
BEGIN |
||||
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER language_trigger_au AFTER UPDATE ON language |
||||
BEGIN |
||||
UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER category_trigger_ai AFTER INSERT ON category |
||||
BEGIN |
||||
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER category_trigger_au AFTER UPDATE ON category |
||||
BEGIN |
||||
UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER customer_trigger_ai AFTER INSERT ON customer |
||||
BEGIN |
||||
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER customer_trigger_au AFTER UPDATE ON customer |
||||
BEGIN |
||||
UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_trigger_ai AFTER INSERT ON film |
||||
BEGIN |
||||
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_trigger_au AFTER UPDATE ON film |
||||
BEGIN |
||||
UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_actor_trigger_ai AFTER INSERT ON film_actor |
||||
BEGIN |
||||
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_actor_trigger_au AFTER UPDATE ON film_actor |
||||
BEGIN |
||||
UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_category_trigger_ai AFTER INSERT ON film_category |
||||
BEGIN |
||||
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER film_category_trigger_au AFTER UPDATE ON film_category |
||||
BEGIN |
||||
UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER inventory_trigger_ai AFTER INSERT ON inventory |
||||
BEGIN |
||||
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER inventory_trigger_au AFTER UPDATE ON inventory |
||||
BEGIN |
||||
UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER staff_trigger_ai AFTER INSERT ON staff |
||||
BEGIN |
||||
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER staff_trigger_au AFTER UPDATE ON staff |
||||
BEGIN |
||||
UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER store_trigger_ai AFTER INSERT ON store |
||||
BEGIN |
||||
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER store_trigger_au AFTER UPDATE ON store |
||||
BEGIN |
||||
UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER payment_trigger_ai AFTER INSERT ON payment |
||||
BEGIN |
||||
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER payment_trigger_au AFTER UPDATE ON payment |
||||
BEGIN |
||||
UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER rental_trigger_ai AFTER INSERT ON rental |
||||
BEGIN |
||||
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
||||
|
||||
CREATE TRIGGER rental_trigger_au AFTER UPDATE ON rental |
||||
BEGIN |
||||
UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid; |
||||
END |
||||
; |
Binary file not shown.
@ -0,0 +1,4 @@
|
||||
DB_USER=root |
||||
DB_PASSWORD=password |
||||
DB_PORT=3306 |
||||
DB_HOST=localhost |
@ -0,0 +1,5 @@
|
||||
DB_USER=postgres |
||||
DB_PASSWORD=password |
||||
DB_PORT=5432 |
||||
DB_HOST=localhost |
||||
DB_CLIENT=pg |
@ -0,0 +1,322 @@
|
||||
import fs from 'fs'; |
||||
import process from 'process'; |
||||
import { knex } from 'knex'; |
||||
import SqlMgrv2 from '../../src/db/sql-mgr/v2/SqlMgrv2'; |
||||
import type { Knex } from 'knex'; |
||||
import type { DbConfig } from '../../src/interface/config'; |
||||
import NcConfigFactory from '../../src/utils/NcConfigFactory' |
||||
|
||||
export default class TestDbMngr { |
||||
public static readonly dbName = 'test_meta'; |
||||
public static readonly sakilaDbName = 'test_sakila'; |
||||
public static metaKnex: Knex; |
||||
public static sakilaKnex: Knex; |
||||
|
||||
public static defaultConnection = { |
||||
user: 'root', |
||||
password: 'password', |
||||
host: 'localhost', |
||||
port: 3306, |
||||
client: 'mysql2', |
||||
}; |
||||
|
||||
public static pgConnection = { |
||||
user: 'postgres', |
||||
password: 'password', |
||||
host: 'localhost', |
||||
port: 5432, |
||||
client: 'pg', |
||||
}; |
||||
|
||||
public static connection: { |
||||
user: string; |
||||
password: string; |
||||
host: string; |
||||
port: number; |
||||
client: string; |
||||
} = TestDbMngr.defaultConnection; |
||||
|
||||
public static dbConfig: DbConfig; |
||||
|
||||
static populateConnectionConfig() { |
||||
const { user, password, host, port, client } = TestDbMngr.defaultConnection; |
||||
TestDbMngr.connection = { |
||||
user: process.env['DB_USER'] || user, |
||||
password: process.env['DB_PASSWORD'] || password, |
||||
host: process.env['DB_HOST'] || host, |
||||
port: Number(process.env['DB_PORT']) || port, |
||||
client: process.env['DB_CLIENT'] || client, |
||||
}; |
||||
|
||||
console.log(TestDbMngr.connection); |
||||
} |
||||
|
||||
static async testConnection(config: DbConfig) { |
||||
try { |
||||
console.log('Testing connection', TestDbMngr.connection); |
||||
return await SqlMgrv2.testConnection(config); |
||||
} catch (e) { |
||||
console.log(e); |
||||
return { code: -1, message: 'Connection invalid' }; |
||||
} |
||||
} |
||||
|
||||
static async init() { |
||||
TestDbMngr.populateConnectionConfig(); |
||||
|
||||
// common for both pg and mysql
|
||||
if (await TestDbMngr.isDbConfigured()) { |
||||
await TestDbMngr.connectDb(); |
||||
} else { |
||||
console.log('Mysql is not configured. Switching to sqlite'); |
||||
await TestDbMngr.switchToSqlite(); |
||||
} |
||||
} |
||||
|
||||
private static async isDbConfigured() { |
||||
const { user, password, host, port, client } = TestDbMngr.connection; |
||||
const config = NcConfigFactory.urlToDbConfig( |
||||
`${client}://${user}:${password}@${host}:${port}`, |
||||
); |
||||
config.connection = { |
||||
user, |
||||
password, |
||||
host, |
||||
port, |
||||
}; |
||||
const result = await TestDbMngr.testConnection(config); |
||||
return result.code !== -1; |
||||
} |
||||
static async connectDb() { |
||||
const { user, password, host, port, client } = TestDbMngr.connection; |
||||
if (!process.env[`DATABASE_URL`]) { |
||||
process.env[ |
||||
`DATABASE_URL` |
||||
] = `${client}://${user}:${password}@${host}:${port}/${TestDbMngr.dbName}`; |
||||
} |
||||
|
||||
TestDbMngr.dbConfig = NcConfigFactory.urlToDbConfig( |
||||
NcConfigFactory.extractXcUrlFromJdbc(process.env[`DATABASE_URL`]), |
||||
); |
||||
this.dbConfig.meta = { |
||||
tn: 'nc_evolutions', |
||||
dbAlias: 'db', |
||||
api: { |
||||
type: 'rest', |
||||
prefix: '', |
||||
graphqlDepthLimit: 10, |
||||
}, |
||||
inflection: { |
||||
tn: 'camelize', |
||||
cn: 'camelize', |
||||
}, |
||||
}; |
||||
|
||||
await TestDbMngr.setupMeta(); |
||||
await TestDbMngr.setupSakila(); |
||||
} |
||||
|
||||
static async setupMeta() { |
||||
if (TestDbMngr.metaKnex) { |
||||
await TestDbMngr.metaKnex.destroy(); |
||||
} |
||||
|
||||
if (TestDbMngr.isSqlite()) { |
||||
await TestDbMngr.resetMetaSqlite(); |
||||
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig()); |
||||
return; |
||||
} |
||||
|
||||
TestDbMngr.metaKnex = knex(TestDbMngr.getDbConfigWithNoDb()); |
||||
await TestDbMngr.resetDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName); |
||||
await TestDbMngr.metaKnex.destroy(); |
||||
|
||||
TestDbMngr.metaKnex = knex(TestDbMngr.getMetaDbConfig()); |
||||
await TestDbMngr.useDatabase(TestDbMngr.metaKnex, TestDbMngr.dbName); |
||||
} |
||||
|
||||
static async setupSakila() { |
||||
if (TestDbMngr.sakilaKnex) { |
||||
await TestDbMngr.sakilaKnex.destroy(); |
||||
} |
||||
|
||||
if (TestDbMngr.isSqlite()) { |
||||
await TestDbMngr.seedSakila(); |
||||
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig()); |
||||
return; |
||||
} |
||||
|
||||
TestDbMngr.sakilaKnex = knex(TestDbMngr.getDbConfigWithNoDb()); |
||||
await TestDbMngr.resetDatabase( |
||||
TestDbMngr.sakilaKnex, |
||||
TestDbMngr.sakilaDbName, |
||||
); |
||||
await TestDbMngr.sakilaKnex.destroy(); |
||||
|
||||
TestDbMngr.sakilaKnex = knex(TestDbMngr.getSakilaDbConfig()); |
||||
await TestDbMngr.useDatabase( |
||||
TestDbMngr.sakilaKnex, |
||||
TestDbMngr.sakilaDbName, |
||||
); |
||||
} |
||||
|
||||
static async switchToSqlite() { |
||||
// process.env[`DATABASE_URL`] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.sqlite`;
|
||||
TestDbMngr.dbConfig = { |
||||
client: 'sqlite3', |
||||
connection: { |
||||
filename: `${__dirname}/${TestDbMngr.dbName}.db`, |
||||
database: TestDbMngr.dbName, |
||||
}, |
||||
useNullAsDefault: true, |
||||
meta: { |
||||
tn: 'nc_evolutions', |
||||
dbAlias: 'db', |
||||
api: { |
||||
type: 'rest', |
||||
prefix: '', |
||||
graphqlDepthLimit: 10, |
||||
}, |
||||
inflection: { |
||||
tn: 'camelize', |
||||
cn: 'camelize', |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
process.env[ |
||||
`NC_DB` |
||||
] = `sqlite3:///?database=${__dirname}/${TestDbMngr.dbName}.db`; |
||||
await TestDbMngr.setupMeta(); |
||||
await TestDbMngr.setupSakila(); |
||||
} |
||||
|
||||
private static async resetDatabase(knexClient, dbName) { |
||||
if (TestDbMngr.isSqlite()) { |
||||
// return knexClient.raw(`DELETE FROM sqlite_sequence`);
|
||||
} else { |
||||
try { |
||||
await knexClient.raw(`DROP DATABASE ${dbName}`); |
||||
} catch (e) {} |
||||
await knexClient.raw(`CREATE DATABASE ${dbName}`); |
||||
console.log(`Database ${dbName} created`); |
||||
|
||||
if (!TestDbMngr.isPg()) { |
||||
await knexClient.raw(`USE ${dbName}`); |
||||
} |
||||
} |
||||
} |
||||
|
||||
static isSqlite() { |
||||
return TestDbMngr.dbConfig.client === 'sqlite3'; |
||||
} |
||||
|
||||
static isPg() { |
||||
return TestDbMngr.dbConfig.client === 'pg'; |
||||
} |
||||
|
||||
private static async useDatabase(knexClient, dbName) { |
||||
if (!TestDbMngr.isSqlite() && !TestDbMngr.isPg()) { |
||||
await knexClient.raw(`USE ${dbName}`); |
||||
} |
||||
} |
||||
|
||||
static getDbConfigWithNoDb() { |
||||
const dbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig)); |
||||
delete dbConfig.connection.database; |
||||
return dbConfig; |
||||
} |
||||
|
||||
static getMetaDbConfig() { |
||||
return TestDbMngr.dbConfig; |
||||
} |
||||
|
||||
private static resetMetaSqlite() { |
||||
if (fs.existsSync(`${__dirname}/test_meta.db`)) { |
||||
fs.unlinkSync(`${__dirname}/test_meta.db`); |
||||
} |
||||
} |
||||
|
||||
static getSakilaDbConfig() { |
||||
const sakilaDbConfig = JSON.parse(JSON.stringify(TestDbMngr.dbConfig)); |
||||
sakilaDbConfig.connection.database = TestDbMngr.sakilaDbName; |
||||
sakilaDbConfig.connection.multipleStatements = true; |
||||
if (TestDbMngr.isSqlite()) { |
||||
sakilaDbConfig.connection.filename = `${__dirname}/test_sakila.db`; |
||||
} |
||||
return sakilaDbConfig; |
||||
} |
||||
|
||||
static async seedSakila() { |
||||
const testsDir = __dirname.replace('tests/unit', 'tests'); |
||||
|
||||
if (TestDbMngr.isSqlite()) { |
||||
if (fs.existsSync(`${__dirname}/test_sakila.db`)) { |
||||
fs.unlinkSync(`${__dirname}/test_sakila.db`); |
||||
} |
||||
fs.copyFileSync( |
||||
`${testsDir}/sqlite-sakila-db/sakila.db`, |
||||
`${__dirname}/test_sakila.db`, |
||||
); |
||||
} else if (TestDbMngr.isPg()) { |
||||
const schemaFile = fs |
||||
.readFileSync(`${testsDir}/pg-sakila-db/01-postgres-sakila-schema.sql`) |
||||
.toString(); |
||||
const dataFile = fs |
||||
.readFileSync( |
||||
`${testsDir}/pg-sakila-db/02-postgres-sakila-insert-data.sql`, |
||||
) |
||||
.toString(); |
||||
await TestDbMngr.sakilaKnex.raw(schemaFile); |
||||
await TestDbMngr.sakilaKnex.raw(dataFile); |
||||
} else { |
||||
const schemaFile = fs |
||||
.readFileSync(`${testsDir}/mysql-sakila-db/03-test-sakila-schema.sql`) |
||||
.toString(); |
||||
const dataFile = fs |
||||
.readFileSync(`${testsDir}/mysql-sakila-db/04-test-sakila-data.sql`) |
||||
.toString(); |
||||
await TestDbMngr.sakilaKnex.raw(schemaFile); |
||||
await TestDbMngr.sakilaKnex.raw(dataFile); |
||||
} |
||||
} |
||||
|
||||
static async disableForeignKeyChecks(knexClient) { |
||||
if (TestDbMngr.isSqlite()) { |
||||
await knexClient.raw('PRAGMA foreign_keys = OFF'); |
||||
} else if (TestDbMngr.isPg()) { |
||||
await knexClient.raw(`SET session_replication_role = 'replica'`); |
||||
} else { |
||||
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 0`); |
||||
} |
||||
} |
||||
|
||||
static async enableForeignKeyChecks(knexClient) { |
||||
if (TestDbMngr.isSqlite()) { |
||||
await knexClient.raw(`PRAGMA foreign_keys = ON;`); |
||||
} else if (TestDbMngr.isPg()) { |
||||
await knexClient.raw(`SET session_replication_role = 'origin'`); |
||||
} else { |
||||
await knexClient.raw(`SET FOREIGN_KEY_CHECKS = 1`); |
||||
} |
||||
} |
||||
|
||||
static async showAllTables(knexClient) { |
||||
if (TestDbMngr.isSqlite()) { |
||||
const tables = await knexClient.raw( |
||||
`SELECT name FROM sqlite_master WHERE type='table'`, |
||||
); |
||||
return tables |
||||
.filter((t) => t.name !== 'sqlite_sequence' && t.name !== '_evolutions') |
||||
.map((t) => t.name); |
||||
} else if (TestDbMngr.isPg()) { |
||||
const tables = await knexClient.raw( |
||||
`SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';`, |
||||
); |
||||
return tables.rows.map((t) => t.tablename); |
||||
} else { |
||||
const response = await knexClient.raw(`SHOW TABLES`); |
||||
return response[0].map((table) => Object.values(table)[0]); |
||||
} |
||||
} |
||||
} |
@ -0,0 +1,274 @@
|
||||
import { UITypes } from 'nocodb-sdk'; |
||||
import request from 'supertest'; |
||||
import Column from '../../../src/models/Column'; |
||||
import FormViewColumn from '../../../src/models/FormViewColumn'; |
||||
import GalleryViewColumn from '../../../src/models/GalleryViewColumn'; |
||||
import GridViewColumn from '../../../src/models/GridViewColumn'; |
||||
import Model from '../../../src/models/Model'; |
||||
import Project from '../../../src/models/Project'; |
||||
import View from '../../../src/models/View'; |
||||
import { isSqlite, isPg } from '../init/db'; |
||||
|
||||
const defaultColumns = function (context) { |
||||
return [ |
||||
{ |
||||
column_name: 'id', |
||||
title: 'Id', |
||||
uidt: 'ID', |
||||
}, |
||||
{ |
||||
column_name: 'title', |
||||
title: 'Title', |
||||
uidt: 'SingleLineText', |
||||
}, |
||||
{ |
||||
cdf: isPg(context) ? 'now()' : 'CURRENT_TIMESTAMP', |
||||
column_name: 'created_at', |
||||
title: 'CreatedAt', |
||||
dtxp: '', |
||||
dtxs: '', |
||||
uidt: 'DateTime', |
||||
dt: isPg(context) ? 'timestamp without time zone' : undefined, |
||||
}, |
||||
{ |
||||
cdf: isSqlite(context) |
||||
? 'CURRENT_TIMESTAMP' |
||||
: isPg(context) |
||||
? 'now()' |
||||
: 'CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP', |
||||
column_name: 'updated_at', |
||||
title: 'UpdatedAt', |
||||
dtxp: '', |
||||
dtxs: '', |
||||
uidt: 'DateTime', |
||||
dt: isPg(context) ? 'timestamp without time zone' : undefined, |
||||
}, |
||||
]; |
||||
}; |
||||
|
||||
const createColumn = async (context, table, columnAttr) => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/tables/${table.id}/columns`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
...columnAttr, |
||||
}); |
||||
|
||||
const column: Column = (await table.getColumns()).find( |
||||
(column) => column.title === columnAttr.title |
||||
); |
||||
return column; |
||||
}; |
||||
|
||||
const createRollupColumn = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
title, |
||||
rollupFunction, |
||||
table, |
||||
relatedTableName, |
||||
relatedTableColumnTitle, |
||||
}: { |
||||
project: Project; |
||||
title: string; |
||||
rollupFunction: string; |
||||
table: Model; |
||||
relatedTableName: string; |
||||
relatedTableColumnTitle: string; |
||||
} |
||||
) => { |
||||
const childBases = await project.getBases(); |
||||
const childTable = await Model.getByIdOrName({ |
||||
project_id: project.id, |
||||
base_id: childBases[0].id!, |
||||
table_name: relatedTableName, |
||||
}); |
||||
const childTableColumns = await childTable.getColumns(); |
||||
const childTableColumn = await childTableColumns.find( |
||||
(column) => column.title === relatedTableColumnTitle |
||||
); |
||||
|
||||
const ltarColumn = (await table.getColumns()).find( |
||||
(column) => |
||||
column.uidt === UITypes.LinkToAnotherRecord && |
||||
column.colOptions?.fk_related_model_id === childTable.id |
||||
); |
||||
|
||||
const rollupColumn = await createColumn(context, table, { |
||||
title: title, |
||||
uidt: UITypes.Rollup, |
||||
fk_relation_column_id: ltarColumn?.id, |
||||
fk_rollup_column_id: childTableColumn?.id, |
||||
rollup_function: rollupFunction, |
||||
table_name: table.table_name, |
||||
column_name: title, |
||||
}); |
||||
|
||||
return rollupColumn; |
||||
}; |
||||
|
||||
const createLookupColumn = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
title, |
||||
table, |
||||
relatedTableName, |
||||
relatedTableColumnTitle, |
||||
}: { |
||||
project: Project; |
||||
title: string; |
||||
table: Model; |
||||
relatedTableName: string; |
||||
relatedTableColumnTitle: string; |
||||
} |
||||
) => { |
||||
const childBases = await project.getBases(); |
||||
const childTable = await Model.getByIdOrName({ |
||||
project_id: project.id, |
||||
base_id: childBases[0].id!, |
||||
table_name: relatedTableName, |
||||
}); |
||||
const childTableColumns = await childTable.getColumns(); |
||||
const childTableColumn = await childTableColumns.find( |
||||
(column) => column.title === relatedTableColumnTitle |
||||
); |
||||
|
||||
if (!childTableColumn) { |
||||
throw new Error( |
||||
`Could not find column ${relatedTableColumnTitle} in ${relatedTableName}` |
||||
); |
||||
} |
||||
|
||||
const ltarColumn = (await table.getColumns()).find( |
||||
(column) => |
||||
column.uidt === UITypes.LinkToAnotherRecord && |
||||
column.colOptions?.fk_related_model_id === childTable.id |
||||
); |
||||
const lookupColumn = await createColumn(context, table, { |
||||
title: title, |
||||
uidt: UITypes.Lookup, |
||||
fk_relation_column_id: ltarColumn?.id, |
||||
fk_lookup_column_id: childTableColumn?.id, |
||||
table_name: table.table_name, |
||||
column_name: title, |
||||
}); |
||||
|
||||
return lookupColumn; |
||||
}; |
||||
|
||||
const createQrCodeColumn = async ( |
||||
context, |
||||
{ |
||||
title, |
||||
table, |
||||
referencedQrValueTableColumnTitle, |
||||
}: { |
||||
title: string; |
||||
table: Model; |
||||
referencedQrValueTableColumnTitle: string; |
||||
} |
||||
) => { |
||||
const referencedQrValueTableColumnId = await table |
||||
.getColumns() |
||||
.then( |
||||
(cols) => |
||||
cols.find( |
||||
(column) => column.title == referencedQrValueTableColumnTitle |
||||
)['id'] |
||||
); |
||||
|
||||
const qrCodeColumn = await createColumn(context, table, { |
||||
title: title, |
||||
uidt: UITypes.QrCode, |
||||
column_name: title, |
||||
fk_qr_value_column_id: referencedQrValueTableColumnId, |
||||
}); |
||||
return qrCodeColumn; |
||||
}; |
||||
|
||||
const createBarcodeColumn = async ( |
||||
context, |
||||
{ |
||||
title, |
||||
table, |
||||
referencedBarcodeValueTableColumnTitle, |
||||
}: { |
||||
title: string; |
||||
table: Model; |
||||
referencedBarcodeValueTableColumnTitle: string; |
||||
} |
||||
) => { |
||||
const referencedBarcodeValueTableColumnId = await table |
||||
.getColumns() |
||||
.then( |
||||
(cols) => |
||||
cols.find( |
||||
(column) => column.title == referencedBarcodeValueTableColumnTitle |
||||
)['id'] |
||||
); |
||||
|
||||
const barcodeColumn = await createColumn(context, table, { |
||||
title: title, |
||||
uidt: UITypes.Barcode, |
||||
column_name: title, |
||||
fk_barcode_value_column_id: referencedBarcodeValueTableColumnId, |
||||
}); |
||||
return barcodeColumn; |
||||
}; |
||||
|
||||
const createLtarColumn = async ( |
||||
context, |
||||
{ |
||||
title, |
||||
parentTable, |
||||
childTable, |
||||
type, |
||||
}: { |
||||
title: string; |
||||
parentTable: Model; |
||||
childTable: Model; |
||||
type: string; |
||||
} |
||||
) => { |
||||
const ltarColumn = await createColumn(context, parentTable, { |
||||
title: title, |
||||
column_name: title, |
||||
uidt: UITypes.LinkToAnotherRecord, |
||||
parentId: parentTable.id, |
||||
childId: childTable.id, |
||||
type: type, |
||||
}); |
||||
|
||||
return ltarColumn; |
||||
}; |
||||
|
||||
const updateViewColumn = async ( |
||||
context, |
||||
{ view, column, attr }: { column: Column; view: View; attr: any } |
||||
) => { |
||||
const res = await request(context.app) |
||||
.patch(`/api/v1/db/meta/views/${view.id}/columns/${column.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
...attr, |
||||
}); |
||||
|
||||
const updatedColumn: FormViewColumn | GridViewColumn | GalleryViewColumn = ( |
||||
await view.getColumns() |
||||
).find((column) => column.id === column.id)!; |
||||
|
||||
return updatedColumn; |
||||
}; |
||||
|
||||
export { |
||||
defaultColumns, |
||||
createColumn, |
||||
createQrCodeColumn, |
||||
createBarcodeColumn, |
||||
createRollupColumn, |
||||
createLookupColumn, |
||||
createLtarColumn, |
||||
updateViewColumn, |
||||
}; |
@ -0,0 +1,81 @@
|
||||
import request from 'supertest'; |
||||
import Project from '../../../src/models/Project'; |
||||
|
||||
const sakilaProjectConfig = (context) => { |
||||
let base; |
||||
|
||||
if ( |
||||
context.sakilaDbConfig.client === 'mysql2' || |
||||
context.sakilaDbConfig.client === 'pg' |
||||
) { |
||||
base = { |
||||
type: context.sakilaDbConfig.client, |
||||
config: { |
||||
client: context.sakilaDbConfig.client, |
||||
connection: context.sakilaDbConfig.connection, |
||||
}, |
||||
}; |
||||
} else { |
||||
base = { |
||||
type: context.sakilaDbConfig.client, |
||||
config: { |
||||
client: context.sakilaDbConfig.client, |
||||
connection: { |
||||
client: context.sakilaDbConfig.client, |
||||
connection: context.sakilaDbConfig.connection, |
||||
}, |
||||
}, |
||||
}; |
||||
} |
||||
|
||||
base = { |
||||
...base, |
||||
inflection_column: 'camelize', |
||||
inflection_table: 'camelize', |
||||
}; |
||||
|
||||
return { |
||||
title: 'sakila', |
||||
bases: [base], |
||||
external: true, |
||||
}; |
||||
}; |
||||
|
||||
const defaultProjectValue = { |
||||
title: 'Title', |
||||
}; |
||||
|
||||
const defaultSharedBaseValue = { |
||||
roles: 'viewer', |
||||
password: 'password123', |
||||
}; |
||||
|
||||
const createSharedBase = async (app, token, project, sharedBaseArgs = {}) => { |
||||
await request(app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/shared`) |
||||
.set('xc-auth', token) |
||||
.send({ |
||||
...defaultSharedBaseValue, |
||||
...sharedBaseArgs, |
||||
}); |
||||
}; |
||||
|
||||
const createSakilaProject = async (context) => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/meta/projects/') |
||||
.set('xc-auth', context.token) |
||||
.send(sakilaProjectConfig(context)); |
||||
|
||||
return (await Project.getByTitleOrId(response.body.id)) as Project; |
||||
}; |
||||
|
||||
const createProject = async (context, projectArgs = defaultProjectValue) => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/meta/projects/') |
||||
.set('xc-auth', context.token) |
||||
.send(projectArgs); |
||||
|
||||
return (await Project.getByTitleOrId(response.body.id)) as Project; |
||||
}; |
||||
|
||||
export { createProject, createSharedBase, createSakilaProject }; |
@ -0,0 +1,373 @@
|
||||
import { ColumnType, UITypes } from 'nocodb-sdk'; |
||||
import request from 'supertest'; |
||||
import Column from '../../../src/models/Column'; |
||||
import Filter from '../../../src/models/Filter'; |
||||
import Model from '../../../src/models/Model'; |
||||
import Project from '../../../src/models/Project'; |
||||
import Sort from '../../../src/models/Sort'; |
||||
import NcConnectionMgrv2 from '../../../src/utils/common/NcConnectionMgrv2'; |
||||
|
||||
const rowValue = (column: ColumnType, index: number) => { |
||||
switch (column.uidt) { |
||||
case UITypes.Number: |
||||
return index; |
||||
case UITypes.SingleLineText: |
||||
return `test-${index}`; |
||||
case UITypes.Date: |
||||
return '2020-01-01'; |
||||
case UITypes.DateTime: |
||||
return '2020-01-01 00:00:00'; |
||||
case UITypes.Email: |
||||
return `test-${index}@example.com`; |
||||
default: |
||||
return `test-${index}`; |
||||
} |
||||
}; |
||||
|
||||
const rowMixedValue = (column: ColumnType, index: number) => { |
||||
// Array of country names
|
||||
const countries = [ |
||||
'Afghanistan', |
||||
'Albania', |
||||
'', |
||||
'Andorra', |
||||
'Angola', |
||||
'Antigua and Barbuda', |
||||
'Argentina', |
||||
null, |
||||
'Armenia', |
||||
'Australia', |
||||
'Austria', |
||||
'', |
||||
null, |
||||
]; |
||||
|
||||
// Array of sample random paragraphs (comma separated list of cities and countries). Not more than 200 characters
|
||||
const longText = [ |
||||
'Aberdeen, United Kingdom', |
||||
'Abidjan, Côte d’Ivoire', |
||||
'Abuja, Nigeria', |
||||
'', |
||||
'Addis Ababa, Ethiopia', |
||||
'Adelaide, Australia', |
||||
'Ahmedabad, India', |
||||
'Albuquerque, United States', |
||||
null, |
||||
'Alexandria, Egypt', |
||||
'Algiers, Algeria', |
||||
'Allahabad, India', |
||||
'', |
||||
null, |
||||
]; |
||||
|
||||
// Array of random integers, not more than 10000
|
||||
const numbers = [33, null, 456, 333, 267, 34, 8754, 3234, 44, 33, null]; |
||||
const decimals = [ |
||||
33.3, |
||||
456.34, |
||||
333.3, |
||||
null, |
||||
267.5674, |
||||
34.0, |
||||
8754.0, |
||||
3234.547, |
||||
44.2647, |
||||
33.98, |
||||
null, |
||||
]; |
||||
const duration = [10, 20, 30, 40, 50, 60, null, 70, 80, 90, null]; |
||||
const rating = [0, 1, 2, 3, null, 0, 4, 5, 0, 1, null]; |
||||
|
||||
// Array of random sample email strings (not more than 100 characters)
|
||||
const emails = [ |
||||
'jbutt@gmail.com', |
||||
'josephine_darakjy@darakjy.org', |
||||
'art@venere.org', |
||||
'', |
||||
null, |
||||
'donette.foller@cox.net', |
||||
'simona@morasca.com', |
||||
'mitsue_tollner@yahoo.com', |
||||
'leota@hotmail.com', |
||||
'sage_wieser@cox.net', |
||||
'', |
||||
null, |
||||
]; |
||||
|
||||
// Array of random sample phone numbers
|
||||
const phoneNumbers = [ |
||||
'1-541-754-3010', |
||||
'504-621-8927', |
||||
'810-292-9388', |
||||
'856-636-8749', |
||||
'907-385-4412', |
||||
'513-570-1893', |
||||
'419-503-2484', |
||||
'773-573-6914', |
||||
'', |
||||
null, |
||||
]; |
||||
|
||||
// Array of random sample URLs
|
||||
const urls = [ |
||||
'https://www.google.com', |
||||
'https://www.facebook.com', |
||||
'https://www.youtube.com', |
||||
'https://www.amazon.com', |
||||
'https://www.wikipedia.org', |
||||
'https://www.twitter.com', |
||||
'https://www.instagram.com', |
||||
'https://www.linkedin.com', |
||||
'https://www.reddit.com', |
||||
'https://www.tiktok.com', |
||||
'https://www.pinterest.com', |
||||
'https://www.netflix.com', |
||||
'https://www.microsoft.com', |
||||
'https://www.apple.com', |
||||
'', |
||||
null, |
||||
]; |
||||
|
||||
const singleSelect = [ |
||||
'jan', |
||||
'feb', |
||||
'mar', |
||||
'apr', |
||||
'may', |
||||
'jun', |
||||
'jul', |
||||
'aug', |
||||
'sep', |
||||
'oct', |
||||
'nov', |
||||
'dec', |
||||
null, |
||||
]; |
||||
|
||||
const multiSelect = [ |
||||
'jan,feb,mar', |
||||
'apr,may,jun', |
||||
'jul,aug,sep', |
||||
'oct,nov,dec', |
||||
'jan,feb,mar', |
||||
null, |
||||
]; |
||||
|
||||
switch (column.uidt) { |
||||
case UITypes.Number: |
||||
case UITypes.Percent: |
||||
return numbers[index % numbers.length]; |
||||
case UITypes.Decimal: |
||||
case UITypes.Currency: |
||||
return decimals[index % decimals.length]; |
||||
case UITypes.Duration: |
||||
return duration[index % duration.length]; |
||||
case UITypes.Rating: |
||||
return rating[index % rating.length]; |
||||
case UITypes.SingleLineText: |
||||
return countries[index % countries.length]; |
||||
case UITypes.Email: |
||||
return emails[index % emails.length]; |
||||
case UITypes.PhoneNumber: |
||||
return phoneNumbers[index % phoneNumbers.length]; |
||||
case UITypes.LongText: |
||||
return longText[index % longText.length]; |
||||
case UITypes.Date: |
||||
// set startDate as 400 days before today
|
||||
// eslint-disable-next-line no-case-declarations
|
||||
const result = new Date(); |
||||
result.setDate(result.getDate() - 400 + index); |
||||
return result.toISOString().slice(0, 10); |
||||
case UITypes.URL: |
||||
return urls[index % urls.length]; |
||||
case UITypes.SingleSelect: |
||||
return singleSelect[index % singleSelect.length]; |
||||
case UITypes.MultiSelect: |
||||
return multiSelect[index % multiSelect.length]; |
||||
default: |
||||
return `test-${index}`; |
||||
} |
||||
}; |
||||
|
||||
const getRow = async (context, { project, table, id }) => { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/${id}`) |
||||
.set('xc-auth', context.token); |
||||
|
||||
if (response.status !== 200) { |
||||
return undefined; |
||||
} |
||||
|
||||
return response.body; |
||||
}; |
||||
|
||||
const listRow = async ({ |
||||
project, |
||||
table, |
||||
options, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
options?: { |
||||
limit?: any; |
||||
offset?: any; |
||||
filterArr?: Filter[]; |
||||
sortArr?: Sort[]; |
||||
}; |
||||
}) => { |
||||
const bases = await project.getBases(); |
||||
const baseModel = await Model.getBaseModelSQL({ |
||||
id: table.id, |
||||
dbDriver: await NcConnectionMgrv2.get(bases[0]!), |
||||
}); |
||||
|
||||
const ignorePagination = !options; |
||||
|
||||
return await baseModel.list(options, ignorePagination); |
||||
}; |
||||
|
||||
const getOneRow = async ( |
||||
context, |
||||
{ project, table }: { project: Project; table: Model } |
||||
) => { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/data/noco/${project.id}/${table.id}/find-one`) |
||||
.set('xc-auth', context.token); |
||||
|
||||
return response.body; |
||||
}; |
||||
|
||||
const generateDefaultRowAttributes = ({ |
||||
columns, |
||||
index = 0, |
||||
}: { |
||||
columns: ColumnType[]; |
||||
index?: number; |
||||
}) => |
||||
columns.reduce((acc, column) => { |
||||
if ( |
||||
column.uidt === UITypes.LinkToAnotherRecord || |
||||
column.uidt === UITypes.ForeignKey || |
||||
column.uidt === UITypes.ID |
||||
) { |
||||
return acc; |
||||
} |
||||
acc[column.title!] = rowValue(column, index); |
||||
return acc; |
||||
}, {}); |
||||
|
||||
const createRow = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
table, |
||||
index = 0, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
index?: number; |
||||
} |
||||
) => { |
||||
const columns = await table.getColumns(); |
||||
const rowData = generateDefaultRowAttributes({ columns, index }); |
||||
|
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/data/noco/${project.id}/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send(rowData); |
||||
|
||||
return response.body; |
||||
}; |
||||
|
||||
const createBulkRows = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
table, |
||||
values, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
values: any[]; |
||||
} |
||||
) => { |
||||
await request(context.app) |
||||
.post(`/api/v1/db/data/bulk/noco/${project.id}/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send(values) |
||||
.expect(200); |
||||
}; |
||||
|
||||
// Links 2 table rows together. Will create rows if ids are not provided
|
||||
const createChildRow = async ( |
||||
context, |
||||
{ |
||||
project, |
||||
table, |
||||
childTable, |
||||
column, |
||||
rowId, |
||||
childRowId, |
||||
type, |
||||
}: { |
||||
project: Project; |
||||
table: Model; |
||||
childTable: Model; |
||||
column: Column; |
||||
rowId?: string; |
||||
childRowId?: string; |
||||
type: string; |
||||
} |
||||
) => { |
||||
if (!rowId) { |
||||
const row = await createRow(context, { project, table }); |
||||
rowId = row['Id']; |
||||
} |
||||
|
||||
if (!childRowId) { |
||||
const row = await createRow(context, { table: childTable, project }); |
||||
childRowId = row['Id']; |
||||
} |
||||
|
||||
await request(context.app) |
||||
.post( |
||||
`/api/v1/db/data/noco/${project.id}/${table.id}/${rowId}/${type}/${column.title}/${childRowId}` |
||||
) |
||||
.set('xc-auth', context.token); |
||||
|
||||
const row = await getRow(context, { project, table, id: rowId }); |
||||
|
||||
return row; |
||||
}; |
||||
|
||||
// Mixed row attributes
|
||||
const generateMixedRowAttributes = ({ |
||||
columns, |
||||
index = 0, |
||||
}: { |
||||
columns: ColumnType[]; |
||||
index?: number; |
||||
}) => |
||||
columns.reduce((acc, column) => { |
||||
if ( |
||||
column.uidt === UITypes.LinkToAnotherRecord || |
||||
column.uidt === UITypes.ForeignKey || |
||||
column.uidt === UITypes.ID |
||||
) { |
||||
return acc; |
||||
} |
||||
acc[column.title!] = rowMixedValue(column, index); |
||||
return acc; |
||||
}, {}); |
||||
|
||||
export { |
||||
createRow, |
||||
getRow, |
||||
createChildRow, |
||||
getOneRow, |
||||
listRow, |
||||
generateDefaultRowAttributes, |
||||
generateMixedRowAttributes, |
||||
createBulkRows, |
||||
rowMixedValue, |
||||
}; |
@ -0,0 +1,48 @@
|
||||
import request from 'supertest'; |
||||
import { Model } from '../../../src/models'; |
||||
import { defaultColumns } from './column'; |
||||
import type { Project } from '../../../src/models'; |
||||
|
||||
const defaultTableValue = (context) => ({ |
||||
table_name: 'Table1', |
||||
title: 'Table1_Title', |
||||
columns: defaultColumns(context), |
||||
}); |
||||
|
||||
const createTable = async (context, project, args = {}) => { |
||||
const defaultValue = defaultTableValue(context); |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ ...defaultValue, ...args }); |
||||
|
||||
const table: Model = await Model.get(response.body.id); |
||||
return table; |
||||
}; |
||||
|
||||
const getTable = async ({ |
||||
project, |
||||
name, |
||||
}: { |
||||
project: Project; |
||||
name: string; |
||||
}) => { |
||||
const bases = await project.getBases(); |
||||
return await Model.getByIdOrName({ |
||||
project_id: project.id, |
||||
base_id: bases[0].id!, |
||||
table_name: name, |
||||
}); |
||||
}; |
||||
|
||||
const getAllTables = async ({ project }: { project: Project }) => { |
||||
const bases = await project.getBases(); |
||||
const tables = await Model.list({ |
||||
project_id: project.id, |
||||
base_id: bases[0].id!, |
||||
}); |
||||
|
||||
return tables; |
||||
}; |
||||
|
||||
export { createTable, getTable, getAllTables }; |
@ -0,0 +1,18 @@
|
||||
import request from 'supertest'; |
||||
import { User } from '../../../src/models' |
||||
|
||||
const defaultUserArgs = { |
||||
email: 'test@example.com', |
||||
password: 'A1234abh2@dsad', |
||||
}; |
||||
|
||||
const createUser = async (context, userArgs = {}) => { |
||||
const args = { ...defaultUserArgs, ...userArgs }; |
||||
const response = await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send(args); |
||||
const user = await User.getByEmail(args.email); |
||||
return { token: response.body.token, user }; |
||||
}; |
||||
|
||||
export { createUser, defaultUserArgs }; |
@ -0,0 +1,38 @@
|
||||
import { ViewTypes } from 'nocodb-sdk'; |
||||
import request from 'supertest'; |
||||
import Model from '../../../src/models/Model'; |
||||
import View from '../../../src/models/View'; |
||||
|
||||
const createView = async (context, {title, table, type}: {title: string, table: Model, type: ViewTypes}) => { |
||||
const viewTypeStr = (type) => { |
||||
switch (type) { |
||||
case ViewTypes.GALLERY: |
||||
return 'galleries'; |
||||
case ViewTypes.FORM: |
||||
return 'forms'; |
||||
case ViewTypes.GRID: |
||||
return 'grids'; |
||||
case ViewTypes.KANBAN: |
||||
return 'kanbans'; |
||||
default: |
||||
throw new Error('Invalid view type'); |
||||
} |
||||
}; |
||||
|
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/tables/${table.id}/${viewTypeStr(type)}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
title, |
||||
type, |
||||
}); |
||||
if(response.status !== 200) { |
||||
throw new Error('createView',response.body.message); |
||||
} |
||||
|
||||
const view = await View.getByTitleOrId({fk_model_id: table.id, titleOrId:title}) as View; |
||||
|
||||
return view |
||||
} |
||||
|
||||
export {createView} |
@ -0,0 +1,24 @@
|
||||
import 'mocha'; |
||||
import restTests from './rest/index.test'; |
||||
import modelTests from './model/index.test'; |
||||
import TestDbMngr from './TestDbMngr' |
||||
import dotenv from 'dotenv'; |
||||
|
||||
// Force test-mode behaviour before any nocodb module reads these flags.
process.env.NODE_ENV = 'test';
process.env.TEST = 'true';
process.env.NC_DISABLE_CACHE = 'true';
process.env.NC_DISABLE_TELE = 'true';

// Load environment variables from .env file
dotenv.config({
  path: __dirname + '/.env'
});

// Initialise the test databases, register all suites, then start mocha's
// delayed root suite. `run()` is the global mocha injects when started
// with the --delay flag.
(async function() {
  await TestDbMngr.init();

  modelTests();
  restTests();

  run();
})();
@ -0,0 +1,60 @@
|
||||
import { Model, Project } from '../../../src/models'; |
||||
import NcConnectionMgrv2 from '../../../src/utils/common/NcConnectionMgrv2'; |
||||
import { orderedMetaTables } from '../../../src/utils/globals'; |
||||
import TestDbMngr from '../TestDbMngr'; |
||||
import { isPg } from './db'; |
||||
|
||||
/**
 * Drop every user-created table belonging to a meta (non-external) project
 * from the meta database.
 *
 * Table names are collected in parallel across projects (only the first
 * base of each project is inspected), then dropped sequentially with
 * foreign-key checks disabled so drop order does not matter.
 */
const dropTablesAllNonExternalProjects = async () => {
  const projects = await Project.list({});
  const userCreatedTableNames: string[] = [];
  await Promise.all(
    projects
      .filter((project) => project.is_meta)
      .map(async (project) => {
        // populates project.bases as a side effect
        await project.getBases();
        const base = project.bases && project.bases[0];
        if (!base) return;

        const models = await Model.list({
          project_id: project.id,
          base_id: base.id!,
        });
        models.forEach((model) => {
          userCreatedTableNames.push(model.table_name);
        });
      }),
  );

  await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex);

  for (const tableName of userCreatedTableNames) {
    // Postgres needs CASCADE to take dependent objects (views, FKs) along.
    if (TestDbMngr.isPg()) {
      await TestDbMngr.metaKnex.raw(`DROP TABLE "${tableName}" CASCADE`);
    } else {
      await TestDbMngr.metaKnex.raw(`DROP TABLE ${tableName}`);
    }
  }

  await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex);
};
||||
|
||||
const cleanupMetaTables = async () => { |
||||
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.metaKnex); |
||||
for (const tableName of orderedMetaTables) { |
||||
try { |
||||
await TestDbMngr.metaKnex.raw(`DELETE FROM ${tableName}`); |
||||
} catch (e) {} |
||||
} |
||||
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.metaKnex); |
||||
}; |
||||
|
||||
/**
 * Full meta-DB reset: close all pooled knex connections, drop the tables
 * users created inside meta projects, then empty every meta table.
 * Failures are logged rather than thrown so test teardown never aborts
 * the run.
 */
export default async function () {
  try {
    await NcConnectionMgrv2.destroyAll();

    await dropTablesAllNonExternalProjects();
    await cleanupMetaTables();
  } catch (e) {
    console.error('cleanupMeta', e);
  }
}
@ -0,0 +1,116 @@
|
||||
|
||||
import { Audit, Project } from '../../../src/models' |
||||
import TestDbMngr from '../TestDbMngr'; |
||||
|
||||
const dropTablesOfSakila = async () => { |
||||
await TestDbMngr.disableForeignKeyChecks(TestDbMngr.sakilaKnex); |
||||
|
||||
for (const tableName of sakilaTableNames) { |
||||
try { |
||||
if (TestDbMngr.isPg()) { |
||||
await TestDbMngr.sakilaKnex.raw( |
||||
`DROP TABLE IF EXISTS "${tableName}" CASCADE` |
||||
); |
||||
} else { |
||||
await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`); |
||||
} |
||||
} catch (e) {} |
||||
} |
||||
await TestDbMngr.enableForeignKeyChecks(TestDbMngr.sakilaKnex); |
||||
}; |
||||
|
||||
const dropSchemaAndSeedSakila = async () => { |
||||
try { |
||||
await TestDbMngr.sakilaKnex.raw(`DROP SCHEMA "public" CASCADE`); |
||||
await TestDbMngr.sakilaKnex.raw(`CREATE SCHEMA "public"`); |
||||
await TestDbMngr.seedSakila(); |
||||
} catch (e) { |
||||
console.error('dropSchemaAndSeedSakila', e); |
||||
throw e; |
||||
} |
||||
}; |
||||
|
||||
const resetAndSeedSakila = async () => { |
||||
try { |
||||
await dropTablesOfSakila(); |
||||
await TestDbMngr.seedSakila(); |
||||
} catch (e) { |
||||
console.error('resetSakila', e); |
||||
throw e; |
||||
} |
||||
}; |
||||
|
||||
/**
 * Restore the sakila DB to a pristine state between tests.
 *
 * If the 'sakila' project has audit entries, data was mutated, so the
 * whole DB is rebuilt (schema reset on PG, table drop + reseed elsewhere).
 * Otherwise only tables that are not part of the known sakila set
 * (i.e. tables a test created) are dropped. All failures are logged,
 * never thrown, so teardown cannot abort a run.
 */
const cleanUpSakila = async () => {
  try {
    const sakilaProject = await Project.getByTitle('sakila');

    const audits =
      sakilaProject && (await Audit.projectAuditList(sakilaProject.id, {}));

    if (audits?.length > 0) {
      // if PG, drop schema
      if (TestDbMngr.isPg()) {
        return await dropSchemaAndSeedSakila();
      }
      // if mysql, drop tables
      return await resetAndSeedSakila();
    }

    const tablesInSakila = await TestDbMngr.showAllTables(
      TestDbMngr.sakilaKnex
    );

    // Drop only the extra (test-created) tables; seeded ones are untouched.
    await Promise.all(
      tablesInSakila
        .filter((tableName) => !sakilaTableNames.includes(tableName))
        .map(async (tableName) => {
          try {
            if (TestDbMngr.isPg()) {
              await TestDbMngr.sakilaKnex.raw(
                `DROP TABLE "${tableName}" CASCADE`
              );
            } else {
              await TestDbMngr.sakilaKnex.raw(`DROP TABLE ${tableName}`);
            }
          } catch (e) {
            console.error(e);
          }
        })
    );
  } catch (e) {
    console.error('cleanUpSakila', e);
  }
};
||||
|
||||
// Every relation shipped with the sakila sample DB, including the views
// (actor_info, customer_list, film_list, ...) and the PG payment
// partitions. Anything NOT in this list is treated as test-created and
// dropped by cleanUpSakila.
const sakilaTableNames = [
  'actor',
  'address',
  'category',
  'city',
  'country',
  'customer',
  'film',
  'film_actor',
  'film_category',
  'inventory',
  'language',
  'payment',
  'payment_p2007_01',
  'payment_p2007_02',
  'payment_p2007_03',
  'payment_p2007_04',
  'payment_p2007_05',
  'payment_p2007_06',
  'rental',
  'staff',
  'store',
  'actor_info',
  'customer_list',
  'film_list',
  'nicer_but_slower_film_list',
  'sales_by_film_category',
  'sales_by_store',
  'staff_list',
];
||||
|
||||
export { cleanUpSakila, resetAndSeedSakila }; |
@ -0,0 +1,18 @@
|
||||
import { DbConfig } from '../../../src/interface/config'; |
||||
|
||||
const isSqlite = (context) => { |
||||
return ( |
||||
(context.dbConfig as DbConfig).client === 'sqlite' || |
||||
(context.dbConfig as DbConfig).client === 'sqlite3' |
||||
); |
||||
}; |
||||
|
||||
const isPg = (context) => { |
||||
return (context.dbConfig as DbConfig).client === 'pg'; |
||||
}; |
||||
|
||||
const isMysql = (context) => |
||||
(context.dbConfig as DbConfig).client === 'mysql' || |
||||
(context.dbConfig as DbConfig).client === 'mysql2'; |
||||
|
||||
export { isSqlite, isMysql, isPg }; |
@ -0,0 +1,43 @@
|
||||
|
||||
import express from 'express'; |
||||
import nocobuild from '../../../src/nocobuild' |
||||
// import { Noco } from '../../../src/lib';
|
||||
import cleanupMeta from './cleanupMeta'; |
||||
import {cleanUpSakila, resetAndSeedSakila} from './cleanupSakila'; |
||||
import { createUser } from '../factory/user'; |
||||
|
||||
let server; |
||||
|
||||
const serverInit = async () => { |
||||
const serverInstance = express(); |
||||
serverInstance.enable('trust proxy'); |
||||
// serverInstance.use(await Noco.init());
|
||||
await nocobuild(serverInstance) |
||||
serverInstance.use(function(req, res, next){ |
||||
// 50 sec timeout
|
||||
req.setTimeout(500000, function(){ |
||||
console.log('Request has timed out.'); |
||||
res.send(408); |
||||
}); |
||||
next(); |
||||
}); |
||||
return serverInstance; |
||||
}; |
||||
|
||||
// The module-level `server` is only assigned after the first init() call.
const isFirstTimeRun = () => !server
||||
|
||||
/**
 * Per-test initialisation. On the first call, seeds sakila and boots the
 * shared express app; on every call, resets the sakila and meta DBs and
 * signs up a fresh user with the 'editor' role.
 *
 * Returns the test context: { app, token, dbConfig, sakilaDbConfig }.
 */
export default async function () {
  // Dynamic import so TestDbMngr is loaded after process.env is prepared.
  const {default: TestDbMngr} = await import('../TestDbMngr');

  if (isFirstTimeRun()) {
    await resetAndSeedSakila();
    server = await serverInit();
  }

  await cleanUpSakila();
  await cleanupMeta();

  const { token } = await createUser({ app: server }, { roles: 'editor' });

  return { app: server, token, dbConfig: TestDbMngr.dbConfig, sakilaDbConfig: TestDbMngr.getSakilaDbConfig() };
}
@ -0,0 +1,10 @@
|
||||
import 'mocha'; |
||||
import baseModelSqlTest from './tests/baseModelSql.test'; |
||||
|
||||
// Registers every model-layer suite; currently only the BaseModelSqlv2 tests.
function modelTests() {
  baseModelSqlTest();
}
||||
|
||||
// Wrap the model suites under a single mocha describe block.
export default function () {
  describe('Model', modelTests);
}
@ -0,0 +1,591 @@
|
||||
import 'mocha'; |
||||
import { BaseModelSqlv2 } from '../../../../src/db/BaseModelSqlv2' |
||||
import NcConnectionMgrv2 from '../../../../src/utils/common/NcConnectionMgrv2' |
||||
import init from '../../init'; |
||||
import { createProject } from '../../factory/project'; |
||||
import { createTable } from '../../factory/table'; |
||||
import Base from '../../../../src/models/Base'; |
||||
import Model from '../../../../src/models/Model'; |
||||
import Project from '../../../../src/models/Project'; |
||||
import View from '../../../../src/models/View'; |
||||
import { createRow, generateDefaultRowAttributes } from '../../factory/row'; |
||||
import Audit from '../../../../src/models/Audit'; |
||||
import { expect } from 'chai'; |
||||
import Filter from '../../../../src/models/Filter'; |
||||
import { createLtarColumn } from '../../factory/column'; |
||||
import LinkToAnotherRecordColumn from '../../../../src/models/LinkToAnotherRecordColumn'; |
||||
import { isPg, isSqlite } from '../../init/db'; |
||||
|
||||
function baseModelSqlTests() { |
||||
let context; |
||||
let project: Project; |
||||
let table: Model; |
||||
let view: View; |
||||
let baseModelSql: BaseModelSqlv2; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
project = await createProject(context); |
||||
table = await createTable(context, project); |
||||
view = await table.getViews()[0]; |
||||
|
||||
const base = await Base.get(table.base_id); |
||||
baseModelSql = new BaseModelSqlv2({ |
||||
dbDriver: await NcConnectionMgrv2.get(base), |
||||
model: table, |
||||
view, |
||||
}); |
||||
}); |
||||
|
||||
it('Insert record', async () => { |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
const columns = await table.getColumns(); |
||||
|
||||
let inputData: any = generateDefaultRowAttributes({ columns }); |
||||
const response = await baseModelSql.insert( |
||||
generateDefaultRowAttributes({ columns }), |
||||
undefined, |
||||
request |
||||
); |
||||
const insertedRow = (await baseModelSql.list())[0]; |
||||
|
||||
if (isPg(context)) { |
||||
inputData.CreatedAt = new Date(inputData.CreatedAt).toISOString(); |
||||
inputData.UpdatedAt = new Date(inputData.UpdatedAt).toISOString(); |
||||
|
||||
insertedRow.CreatedAt = new Date(insertedRow.CreatedAt).toISOString(); |
||||
insertedRow.UpdatedAt = new Date(insertedRow.UpdatedAt).toISOString(); |
||||
|
||||
response.CreatedAt = new Date(response.CreatedAt).toISOString(); |
||||
response.UpdatedAt = new Date(response.UpdatedAt).toISOString(); |
||||
} |
||||
|
||||
expect(insertedRow).to.include(inputData); |
||||
expect(insertedRow).to.include(response); |
||||
|
||||
const rowInsertedAudit = ( |
||||
await Audit.projectAuditList(project.id, {}) |
||||
).find((audit) => audit.op_sub_type === 'INSERT'); |
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'INSERT', |
||||
description: 'Record with ID 1 has been inserted into Table Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk insert record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
const bulkData = Array(10) |
||||
.fill(0) |
||||
.map((_, index) => generateDefaultRowAttributes({ columns, index })); |
||||
await baseModelSql.bulkInsert(bulkData, { cookie: request }); |
||||
|
||||
const insertedRows = await baseModelSql.list(); |
||||
|
||||
if (isPg(context)) { |
||||
insertedRows.forEach((row) => { |
||||
row.CreatedAt = new Date(row.CreatedAt).toISOString(); |
||||
row.UpdatedAt = new Date(row.UpdatedAt).toISOString(); |
||||
}); |
||||
} |
||||
|
||||
bulkData.forEach((inputData: any, index) => { |
||||
if (isPg(context)) { |
||||
inputData.CreatedAt = new Date(inputData.CreatedAt).toISOString(); |
||||
inputData.UpdatedAt = new Date(inputData.UpdatedAt).toISOString(); |
||||
} |
||||
expect(insertedRows[index]).to.include(inputData); |
||||
}); |
||||
|
||||
const rowBulkInsertedAudit = ( |
||||
await Audit.projectAuditList(project.id, {}) |
||||
).find((audit) => audit.op_sub_type === 'BULK_INSERT'); |
||||
expect(rowBulkInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_INSERT', |
||||
status: null, |
||||
description: '10 records have been bulk inserted in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Update record', async () => { |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
|
||||
const columns = await table.getColumns(); |
||||
|
||||
await baseModelSql.insert(generateDefaultRowAttributes({ columns })); |
||||
const rowId = 1; |
||||
await baseModelSql.updateByPk(rowId, { Title: 'test' }, undefined, request); |
||||
|
||||
const updatedRow = await baseModelSql.readByPk(1); |
||||
|
||||
expect(updatedRow).to.include({ Id: rowId, Title: 'test' }); |
||||
|
||||
const rowUpdatedAudit = (await Audit.projectAuditList(project.id, {})).find( |
||||
(audit) => audit.op_sub_type === 'UPDATE' |
||||
); |
||||
expect(rowUpdatedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'UPDATE', |
||||
description: 'Record with ID 1 has been updated in Table Table1_Title.\nColumn "Title" got changed from "test-0" to "test"', |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk update record', async () => { |
||||
// Since sqlite doesn't support multiple sql connections, we can't test bulk update in sqlite
|
||||
if (isSqlite(context)) return; |
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
const bulkData = Array(10) |
||||
.fill(0) |
||||
.map((_, index) => generateDefaultRowAttributes({ columns, index })); |
||||
await baseModelSql.bulkInsert(bulkData, { cookie: request }); |
||||
|
||||
const insertedRows: any[] = await baseModelSql.list(); |
||||
|
||||
await baseModelSql.bulkUpdate( |
||||
insertedRows.map((row) => ({ ...row, Title: `new-${row['Title']}` })), |
||||
{ cookie: request } |
||||
); |
||||
|
||||
const updatedRows = await baseModelSql.list(); |
||||
|
||||
updatedRows.forEach((row, index) => { |
||||
expect(row['Title']).to.equal(`new-test-${index}`); |
||||
}); |
||||
const rowBulkUpdateAudit = ( |
||||
await Audit.projectAuditList(project.id, {}) |
||||
).find((audit) => audit.op_sub_type === 'BULK_UPDATE'); |
||||
expect(rowBulkUpdateAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_UPDATE', |
||||
status: null, |
||||
description: '10 records have been bulk updated in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk update all record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
const bulkData = Array(10) |
||||
.fill(0) |
||||
.map((_, index) => generateDefaultRowAttributes({ columns, index })); |
||||
await baseModelSql.bulkInsert(bulkData, { cookie: request }); |
||||
|
||||
const idColumn = columns.find((column) => column.title === 'Id')!; |
||||
|
||||
await baseModelSql.bulkUpdateAll( |
||||
{ |
||||
filterArr: [ |
||||
new Filter({ |
||||
logical_op: 'and', |
||||
fk_column_id: idColumn.id, |
||||
comparison_op: 'lt', |
||||
value: 5, |
||||
}), |
||||
], |
||||
}, |
||||
{ Title: 'new-1' }, |
||||
{ cookie: request } |
||||
); |
||||
|
||||
const updatedRows = await baseModelSql.list(); |
||||
|
||||
updatedRows.forEach((row) => { |
||||
if (row.id < 5) expect(row['Title']).to.equal('new-1'); |
||||
}); |
||||
const rowBulkUpdateAudit = ( |
||||
await Audit.projectAuditList(project.id, {}) |
||||
).find((audit) => audit.op_sub_type === 'BULK_UPDATE'); |
||||
expect(rowBulkUpdateAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_UPDATE', |
||||
status: null, |
||||
description: '4 records have been bulk updated in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Delete record', async () => { |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
params: { id: 1 }, |
||||
}; |
||||
|
||||
const columns = await table.getColumns(); |
||||
const bulkData = Array(10) |
||||
.fill(0) |
||||
.map((_, index) => generateDefaultRowAttributes({ columns, index })); |
||||
await baseModelSql.bulkInsert(bulkData, { cookie: request }); |
||||
|
||||
const rowIdToDeleted = 1; |
||||
await baseModelSql.delByPk(rowIdToDeleted, undefined, request); |
||||
|
||||
const deletedRow = await baseModelSql.readByPk(rowIdToDeleted); |
||||
|
||||
expect(deletedRow).to.be.an('object').that.is.empty; |
||||
|
||||
console.log('Delete record', await Audit.projectAuditList(project.id, {})); |
||||
const rowDeletedAudit = (await Audit.projectAuditList(project.id, {})).find( |
||||
(audit) => audit.op_sub_type === 'DELETE' |
||||
); |
||||
expect(rowDeletedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'DELETE', |
||||
description: 'Record with ID 1 has been deleted in Table Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk delete records', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
const bulkData = Array(10) |
||||
.fill(0) |
||||
.map((_, index) => generateDefaultRowAttributes({ columns, index })); |
||||
await baseModelSql.bulkInsert(bulkData, { cookie: request }); |
||||
|
||||
const insertedRows: any[] = await baseModelSql.list(); |
||||
|
||||
await baseModelSql.bulkDelete( |
||||
insertedRows |
||||
.filter((row) => row['Id'] < 5) |
||||
.map((row) => ({ id: row['Id'] })), |
||||
{ cookie: request } |
||||
); |
||||
|
||||
const remainingRows = await baseModelSql.list(); |
||||
|
||||
expect(remainingRows).to.length(6); |
||||
|
||||
const rowBulkDeleteAudit = ( |
||||
await Audit.projectAuditList(project.id, {}) |
||||
).find((audit) => audit.op_sub_type === 'BULK_DELETE'); |
||||
|
||||
expect(rowBulkDeleteAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_DELETE', |
||||
status: null, |
||||
description: '4 records have been bulk deleted in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Bulk delete all record', async () => { |
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
const bulkData = Array(10) |
||||
.fill(0) |
||||
.map((_, index) => generateDefaultRowAttributes({ columns, index })); |
||||
await baseModelSql.bulkInsert(bulkData, { cookie: request }); |
||||
|
||||
const idColumn = columns.find((column) => column.title === 'Id')!; |
||||
|
||||
await baseModelSql.bulkDeleteAll( |
||||
{ |
||||
filterArr: [ |
||||
new Filter({ |
||||
logical_op: 'and', |
||||
fk_column_id: idColumn.id, |
||||
comparison_op: 'lt', |
||||
value: 5, |
||||
}), |
||||
], |
||||
}, |
||||
{ cookie: request } |
||||
); |
||||
|
||||
const remainingRows = await baseModelSql.list(); |
||||
|
||||
expect(remainingRows).to.length(6); |
||||
const rowBulkDeleteAudit = ( |
||||
await Audit.projectAuditList(project.id, {}) |
||||
).find((audit) => audit.op_sub_type === 'BULK_DELETE'); |
||||
expect(rowBulkDeleteAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
fk_model_id: table.id, |
||||
project_id: project.id, |
||||
row_id: null, |
||||
op_type: 'DATA', |
||||
op_sub_type: 'BULK_DELETE', |
||||
status: null, |
||||
description: '4 records have been bulk deleted in Table1_Title', |
||||
details: null, |
||||
}); |
||||
}); |
||||
|
||||
it('Nested insert', async () => { |
||||
const childTable = await createTable(context, project, { |
||||
title: 'Child Table', |
||||
table_name: 'child_table', |
||||
}); |
||||
const ltarColumn = await createLtarColumn(context, { |
||||
title: 'Ltar Column', |
||||
parentTable: table, |
||||
childTable, |
||||
type: 'hm', |
||||
}); |
||||
const childRow = await createRow(context, { |
||||
project, |
||||
table: childTable, |
||||
}); |
||||
const ltarColOptions = |
||||
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>(); |
||||
const childCol = await ltarColOptions.getChildColumn(); |
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
|
||||
await baseModelSql.nestedInsert( |
||||
{ |
||||
...generateDefaultRowAttributes({ columns }), |
||||
[ltarColumn.title]: [{ Id: childRow['Id'] }], |
||||
}, |
||||
undefined, |
||||
request |
||||
); |
||||
|
||||
const childBaseModel = new BaseModelSqlv2({ |
||||
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)), |
||||
model: childTable, |
||||
view, |
||||
}); |
||||
const insertedChildRow = await childBaseModel.readByPk(childRow['Id']); |
||||
expect(insertedChildRow[childCol.column_name]).to.equal(childRow['Id']); |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})) |
||||
.filter((audit) => audit.fk_model_id === table.id) |
||||
.find((audit) => audit.op_sub_type === 'INSERT'); |
||||
|
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'INSERT', |
||||
description: 'Record with ID 1 has been inserted into Table Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Link child', async () => { |
||||
const childTable = await createTable(context, project, { |
||||
title: 'Child Table', |
||||
table_name: 'child_table', |
||||
}); |
||||
const ltarColumn = await createLtarColumn(context, { |
||||
title: 'Ltar Column', |
||||
parentTable: table, |
||||
childTable, |
||||
type: 'hm', |
||||
}); |
||||
const insertedChildRow = await createRow(context, { |
||||
project, |
||||
table: childTable, |
||||
}); |
||||
const ltarColOptions = |
||||
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>(); |
||||
const childCol = await ltarColOptions.getChildColumn(); |
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
|
||||
await baseModelSql.insert( |
||||
generateDefaultRowAttributes({ columns }), |
||||
undefined, |
||||
request |
||||
); |
||||
const insertedRow = await baseModelSql.readByPk(1); |
||||
|
||||
await baseModelSql.addChild({ |
||||
colId: ltarColumn.id, |
||||
rowId: insertedRow['Id'], |
||||
childId: insertedChildRow['Id'], |
||||
cookie: request, |
||||
}); |
||||
|
||||
const childBaseModel = new BaseModelSqlv2({ |
||||
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)), |
||||
model: childTable, |
||||
view, |
||||
}); |
||||
const updatedChildRow = await childBaseModel.readByPk( |
||||
insertedChildRow['Id'] |
||||
); |
||||
|
||||
expect(updatedChildRow[childCol.column_name]).to.equal(insertedRow['Id']); |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})) |
||||
.filter((audit) => audit.fk_model_id === table.id) |
||||
.find((audit) => audit.op_sub_type === 'LINK_RECORD'); |
||||
|
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'LINK_RECORD', |
||||
description: |
||||
'Record [id:1] has been linked with record [id:1] in Table1_Title', |
||||
}); |
||||
}); |
||||
|
||||
it('Unlink child', async () => { |
||||
const childTable = await createTable(context, project, { |
||||
title: 'Child Table', |
||||
table_name: 'child_table', |
||||
}); |
||||
const ltarColumn = await createLtarColumn(context, { |
||||
title: 'Ltar Column', |
||||
parentTable: table, |
||||
childTable, |
||||
type: 'hm', |
||||
}); |
||||
const insertedChildRow = await createRow(context, { |
||||
project, |
||||
table: childTable, |
||||
}); |
||||
const ltarColOptions = |
||||
await ltarColumn.getColOptions<LinkToAnotherRecordColumn>(); |
||||
const childCol = await ltarColOptions.getChildColumn(); |
||||
|
||||
const columns = await table.getColumns(); |
||||
const request = { |
||||
clientIp: '::ffff:192.0.0.1', |
||||
user: { email: 'test@example.com' }, |
||||
}; |
||||
|
||||
await baseModelSql.insert( |
||||
generateDefaultRowAttributes({ columns }), |
||||
undefined, |
||||
request |
||||
); |
||||
const insertedRow = await baseModelSql.readByPk(1); |
||||
|
||||
await baseModelSql.addChild({ |
||||
colId: ltarColumn.id, |
||||
rowId: insertedRow['Id'], |
||||
childId: insertedChildRow['Id'], |
||||
cookie: request, |
||||
}); |
||||
|
||||
await baseModelSql.removeChild({ |
||||
colId: ltarColumn.id, |
||||
rowId: insertedRow['Id'], |
||||
childId: insertedChildRow['Id'], |
||||
cookie: request, |
||||
}); |
||||
|
||||
const childBaseModel = new BaseModelSqlv2({ |
||||
dbDriver: await NcConnectionMgrv2.get(await Base.get(table.base_id)), |
||||
model: childTable, |
||||
view, |
||||
}); |
||||
const updatedChildRow = await childBaseModel.readByPk( |
||||
insertedChildRow['Id'] |
||||
); |
||||
|
||||
expect(updatedChildRow[childCol.column_name]).to.be.null; |
||||
|
||||
const rowInsertedAudit = (await Audit.projectAuditList(project.id, {})) |
||||
.filter((audit) => audit.fk_model_id === table.id) |
||||
.find((audit) => audit.op_sub_type === 'UNLINK_RECORD'); |
||||
|
||||
expect(rowInsertedAudit).to.include({ |
||||
user: 'test@example.com', |
||||
ip: '::ffff:192.0.0.1', |
||||
base_id: null, |
||||
project_id: project.id, |
||||
fk_model_id: table.id, |
||||
row_id: '1', |
||||
op_type: 'DATA', |
||||
op_sub_type: 'UNLINK_RECORD', |
||||
description: |
||||
'Record [id:1] has been unlinked with record [id:1] in Table1_Title', |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
// Wrap the BaseModelSqlv2 tests under a single mocha describe block.
export default function () {
  describe('BaseModelSql', baseModelSqlTests);
}
@ -0,0 +1,26 @@
|
||||
import 'mocha'; |
||||
import authTests from './tests/auth.test'; |
||||
import orgTests from './tests/org.test'; |
||||
import projectTests from './tests/project.test'; |
||||
import columnTypeSpecificTests from './tests/columnTypeSpecific.test'; |
||||
import tableTests from './tests/table.test'; |
||||
import tableRowTests from './tests/tableRow.test'; |
||||
import viewRowTests from './tests/viewRow.test'; |
||||
import attachmentTests from './tests/attachment.test'; |
||||
import filterTest from './tests/filter.test'; |
||||
|
||||
function restTests() { |
||||
authTests(); |
||||
orgTests(); |
||||
projectTests(); |
||||
tableTests(); |
||||
tableRowTests(); |
||||
viewRowTests(); |
||||
columnTypeSpecificTests(); |
||||
attachmentTests(); |
||||
filterTest(); |
||||
} |
||||
|
||||
// Wrap the REST suites under a single mocha describe block.
export default function () {
  describe('Rest', restTests);
}
@ -0,0 +1,172 @@
|
||||
import { expect } from 'chai'; |
||||
import fs from 'fs'; |
||||
import { OrgUserRoles, ProjectRoles } from 'nocodb-sdk'; |
||||
import path from 'path'; |
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import { createProject } from '../../factory/project'; |
||||
import init from '../../init'; |
||||
|
||||
// Scratch file created before and removed after each attachment test.
const FILE_PATH = path.join(__dirname, 'test.txt');
||||
|
||||
// Test case list
|
||||
// 1. Upload file - Super admin
|
||||
// 2. Upload file - Without token
|
||||
// 3. Upload file - Org level viewer
|
||||
// 4. Upload file - Org level creator
|
||||
// 5. Upload file - Org level viewer with editor role in a project
|
||||
|
||||
function attachmentTests() { |
||||
let context; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
fs.writeFileSync(FILE_PATH, 'test', `utf-8`); |
||||
context = await init(); |
||||
}); |
||||
|
||||
afterEach(function () { |
||||
fs.unlinkSync(FILE_PATH); |
||||
}); |
||||
|
||||
it('Upload file - Super admin', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/storage/upload') |
||||
.attach('files', FILE_PATH) |
||||
.set('xc-auth', context.token) |
||||
.expect(200); |
||||
|
||||
const attachments = response.body; |
||||
expect(attachments).to.be.an('array'); |
||||
expect(attachments[0].title).to.be.eq(path.basename(FILE_PATH)); |
||||
}); |
||||
|
||||
it('Upload file - Without token', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/storage/upload') |
||||
.attach('files', FILE_PATH) |
||||
.expect(401); |
||||
|
||||
const msg = response.body.msg; |
||||
expect(msg).to.be.eq('Unauthorized'); |
||||
}); |
||||
|
||||
it('Upload file - Org level viewer', async () => { |
||||
// signup a user
|
||||
const args = { |
||||
email: 'dummyuser@example.com', |
||||
password: 'A1234abh2@dsad', |
||||
}; |
||||
|
||||
const signupResponse = await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send(args) |
||||
.expect(200); |
||||
|
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/storage/upload') |
||||
.attach('files', FILE_PATH) |
||||
.set('xc-auth', signupResponse.body.token) |
||||
.expect(400); |
||||
|
||||
const msg = response.body.msg; |
||||
expect(msg).to.be.eq('Upload not allowed'); |
||||
}); |
||||
|
||||
it('Upload file - Org level creator', async () => { |
||||
// signup a user
|
||||
const args = { |
||||
email: 'dummyuser@example.com', |
||||
password: 'A1234abh2@dsad', |
||||
}; |
||||
|
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send(args) |
||||
.expect(200); |
||||
|
||||
// update user role to creator
|
||||
const usersListResponse = await request(context.app) |
||||
.get('/api/v1/users') |
||||
.set('xc-auth', context.token) |
||||
.expect(200); |
||||
|
||||
const user = usersListResponse.body.list.find( |
||||
(u) => u.email === args.email |
||||
); |
||||
|
||||
expect(user).to.have.property('roles').to.be.equal(OrgUserRoles.VIEWER); |
||||
|
||||
await request(context.app) |
||||
.patch('/api/v1/users/' + user.id) |
||||
.set('xc-auth', context.token) |
||||
.send({ roles: OrgUserRoles.CREATOR }) |
||||
.expect(200); |
||||
|
||||
const signinResponse = await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
// pass empty data in await request
|
||||
.send(args) |
||||
.expect(200); |
||||
|
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/storage/upload') |
||||
.attach('files', FILE_PATH) |
||||
.set('xc-auth', signinResponse.body.token) |
||||
.expect(200); |
||||
|
||||
const attachments = response.body; |
||||
expect(attachments).to.be.an('array'); |
||||
expect(attachments[0].title).to.be.eq(path.basename(FILE_PATH)); |
||||
}); |
||||
|
||||
it('Upload file - Org level viewer with editor role in a project', async () => { |
||||
// signup a new user
|
||||
const args = { |
||||
email: 'dummyuser@example.com', |
||||
password: 'A1234abh2@dsad', |
||||
}; |
||||
|
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send(args) |
||||
.expect(200); |
||||
|
||||
const newProject = await createProject(context, { |
||||
title: 'NewTitle1', |
||||
}); |
||||
|
||||
// invite user to project with editor role
|
||||
await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${newProject.id}/users`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
roles: ProjectRoles.EDITOR, |
||||
email: args.email, |
||||
project_id: newProject.id, |
||||
projectName: newProject.title, |
||||
}) |
||||
.expect(200); |
||||
|
||||
// signin to get user token
|
||||
const signinResponse = await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
// pass empty data in await request
|
||||
.send(args) |
||||
.expect(200); |
||||
|
||||
const response = await request(context.app) |
||||
.post('/api/v1/db/storage/upload') |
||||
.attach('files', FILE_PATH) |
||||
.set('xc-auth', signinResponse.body.token) |
||||
.expect(200); |
||||
|
||||
const attachments = response.body; |
||||
expect(attachments).to.be.an('array'); |
||||
expect(attachments[0].title).to.be.eq(path.basename(FILE_PATH)); |
||||
}); |
||||
} |
||||
|
||||
// Registers the attachment suite with the shared mocha runner.
export default function () {
  describe('Attachment', attachmentTests);
}
@ -0,0 +1,190 @@
|
||||
import { expect } from 'chai'; |
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import init from '../../init'; |
||||
import { defaultUserArgs } from '../../factory/user'; |
||||
|
||||
// Test case list
|
||||
// 1. Signup with valid email
|
||||
// 2. Signup with invalid email
|
||||
// 3. Signup with invalid password
|
||||
// 4. Signin with valid credentials
|
||||
// 5. Signin without email and password
|
||||
// 6. Signin with invalid credentials
|
||||
// 7. Signin with invalid password
|
||||
// 8. me without token
|
||||
// 9. me with token
|
||||
// 10. forgot password with non-existing email id
|
||||
// 11. TBD: forgot password with existing email id
|
||||
// 12. Change password
|
||||
// 13. Change password - after logout
|
||||
// 14. TBD: Reset Password with an invalid token
|
||||
// 15. TBD: Email validate with an invalid token
|
||||
// 16. TBD: Email validate with a valid token
|
||||
// 17. TBD: Forgot password validate with a valid token
|
||||
// 18. TBD: Reset Password with an valid token
|
||||
// 19. TBD: refresh token api
|
||||
|
||||
function authTests() { |
||||
let context; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
}); |
||||
|
||||
it('Signup with valid email', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send({ email: 'new@example.com', password: defaultUserArgs.password }) |
||||
.expect(200); |
||||
|
||||
const token = response.body.token; |
||||
expect(token).to.be.a('string'); |
||||
}); |
||||
|
||||
it('Signup with invalid email', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send({ email: 'test', password: defaultUserArgs.password }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signup with invalid passsword', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signup') |
||||
.send({ email: defaultUserArgs.email, password: 'weakpass' }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signin with valid credentials', async () => { |
||||
const response = await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
.send({ |
||||
email: defaultUserArgs.email, |
||||
password: defaultUserArgs.password, |
||||
}) |
||||
.expect(200); |
||||
const token = response.body.token; |
||||
expect(token).to.be.a('string'); |
||||
}); |
||||
|
||||
it('Signup without email and password', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
// pass empty data in await request
|
||||
.send({}) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signin with invalid credentials', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
.send({ email: 'abc@abc.com', password: defaultUserArgs.password }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Signin with invalid password', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/user/signin') |
||||
.send({ email: defaultUserArgs.email, password: 'wrongPassword' }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('me without token', async () => { |
||||
const response = await request(context.app) |
||||
.get('/api/v1/auth/user/me') |
||||
.unset('xc-auth') |
||||
.expect(200); |
||||
|
||||
if (!response.body?.roles?.guest) { |
||||
return new Error('User should be guest'); |
||||
} |
||||
}); |
||||
|
||||
it('me with token', async () => { |
||||
const response = await request(context.app) |
||||
.get('/api/v1/auth/user/me') |
||||
.set('xc-auth', context.token) |
||||
.expect(200); |
||||
|
||||
const email = response.body.email; |
||||
expect(email).to.equal(defaultUserArgs.email); |
||||
}); |
||||
|
||||
it('Forgot password with a non-existing email id', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/forgot') |
||||
.send({ email: 'nonexisting@email.com' }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
// todo: fix mailer issues
|
||||
// it('Forgot password with an existing email id', function () {});
|
||||
|
||||
it('Change password', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/change') |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
currentPassword: defaultUserArgs.password, |
||||
newPassword: 'NEW' + defaultUserArgs.password, |
||||
}) |
||||
.expect(200); |
||||
}); |
||||
|
||||
it('Change password - after logout', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/change') |
||||
.unset('xc-auth') |
||||
.send({ |
||||
currentPassword: defaultUserArgs.password, |
||||
newPassword: 'NEW' + defaultUserArgs.password, |
||||
}) |
||||
.expect(401); |
||||
}); |
||||
|
||||
// todo:
|
||||
it('Reset Password with an invalid token', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/password/reset/someRandomValue') |
||||
.send({ email: defaultUserArgs.email }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
it('Email validate with an invalid token', async () => { |
||||
await request(context.app) |
||||
.post('/api/v1/auth/email/validate/someRandomValue') |
||||
.send({ email: defaultUserArgs.email }) |
||||
.expect(400); |
||||
}); |
||||
|
||||
// todo:
|
||||
// it('Email validate with a valid token', async () => {
|
||||
// // await request(context.app)
|
||||
// // .post('/auth/email/validate/someRandomValue')
|
||||
// // .send({email: EMAIL_ID})
|
||||
// // .expect(500, done);
|
||||
// });
|
||||
|
||||
// todo:
|
||||
// it('Forgot password validate with a valid token', async () => {
|
||||
// // await request(context.app)
|
||||
// // .post('/auth/token/validate/someRandomValue')
|
||||
// // .send({email: EMAIL_ID})
|
||||
// // .expect(500, done);
|
||||
// });
|
||||
|
||||
// todo:
|
||||
// it('Reset Password with an valid token', async () => {
|
||||
// // await request(context.app)
|
||||
// // .post('/auth/password/reset/someRandomValue')
|
||||
// // .send({password: 'anewpassword'})
|
||||
// // .expect(500, done);
|
||||
// });
|
||||
|
||||
// todo: refresh token api
|
||||
} |
||||
|
||||
// Registers the auth suite with the shared mocha runner.
export default function () {
  describe('Auth', authTests);
}
@ -0,0 +1,102 @@
|
||||
import 'mocha'; |
||||
import { title } from 'process'; |
||||
import request from 'supertest'; |
||||
import { UITypes } from 'nocodb-sdk'; |
||||
import { expect } from 'chai'; |
||||
import init from '../../init'; |
||||
import { createProject, createSakilaProject } from '../../factory/project'; |
||||
import { createColumn, createQrCodeColumn } from '../../factory/column'; |
||||
import { getTable } from '../../factory/table'; |
||||
import type Model from '../../../../src/models/Model'; |
||||
import type Project from '../../../../src/models/Project'; |
||||
import type Column from '../../../../src/models/Column'; |
||||
|
||||
// Test case list
|
||||
// 1. Qr Code Column
|
||||
// a. adding a QR code column which references another column
|
||||
// - delivers the same cell values as the referenced column
|
||||
// - gets deleted if the referenced column gets deleted
|
||||
|
||||
function columnTypeSpecificTests() { |
||||
let context; |
||||
let project: Project; |
||||
let sakilaProject: Project; |
||||
let customerTable: Model; |
||||
let qrValueReferenceColumn: Column; |
||||
|
||||
const qrValueReferenceColumnTitle = 'Qr Value Column'; |
||||
const qrCodeReferenceColumnTitle = 'Qr Code Column'; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
|
||||
sakilaProject = await createSakilaProject(context); |
||||
project = await createProject(context); |
||||
|
||||
customerTable = await getTable({ |
||||
project: sakilaProject, |
||||
name: 'customer', |
||||
}); |
||||
}); |
||||
|
||||
describe('Qr Code Column', () => { |
||||
beforeEach(async function () { |
||||
qrValueReferenceColumn = await createColumn(context, customerTable, { |
||||
title: qrValueReferenceColumnTitle, |
||||
uidt: UITypes.SingleLineText, |
||||
table_name: customerTable.table_name, |
||||
column_name: title, |
||||
}); |
||||
}); |
||||
describe('adding a QR code column which references another column ', async () => { |
||||
beforeEach(async function () { |
||||
await createQrCodeColumn(context, { |
||||
title: qrCodeReferenceColumnTitle, |
||||
table: customerTable, |
||||
referencedQrValueTableColumnTitle: qrValueReferenceColumnTitle, |
||||
}); |
||||
}); |
||||
it('delivers the same cell values as the referenced column', async () => { |
||||
const resp = await request(context.app) |
||||
.get(`/api/v1/db/data/noco/${sakilaProject.id}/${customerTable.id}`) |
||||
.set('xc-auth', context.token) |
||||
.expect(200); |
||||
expect(resp.body.list[0][qrValueReferenceColumnTitle]).to.eql( |
||||
resp.body.list[0][qrCodeReferenceColumnTitle], |
||||
); |
||||
expect( |
||||
resp.body.list.map((row) => row[qrValueReferenceColumnTitle]), |
||||
).to.eql(resp.body.list.map((row) => row[qrCodeReferenceColumnTitle])); |
||||
}); |
||||
|
||||
it('gets deleted if the referenced column gets deleted', async () => { |
||||
// delete referenced value column
|
||||
const columnsBeforeReferencedColumnDeleted = |
||||
await customerTable.getColumns(); |
||||
|
||||
expect( |
||||
columnsBeforeReferencedColumnDeleted.some( |
||||
(col) => col['title'] === qrCodeReferenceColumnTitle, |
||||
), |
||||
).to.eq(true); |
||||
|
||||
const response = await request(context.app) |
||||
.delete(`/api/v1/db/meta/columns/${qrValueReferenceColumn.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({}); |
||||
|
||||
const columnsAfterReferencedColumnDeleted = |
||||
await customerTable.getColumns(); |
||||
expect( |
||||
columnsAfterReferencedColumnDeleted.some( |
||||
(col) => col['title'] === qrCodeReferenceColumnTitle, |
||||
), |
||||
).to.eq(false); |
||||
}); |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
// Registers the column-type-specific suite with the shared mocha runner.
export default function () {
  describe('Column types specific behavior', columnTypeSpecificTests);
}
@ -0,0 +1,962 @@
|
||||
import 'mocha'; |
||||
import init from '../../init'; |
||||
import { createProject } from '../../factory/project'; |
||||
import Project from '../../../../src/models/Project'; |
||||
import { createTable } from '../../factory/table'; |
||||
import { UITypes } from 'nocodb-sdk'; |
||||
import { createBulkRows, rowMixedValue, listRow } from '../../factory/row'; |
||||
import Model from '../../../../src/models/Model'; |
||||
import { expect } from 'chai'; |
||||
import request from 'supertest'; |
||||
|
||||
const debugMode = true; |
||||
|
||||
// Test case list
|
||||
|
||||
async function retrieveRecordsAndValidate( |
||||
filter: { |
||||
comparison_op: string; |
||||
value: string; |
||||
fk_column_id: any; |
||||
status: string; |
||||
logical_op: string; |
||||
}, |
||||
title: string |
||||
) { |
||||
let expectedRecords = []; |
||||
let toFloat = false; |
||||
if ( |
||||
['Number', 'Decimal', 'Currency', 'Percent', 'Duration', 'Rating'].includes( |
||||
title |
||||
) |
||||
) { |
||||
toFloat = true; |
||||
} |
||||
|
||||
// case for all comparison operators
|
||||
switch (filter.comparison_op) { |
||||
case 'eq': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => |
||||
(toFloat ? parseFloat(record[title]) : record[title]) === |
||||
(toFloat ? parseFloat(filter.value) : filter.value) |
||||
); |
||||
break; |
||||
case 'neq': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => |
||||
(toFloat ? parseFloat(record[title]) : record[title]) !== |
||||
(toFloat ? parseFloat(filter.value) : filter.value) |
||||
); |
||||
break; |
||||
case 'null': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => record[title] === null |
||||
); |
||||
break; |
||||
case 'notnull': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => record[title] !== null |
||||
); |
||||
break; |
||||
case 'empty': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => record[title] === '' |
||||
); |
||||
break; |
||||
case 'notempty': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => record[title] !== '' |
||||
); |
||||
break; |
||||
case 'like': |
||||
expectedRecords = unfilteredRecords.filter((record) => |
||||
record[title]?.includes(filter.value) |
||||
); |
||||
break; |
||||
case 'nlike': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => !record[title]?.includes(filter.value) |
||||
); |
||||
break; |
||||
case 'gt': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => |
||||
(toFloat ? parseFloat(record[title]) : record[title]) > |
||||
(toFloat ? parseFloat(filter.value) : filter.value) && |
||||
record[title] !== null |
||||
); |
||||
break; |
||||
case 'gte': |
||||
expectedRecords = unfilteredRecords.filter( |
||||
(record) => |
||||
(toFloat ? parseFloat(record[title]) : record[title]) >= |
||||
(toFloat ? parseFloat(filter.value) : filter.value) && |
||||
record[title] !== null |
||||
); |
||||
break; |
||||
case 'lt': |
||||
expectedRecords = unfilteredRecords.filter((record) => |
||||
title === 'Rating' |
||||
? (toFloat ? parseFloat(record[title]) : record[title]) < |
||||
(toFloat ? parseFloat(filter.value) : filter.value) || |
||||
record[title] === null |
||||
: (toFloat ? parseFloat(record[title]) : record[title]) < |
||||
(toFloat ? parseFloat(filter.value) : filter.value) && |
||||
record[title] !== null |
||||
); |
||||
break; |
||||
case 'lte': |
||||
expectedRecords = unfilteredRecords.filter((record) => |
||||
title === 'Rating' |
||||
? (toFloat ? parseFloat(record[title]) : record[title]) <= |
||||
(toFloat ? parseFloat(filter.value) : filter.value) || |
||||
record[title] === null |
||||
: (toFloat ? parseFloat(record[title]) : record[title]) <= |
||||
(toFloat ? parseFloat(filter.value) : filter.value) && |
||||
record[title] !== null |
||||
); |
||||
break; |
||||
case 'anyof': |
||||
expectedRecords = unfilteredRecords.filter((record) => { |
||||
const values = filter.value.split(','); |
||||
const recordValue = record[title]?.split(','); |
||||
return values.some((value) => recordValue?.includes(value)); |
||||
}); |
||||
break; |
||||
case 'nanyof': |
||||
expectedRecords = unfilteredRecords.filter((record) => { |
||||
const values = filter.value.split(','); |
||||
const recordValue = record[title]?.split(','); |
||||
return !values.some((value) => recordValue?.includes(value)); |
||||
}); |
||||
break; |
||||
case 'allof': |
||||
expectedRecords = unfilteredRecords.filter((record) => { |
||||
const values = filter.value.split(','); |
||||
return values.every((value) => record[title]?.includes(value)); |
||||
}); |
||||
break; |
||||
case 'nallof': |
||||
expectedRecords = unfilteredRecords.filter((record) => { |
||||
const values = filter.value.split(','); |
||||
return !values.every((value) => record[title]?.includes(value)); |
||||
}); |
||||
break; |
||||
} |
||||
|
||||
// retrieve filtered records
|
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/data/noco/${project.id}/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.query({ |
||||
filterArrJson: JSON.stringify([filter]), |
||||
}) |
||||
.expect(200); |
||||
|
||||
// validate
|
||||
if (debugMode) { |
||||
if (response.body.pageInfo.totalRows !== expectedRecords.length) { |
||||
console.log(`Failed for filter: ${JSON.stringify(filter)}`); |
||||
console.log(`Expected: ${expectedRecords.length}`); |
||||
console.log(`Actual: ${response.body.pageInfo.totalRows}`); |
||||
throw new Error('fix me!'); |
||||
} |
||||
response.body.list.forEach((row, index) => { |
||||
if (row[title] !== expectedRecords[index][title]) { |
||||
console.log(`Failed for filter: ${JSON.stringify(filter)}`); |
||||
console.log(`Expected: ${expectedRecords[index][title]}`); |
||||
console.log(`Actual: ${row[title]}`); |
||||
throw new Error('fix me!'); |
||||
} |
||||
}); |
||||
} else { |
||||
expect(response.body.pageInfo.totalRows).to.equal(expectedRecords.length); |
||||
response.body.list.forEach((row, index) => { |
||||
expect(row[title] !== expectedRecords[index][title]); |
||||
}); |
||||
} |
||||
} |
||||
|
||||
// Shared state for the filter suites below; (re)initialised in each suite's
// beforeEach and read by retrieveRecordsAndValidate / applyDateFilter.
let context;
let project: Project;
let table: Model;
let columns: any[];
// Full 400-row data set, fetched once per suite to compute expectations.
let unfilteredRecords: any[] = [];
||||
|
||||
async function verifyFilters(dataType, columnId, filterList) { |
||||
const filter = { |
||||
fk_column_id: columnId, |
||||
status: 'create', |
||||
logical_op: 'and', |
||||
comparison_op: '', |
||||
value: '', |
||||
}; |
||||
|
||||
for (let i = 0; i < filterList.length; i++) { |
||||
filter.comparison_op = filterList[i].comparison_op; |
||||
filter.value = filterList[i].value; |
||||
await retrieveRecordsAndValidate(filter, dataType); |
||||
} |
||||
} |
||||
|
||||
// Filter tests over text-based column types (single/long text, email,
// phone, URL): eq/neq, null/notnull, empty, like/nlike.
function filterTextBased() {
  // Prepare a 400-row table with one column per text-based UI type.
  beforeEach(async function () {
    context = await init();
    project = await createProject(context);
    table = await createTable(context, project, {
      table_name: 'textBased',
      title: 'TextBased',
      columns: [
        {
          column_name: 'Id',
          title: 'Id',
          uidt: UITypes.ID,
        },
        {
          column_name: 'SingleLineText',
          title: 'SingleLineText',
          uidt: UITypes.SingleLineText,
        },
        {
          column_name: 'MultiLineText',
          title: 'MultiLineText',
          uidt: UITypes.LongText,
        },
        {
          column_name: 'Email',
          title: 'Email',
          uidt: UITypes.Email,
        },
        {
          column_name: 'Phone',
          title: 'Phone',
          uidt: UITypes.PhoneNumber,
        },
        {
          column_name: 'Url',
          title: 'Url',
          uidt: UITypes.URL,
        },
      ],
    });

    columns = await table.getColumns();

    // rowMixedValue yields a deterministic mix of values (including nulls)
    // per column type and row index.
    let rowAttributes = [];
    for (let i = 0; i < 400; i++) {
      let row = {
        SingleLineText: rowMixedValue(columns[1], i),
        MultiLineText: rowMixedValue(columns[2], i),
        Email: rowMixedValue(columns[3], i),
        Phone: rowMixedValue(columns[4], i),
        Url: rowMixedValue(columns[5], i),
      };
      rowAttributes.push(row);
    }

    await createBulkRows(context, {
      project,
      table,
      values: rowAttributes,
    });
    unfilteredRecords = await listRow({ project, table });

    // verify length of unfiltered records to be 400
    expect(unfilteredRecords.length).to.equal(400);
  });

  it('Type: Single Line Text', async () => {
    let filterList = [
      { comparison_op: 'eq', value: 'Afghanistan' },
      { comparison_op: 'neq', value: 'Afghanistan' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'empty', value: '' },
      // { comparison_op: 'notempty', value: '' },
      { comparison_op: 'like', value: 'Au' },
      { comparison_op: 'nlike', value: 'Au' },
    ];
    await verifyFilters('SingleLineText', columns[1].id, filterList);
  });

  it('Type: Multi Line Text', async () => {
    let filterList = [
      { comparison_op: 'eq', value: 'Aberdeen, United Kingdom' },
      { comparison_op: 'neq', value: 'Aberdeen, United Kingdom' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'empty', value: '' },
      // { comparison_op: 'notempty', value: '' },
      { comparison_op: 'like', value: 'abad' },
      { comparison_op: 'nlike', value: 'abad' },
    ];
    await verifyFilters('MultiLineText', columns[2].id, filterList);
  });

  it('Type: Email', async () => {
    let filterList = [
      { comparison_op: 'eq', value: 'leota@hotmail.com' },
      { comparison_op: 'neq', value: 'leota@hotmail.com' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'empty', value: '' },
      // { comparison_op: 'notempty', value: '' },
      { comparison_op: 'like', value: 'cox.net' },
      { comparison_op: 'nlike', value: 'cox.net' },
    ];
    await verifyFilters('Email', columns[3].id, filterList);
  });

  it('Type: Phone', async () => {
    let filterList = [
      { comparison_op: 'eq', value: '504-621-8927' },
      { comparison_op: 'neq', value: '504-621-8927' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'empty', value: '' },
      // { comparison_op: 'notempty', value: '' },
      { comparison_op: 'like', value: '504' },
      { comparison_op: 'nlike', value: '504' },
    ];
    await verifyFilters('Phone', columns[4].id, filterList);
  });

  it('Type: Url', async () => {
    let filterList = [
      { comparison_op: 'eq', value: 'https://www.youtube.com' },
      { comparison_op: 'neq', value: 'https://www.youtube.com' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'empty', value: '' },
      // { comparison_op: 'notempty', value: '' },
      { comparison_op: 'like', value: 'e.com' },
      { comparison_op: 'nlike', value: 'e.com' },
    ];
    await verifyFilters('Url', columns[5].id, filterList);
  });
}
||||
|
||||
function filterNumberBased() { |
||||
// prepare data for test cases
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
project = await createProject(context); |
||||
table = await createTable(context, project, { |
||||
table_name: 'numberBased', |
||||
title: 'numberBased', |
||||
columns: [ |
||||
{ |
||||
column_name: 'Id', |
||||
title: 'Id', |
||||
uidt: UITypes.ID, |
||||
}, |
||||
{ |
||||
column_name: 'Number', |
||||
title: 'Number', |
||||
uidt: UITypes.Number, |
||||
}, |
||||
{ |
||||
column_name: 'Decimal', |
||||
title: 'Decimal', |
||||
uidt: UITypes.Decimal, |
||||
}, |
||||
{ |
||||
column_name: 'Currency', |
||||
title: 'Currency', |
||||
uidt: UITypes.Currency, |
||||
}, |
||||
{ |
||||
column_name: 'Percent', |
||||
title: 'Percent', |
||||
uidt: UITypes.Percent, |
||||
}, |
||||
{ |
||||
column_name: 'Duration', |
||||
title: 'Duration', |
||||
uidt: UITypes.Duration, |
||||
}, |
||||
{ |
||||
column_name: 'Rating', |
||||
title: 'Rating', |
||||
uidt: UITypes.Rating, |
||||
}, |
||||
], |
||||
}); |
||||
|
||||
columns = await table.getColumns(); |
||||
|
||||
let rowAttributes = []; |
||||
for (let i = 0; i < 400; i++) { |
||||
let row = { |
||||
Number: rowMixedValue(columns[1], i), |
||||
Decimal: rowMixedValue(columns[2], i), |
||||
Currency: rowMixedValue(columns[3], i), |
||||
Percent: rowMixedValue(columns[4], i), |
||||
Duration: rowMixedValue(columns[5], i), |
||||
Rating: rowMixedValue(columns[6], i), |
||||
}; |
||||
rowAttributes.push(row); |
||||
} |
||||
|
||||
await createBulkRows(context, { |
||||
project, |
||||
table, |
||||
values: rowAttributes, |
||||
}); |
||||
unfilteredRecords = await listRow({ project, table }); |
||||
|
||||
// verify length of unfiltered records to be 400
|
||||
expect(unfilteredRecords.length).to.equal(400); |
||||
}); |
||||
|
||||
it('Type: Number', async () => { |
||||
let filterList = [ |
||||
{ comparison_op: 'eq', value: '33' }, |
||||
{ comparison_op: 'neq', value: '33' }, |
||||
{ comparison_op: 'null', value: '' }, |
||||
{ comparison_op: 'notnull', value: '' }, |
||||
{ comparison_op: 'gt', value: '44' }, |
||||
{ comparison_op: 'gte', value: '44' }, |
||||
{ comparison_op: 'lt', value: '44' }, |
||||
{ comparison_op: 'lte', value: '44' }, |
||||
]; |
||||
await verifyFilters('Number', columns[1].id, filterList); |
||||
}); |
||||
|
||||
it('Type: Decimal', async () => { |
||||
let filterList = [ |
||||
{ comparison_op: 'eq', value: '33.3' }, |
||||
{ comparison_op: 'neq', value: '33.3' }, |
||||
{ comparison_op: 'null', value: '' }, |
||||
{ comparison_op: 'notnull', value: '' }, |
||||
{ comparison_op: 'gt', value: '44.26' }, |
||||
{ comparison_op: 'gte', value: '44.26' }, |
||||
{ comparison_op: 'lt', value: '44.26' }, |
||||
{ comparison_op: 'lte', value: '44.26' }, |
||||
]; |
||||
await verifyFilters('Decimal', columns[2].id, filterList); |
||||
}); |
||||
|
||||
it('Type: Currency', async () => { |
||||
let filterList = [ |
||||
{ comparison_op: 'eq', value: '33.3' }, |
||||
{ comparison_op: 'neq', value: '33.3' }, |
||||
{ comparison_op: 'null', value: '' }, |
||||
{ comparison_op: 'notnull', value: '' }, |
||||
{ comparison_op: 'gt', value: '44.26' }, |
||||
{ comparison_op: 'gte', value: '44.26' }, |
||||
{ comparison_op: 'lt', value: '44.26' }, |
||||
{ comparison_op: 'lte', value: '44.26' }, |
||||
]; |
||||
await verifyFilters('Decimal', columns[3].id, filterList); |
||||
}); |
||||
|
||||
it('Type: Percent', async () => { |
||||
let filterList = [ |
||||
{ comparison_op: 'eq', value: '33' }, |
||||
{ comparison_op: 'neq', value: '33' }, |
||||
{ comparison_op: 'null', value: '' }, |
||||
{ comparison_op: 'notnull', value: '' }, |
||||
{ comparison_op: 'gt', value: '44' }, |
||||
{ comparison_op: 'gte', value: '44' }, |
||||
{ comparison_op: 'lt', value: '44' }, |
||||
{ comparison_op: 'lte', value: '44' }, |
||||
]; |
||||
await verifyFilters('Percent', columns[4].id, filterList); |
||||
}); |
||||
|
||||
it('Type: Duration', async () => { |
||||
let filterList = [ |
||||
{ comparison_op: 'eq', value: '10' }, |
||||
{ comparison_op: 'neq', value: '10' }, |
||||
{ comparison_op: 'null', value: '' }, |
||||
{ comparison_op: 'notnull', value: '' }, |
||||
{ comparison_op: 'gt', value: '50' }, |
||||
{ comparison_op: 'gte', value: '50' }, |
||||
{ comparison_op: 'lt', value: '50' }, |
||||
{ comparison_op: 'lte', value: '50' }, |
||||
]; |
||||
await verifyFilters('Duration', columns[5].id, filterList); |
||||
}); |
||||
|
||||
it('Type: Rating', async () => { |
||||
let filterList = [ |
||||
{ comparison_op: 'eq', value: '3' }, |
||||
{ comparison_op: 'neq', value: '3' }, |
||||
{ comparison_op: 'null', value: '' }, |
||||
{ comparison_op: 'notnull', value: '' }, |
||||
{ comparison_op: 'gt', value: '2' }, |
||||
{ comparison_op: 'gte', value: '2' }, |
||||
{ comparison_op: 'lt', value: '2' }, |
||||
{ comparison_op: 'lte', value: '2' }, |
||||
]; |
||||
await verifyFilters('Rating', columns[6].id, filterList); |
||||
}); |
||||
} |
||||
|
||||
// Filter tests over select-based column types (single/multi select):
// eq/neq, null/notnull, like/nlike, anyof/nanyof (+ allof/nallof for multi).
function filterSelectBased() {
  // Prepare a 400-row table with one single-select and one multi-select column.
  beforeEach(async function () {
    context = await init();
    project = await createProject(context);
    table = await createTable(context, project, {
      table_name: 'selectBased',
      title: 'selectBased',
      columns: [
        {
          column_name: 'Id',
          title: 'Id',
          uidt: UITypes.ID,
        },
        {
          column_name: 'SingleSelect',
          title: 'SingleSelect',
          uidt: UITypes.SingleSelect,
          // dtxp carries the select options (month abbreviations).
          dtxp: "'jan','feb','mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'",
        },
        {
          column_name: 'MultiSelect',
          title: 'MultiSelect',
          uidt: UITypes.MultiSelect,
          dtxp: "'jan','feb','mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'",
        },
      ],
    });

    columns = await table.getColumns();

    // rowMixedValue yields a deterministic mix of option values (including
    // nulls) per column type and row index.
    let rowAttributes = [];
    for (let i = 0; i < 400; i++) {
      let row = {
        SingleSelect: rowMixedValue(columns[1], i),
        MultiSelect: rowMixedValue(columns[2], i),
      };
      rowAttributes.push(row);
    }

    await createBulkRows(context, {
      project,
      table,
      values: rowAttributes,
    });
    unfilteredRecords = await listRow({ project, table });

    // verify length of unfiltered records to be 400
    expect(unfilteredRecords.length).to.equal(400);
  });

  it('Type: Single select', async () => {
    let filterList = [
      { comparison_op: 'eq', value: 'jan' },
      { comparison_op: 'neq', value: 'jan' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'like', value: 'j' },
      { comparison_op: 'nlike', value: 'j' },
      { comparison_op: 'anyof', value: 'jan,feb,mar' },
      { comparison_op: 'nanyof', value: 'jan,feb,mar' },
    ];
    await verifyFilters('SingleSelect', columns[1].id, filterList);
  });

  it('Type: Multi select', async () => {
    let filterList = [
      { comparison_op: 'eq', value: 'jan,feb,mar' },
      { comparison_op: 'neq', value: 'jan,feb,mar' },
      { comparison_op: 'null', value: '' },
      { comparison_op: 'notnull', value: '' },
      { comparison_op: 'like', value: 'jan' },
      { comparison_op: 'nlike', value: 'jan' },
      { comparison_op: 'anyof', value: 'jan,feb,mar' },
      { comparison_op: 'nanyof', value: 'jan,feb,mar' },
      { comparison_op: 'allof', value: 'jan,feb,mar' },
      { comparison_op: 'nallof', value: 'jan,feb,mar' },
    ];
    await verifyFilters('MultiSelect', columns[2].id, filterList);
  });
}
||||
|
||||
// Apply a single date filter against the table's data endpoint and return
// the filtered row list. The hard assertion on the row count is currently
// disabled; mismatches are only logged for diagnosis.
async function applyDateFilter(filterParams, expectedRecords) {
  const response = await request(context.app)
    .get(`/api/v1/db/data/noco/${project.id}/${table.id}`)
    .set('xc-auth', context.token)
    .query({
      filterArrJson: JSON.stringify([filterParams]),
    })
    .expect(200);

  // expect(response.body.pageInfo.totalRows).to.equal(expectedRecords);
  const actualRows = response.body.pageInfo.totalRows;
  if (actualRows !== expectedRecords) {
    console.log('filterParams', filterParams);
    console.log(
      'response.body.pageInfo.totalRows',
      response.body.pageInfo.totalRows
    );
    console.log('expectedRecords', expectedRecords);
  }
  return response.body.list;
}
||||
|
||||
// Filter tests for a date column. Seeds 800 rows of mixed Date values,
// then exercises every date comparison operator (eq/neq/gt/gte/lt/lte/
// isWithin/blank/notblank) with its sub-operators, comparing API row
// counts against counts computed locally from the unfiltered data.
function filterDateBased() {
  // prepare data for test cases
  beforeEach(async function () {
    context = await init();
    project = await createProject(context);
    table = await createTable(context, project, {
      table_name: 'dateBased',
      title: 'dateBased',
      columns: [
        {
          column_name: 'Id',
          title: 'Id',
          uidt: UITypes.ID,
        },
        {
          column_name: 'Date',
          title: 'Date',
          uidt: UITypes.Date,
        },
      ],
    });

    columns = await table.getColumns();

    // 800 rows of deterministic mixed values (rowMixedValue may include
    // nulls/blanks, which the blank/notblank cases below rely on).
    let rowAttributes = [];
    for (let i = 0; i < 800; i++) {
      let row = {
        Date: rowMixedValue(columns[1], i),
      };
      rowAttributes.push(row);
    }

    await createBulkRows(context, {
      project,
      table,
      values: rowAttributes,
    });
    unfilteredRecords = await listRow({ project, table });

    // verify length of unfiltered records to be 800
    expect(unfilteredRecords.length).to.equal(800);
  });

  it('Type: Date ', async () => {
    // Each constant below is an epoch-millis number: Date#setHours /
    // #setDate / #setMonth / #setFullYear mutate the Date and return the
    // resulting timestamp, normalized here to local midnight.
    const today = new Date().setHours(0, 0, 0, 0);
    const tomorrow = new Date(
      new Date().setDate(new Date().getDate() + 1)
    ).setHours(0, 0, 0, 0);
    const yesterday = new Date(
      new Date().setDate(new Date().getDate() - 1)
    ).setHours(0, 0, 0, 0);
    const oneWeekAgo = new Date(
      new Date().setDate(new Date().getDate() - 7)
    ).setHours(0, 0, 0, 0);
    const oneWeekFromNow = new Date(
      new Date().setDate(new Date().getDate() + 7)
    ).setHours(0, 0, 0, 0);
    const oneMonthAgo = new Date(
      new Date().setMonth(new Date().getMonth() - 1)
    ).setHours(0, 0, 0, 0);
    const oneMonthFromNow = new Date(
      new Date().setMonth(new Date().getMonth() + 1)
    ).setHours(0, 0, 0, 0);
    const daysAgo45 = new Date(
      new Date().setDate(new Date().getDate() - 45)
    ).setHours(0, 0, 0, 0);
    const daysFromNow45 = new Date(
      new Date().setDate(new Date().getDate() + 45)
    ).setHours(0, 0, 0, 0);
    const thisMonth15 = new Date(new Date().setDate(15)).setHours(0, 0, 0, 0);
    const oneYearAgo = new Date(
      new Date().setFullYear(new Date().getFullYear() - 1)
    ).setHours(0, 0, 0, 0);
    const oneYearFromNow = new Date(
      new Date().setFullYear(new Date().getFullYear() + 1)
    ).setHours(0, 0, 0, 0);

    // records array with time set to 00:00:00; store time in unix epoch
    const recordsTimeSetToZero = unfilteredRecords.map((r) => {
      const date = new Date(r['Date']);
      date.setHours(0, 0, 0, 0);
      return date.getTime();
    });

    // Expected row counts for the "is" (eq) sub-operators: exact match on
    // the normalized day.
    const isFilterList = [
      {
        opSub: 'today',
        rowCount: recordsTimeSetToZero.filter((r) => r === today).length,
      },
      {
        opSub: 'tomorrow',
        rowCount: recordsTimeSetToZero.filter((r) => r === tomorrow).length,
      },
      {
        opSub: 'yesterday',
        rowCount: recordsTimeSetToZero.filter((r) => r === yesterday).length,
      },
      {
        opSub: 'oneWeekAgo',
        rowCount: recordsTimeSetToZero.filter((r) => r === oneWeekAgo).length,
      },
      {
        opSub: 'oneWeekFromNow',
        rowCount: recordsTimeSetToZero.filter((r) => r === oneWeekFromNow)
          .length,
      },
      {
        opSub: 'oneMonthAgo',
        rowCount: recordsTimeSetToZero.filter((r) => r === oneMonthAgo).length,
      },
      {
        opSub: 'oneMonthFromNow',
        rowCount: recordsTimeSetToZero.filter((r) => r === oneMonthFromNow)
          .length,
      },
      {
        opSub: 'daysAgo',
        value: 45,
        rowCount: recordsTimeSetToZero.filter((r) => r === daysAgo45).length,
      },
      {
        opSub: 'daysFromNow',
        value: 45,
        rowCount: recordsTimeSetToZero.filter((r) => r === daysFromNow45)
          .length,
      },
      {
        opSub: 'exactDate',
        value: new Date(thisMonth15).toISOString().split('T')[0],
        rowCount: recordsTimeSetToZero.filter((r) => r === thisMonth15).length,
      },
    ];

    // "is after" filter list
    // (counts use strict r > boundary; reused below for gt/gte/lt/lte)
    const isAfterFilterList = [
      {
        opSub: 'today',
        rowCount: recordsTimeSetToZero.filter((r) => r > today).length,
      },
      {
        opSub: 'tomorrow',
        rowCount: recordsTimeSetToZero.filter((r) => r > tomorrow).length,
      },
      {
        opSub: 'yesterday',
        rowCount: recordsTimeSetToZero.filter((r) => r > yesterday).length,
      },
      {
        opSub: 'oneWeekAgo',
        rowCount: recordsTimeSetToZero.filter((r) => r > oneWeekAgo).length,
      },
      {
        opSub: 'oneWeekFromNow',
        rowCount: recordsTimeSetToZero.filter((r) => r > oneWeekFromNow).length,
      },
      {
        opSub: 'oneMonthAgo',
        rowCount: recordsTimeSetToZero.filter((r) => r > oneMonthAgo).length,
      },
      {
        opSub: 'oneMonthFromNow',
        rowCount: recordsTimeSetToZero.filter((r) => r > oneMonthFromNow)
          .length,
      },
      {
        opSub: 'daysAgo',
        value: 45,
        rowCount: recordsTimeSetToZero.filter((r) => r > daysAgo45).length,
      },
      {
        opSub: 'daysFromNow',
        value: 45,
        rowCount: recordsTimeSetToZero.filter((r) => r > daysFromNow45).length,
      },
      {
        opSub: 'exactDate',
        value: new Date().toISOString().split('T')[0],
        rowCount: recordsTimeSetToZero.filter((r) => r > today).length,
      },
    ];

    // "is within" filter list
    // NOTE(review): only nextNumberOfDays/pastNumberOfDays define `value`;
    // the other entries send `value: undefined` in the loop below —
    // presumably the API ignores it for those sub-ops; confirm.
    const isWithinFilterList = [
      {
        opSub: 'pastWeek',
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= oneWeekAgo && r <= today
        ).length,
      },
      {
        opSub: 'pastMonth',
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= oneMonthAgo && r <= today
        ).length,
      },
      {
        opSub: 'pastYear',
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= oneYearAgo && r <= today
        ).length,
      },
      {
        opSub: 'nextWeek',
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= today && r <= oneWeekFromNow
        ).length,
      },
      {
        opSub: 'nextMonth',
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= today && r <= oneMonthFromNow
        ).length,
      },
      {
        opSub: 'nextYear',
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= today && r <= oneYearFromNow
        ).length,
      },
      {
        opSub: 'nextNumberOfDays',
        value: 45,
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= today && r <= daysFromNow45
        ).length,
      },
      {
        opSub: 'pastNumberOfDays',
        value: 45,
        rowCount: recordsTimeSetToZero.filter(
          (r) => r >= daysAgo45 && r <= today
        ).length,
      },
    ];

    // rest of the filters (without subop type)
    const filterList = [
      {
        opType: 'blank',
        rowCount: unfilteredRecords.filter(
          (r) => r['Date'] === null || r['Date'] === ''
        ).length,
      },
      {
        opType: 'notblank',
        rowCount: unfilteredRecords.filter(
          (r) => r['Date'] !== null && r['Date'] !== ''
        ).length,
      },
    ];

    // is (comparison_op 'eq'): exact-day match per sub-operator
    for (let i = 0; i < isFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'eq',
        comparison_sub_op: isFilterList[i].opSub,
        value: isFilterList[i].value,
      };
      await applyDateFilter(filter, isFilterList[i].rowCount);
    }

    // is not (comparison_op 'neq'): complement of 'eq' over all 800 rows
    for (let i = 0; i < isFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'neq',
        comparison_sub_op: isFilterList[i].opSub,
        value: isFilterList[i].value,
      };
      await applyDateFilter(filter, 800 - isFilterList[i].rowCount);
    }

    // comparison_op 'gt' — expected counts are records strictly after
    // the boundary (r > x). NOTE(review): this section was labelled
    // "is before", which contradicts the computed counts; verify the
    // intended operator naming against the filter semantics.
    for (let i = 0; i < isAfterFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'gt',
        comparison_sub_op: isAfterFilterList[i].opSub,
        value: isAfterFilterList[i].value,
      };
      await applyDateFilter(filter, isAfterFilterList[i].rowCount);
    }

    // comparison_op 'gte' — the +1 assumes exactly one seeded record
    // falls exactly on each boundary date. NOTE(review): fragile if the
    // seed pattern changes; confirm. (Section was labelled "is before or on".)
    for (let i = 0; i < isAfterFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'gte',
        comparison_sub_op: isAfterFilterList[i].opSub,
        value: isAfterFilterList[i].value,
      };
      await applyDateFilter(filter, isAfterFilterList[i].rowCount + 1);
    }

    // comparison_op 'lt' — 800 - count(r > x) - 1 = records strictly
    // before the boundary (same single-record-on-boundary assumption).
    // (Section was labelled "is after".)
    for (let i = 0; i < isAfterFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'lt',
        comparison_sub_op: isAfterFilterList[i].opSub,
        value: isAfterFilterList[i].value,
      };
      await applyDateFilter(filter, 800 - isAfterFilterList[i].rowCount - 1);
    }

    // comparison_op 'lte' — 800 - count(r > x) = records on or before
    // the boundary. (Section was labelled "is after or on".)
    for (let i = 0; i < isAfterFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'lte',
        comparison_sub_op: isAfterFilterList[i].opSub,
        value: isAfterFilterList[i].value,
      };
      await applyDateFilter(filter, 800 - isAfterFilterList[i].rowCount);
    }

    // is within: inclusive range per sub-operator
    for (let i = 0; i < isWithinFilterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: 'isWithin',
        comparison_sub_op: isWithinFilterList[i].opSub,
        value: isWithinFilterList[i].value,
      };
      await applyDateFilter(filter, isWithinFilterList[i].rowCount);
    }

    // rest of the filters (without subop type): blank / notblank
    for (let i = 0; i < filterList.length; i++) {
      const filter = {
        fk_column_id: columns[1].id,
        status: 'create',
        logical_op: 'and',
        comparison_op: filterList[i].opType,
        value: '',
      };
      await applyDateFilter(filter, filterList[i].rowCount);
    }
  });
}
||||
|
||||
// Register every filter suite under its own describe block, in the
// original declaration order.
export default function () {
  const suites = {
    'Filter: Text based': filterTextBased,
    'Filter: Numerical': filterNumberBased,
    'Filter: Select based': filterSelectBased,
    'Filter: Date based': filterDateBased,
  };
  for (const [title, suiteFn] of Object.entries(suites)) {
    describe(title, suiteFn);
  }
}
@ -0,0 +1,223 @@
|
||||
import { expect } from 'chai'; |
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import { OrgUserRoles } from 'nocodb-sdk'; |
||||
import init from '../../init'; |
||||
|
||||
// Test case list in this file
|
||||
// 1. Get users list
|
||||
// 2. Invite a new user
|
||||
// 3. Update user role
|
||||
// 4. Remove user
|
||||
// 5. Get token list
|
||||
// 6. Generate token
|
||||
// 7. Delete token
|
||||
// 8. Disable/Enable signup
|
||||
|
||||
// Org-level user, token, and signup-settings tests.
// Covers: user list / invite / role update / removal, API-token
// list / create / delete, and the invite-only signup toggle.
function authTests() {
  let context;

  beforeEach(async function () {
    context = await init();
  });

  it('Get users list', async () => {
    const response = await request(context.app)
      .get('/api/v1/users')
      .set('xc-auth', context.token)
      .expect(200);

    expect(response.body).to.have.keys(['list', 'pageInfo']);
    // only the user created by init() exists
    expect(response.body.list).to.have.length(1);
  });

  it('Invite a new user', async () => {
    const response = await request(context.app)
      .post('/api/v1/users')
      .set('xc-auth', context.token)
      .send({ email: 'a@nocodb.com' })
      .expect(200);

    expect(response.body).to.have.property('invite_token').to.be.a('string');
    // todo: verify invite token
  });

  it('Update user role', async () => {
    const email = 'a@nocodb.com';
    // invite a user
    await request(context.app)
      .post('/api/v1/users')
      .set('xc-auth', context.token)
      .send({ email })
      .expect(200);
    const response = await request(context.app)
      .get('/api/v1/users')
      .set('xc-auth', context.token)
      .expect(200);
    expect(response.body.list).to.have.length(2);

    const user = response.body.list.find((u) => u.email === email);

    // invited users default to the viewer role
    expect(user).to.have.property('roles').to.be.equal(OrgUserRoles.VIEWER);

    await request(context.app)
      .patch('/api/v1/users/' + user.id)
      .set('xc-auth', context.token)
      .send({ roles: OrgUserRoles.CREATOR })
      .expect(200);

    const response2 = await request(context.app)
      .get('/api/v1/users')
      .set('xc-auth', context.token)
      .expect(200);
    expect(response2.body.list).to.have.length(2);

    const user2 = response2.body.list.find((u) => u.email === email);

    expect(user2).to.have.property('roles').to.be.equal(OrgUserRoles.CREATOR);
  });

  it('Remove user', async () => {
    const email = 'a@nocodb.com';
    // invite a user
    await request(context.app)
      .post('/api/v1/users')
      .set('xc-auth', context.token)
      .send({ email })
      .expect(200);

    const response = await request(context.app)
      .get('/api/v1/users')
      .set('xc-auth', context.token)
      .expect(200);
    expect(response.body.list).to.have.length(2);

    const user = response.body.list.find((u) => u.email === email);

    expect(user).to.have.property('roles').to.be.equal(OrgUserRoles.VIEWER);

    await request(context.app)
      .delete('/api/v1/users/' + user.id)
      .set('xc-auth', context.token)
      .expect(200);

    const response2 = await request(context.app)
      .get('/api/v1/users')
      .set('xc-auth', context.token)
      .expect(200);
    expect(response2.body.list).to.have.length(1);
  });

  it('Get token list', async () => {
    const response = await request(context.app)
      .get('/api/v1/tokens')
      .set('xc-auth', context.token)
      .expect(200);

    expect(response.body).to.have.keys(['list', 'pageInfo']);
    expect(response.body.list).to.have.length(0);
  });

  it('Generate token', async () => {
    await request(context.app)
      .post('/api/v1/tokens')
      .set('xc-auth', context.token)
      .send({ description: 'test' })
      .expect(200);

    const response = await request(context.app)
      .get('/api/v1/tokens')
      .set('xc-auth', context.token)
      .expect(200);

    expect(response.body).to.have.keys(['list', 'pageInfo']);
    expect(response.body.list).to.have.length(1);
    expect(response.body.list[0]).to.have.property('token').to.be.a('string');
    expect(response.body.list[0])
      .to.have.property('description')
      .to.be.a('string')
      .to.be.eq('test');
  });

  it('Delete token', async () => {
    // create a token to delete; its value is needed for the DELETE path
    const r = await request(context.app)
      .post('/api/v1/tokens')
      .set('xc-auth', context.token)
      .send({ description: 'test' })
      .expect(200);

    let response = await request(context.app)
      .get('/api/v1/tokens')
      .set('xc-auth', context.token)
      .expect(200);

    expect(response.body).to.have.keys(['list', 'pageInfo']);
    expect(response.body.list).to.have.length(1);

    await request(context.app)
      .delete('/api/v1/tokens/' + r.body.token)
      .set('xc-auth', context.token)
      .expect(200);

    response = await request(context.app)
      .get('/api/v1/tokens')
      .set('xc-auth', context.token)
      .expect(200);

    expect(response.body).to.have.keys(['list', 'pageInfo']);
    expect(response.body.list).to.have.length(0);
  });

  // was `it.only(...)`: an exclusive test left in would silently skip
  // every other test in this suite, so the `.only` has been removed
  it('Disable/Enable signup', async () => {
    const args = {
      email: 'dummyuser@example.com',
      password: 'A1234abh2@dsad',
    };

    // turn on invite-only mode: open signup must be rejected
    await request(context.app)
      .post('/api/v1/app-settings')
      .set('xc-auth', context.token)
      .send({ invite_only_signup: true })
      .expect(200);

    const failedRes = await request(context.app)
      .post('/api/v1/auth/user/signup')
      .send(args)
      .expect(400);

    expect(failedRes.body)
      .to.be.an('object')
      .to.have.property('msg')
      .to.be.equal('Not allowed to signup, contact super admin.');

    // turn invite-only mode back off: signup should succeed
    await request(context.app)
      .post('/api/v1/app-settings')
      .set('xc-auth', context.token)
      .send({ invite_only_signup: false })
      .expect(200);

    const successRes = await request(context.app)
      .post('/api/v1/auth/user/signup')
      .send(args)
      .expect(200);

    expect(successRes.body)
      .to.be.an('object')
      .to.have.property('token')
      .to.be.a('string');

    // the returned token must authenticate as the new user
    const userMeRes = await request(context.app)
      .get('/api/v1/auth/user/me')
      .set('xc-auth', successRes.body.token)
      .expect(200);

    expect(userMeRes.body)
      .to.be.an('object')
      .to.have.property('email')
      .to.be.eq(args.email);
  });
}
||||
|
||||
export default function () {} |
@ -0,0 +1,360 @@
|
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import { Project } from '../../../../src/models' |
||||
import { createTable } from '../../factory/table'; |
||||
import init from '../../init'; |
||||
import { createProject, createSharedBase } from '../../factory/project'; |
||||
import { beforeEach } from 'mocha'; |
||||
import { Exception } from 'handlebars'; |
||||
import { expect } from 'chai'; |
||||
|
||||
// Test case list
|
||||
// 1. Get project info
|
||||
// 2. UI ACL
|
||||
// 3. Create project
|
||||
// 4. Create project with existing title
|
||||
// 5. Update project
|
||||
// 6. Update project with existing title
|
||||
// 7. Create project shared base
|
||||
// 8. Created project shared base should have only editor or viewer role
|
||||
// 9. Updated project shared base should have only editor or viewer role
|
||||
// 10. Updated project shared base
|
||||
// 11. Get project shared base
|
||||
// 12. Delete project shared base
|
||||
// 13. Meta diff sync
|
||||
// 14. Meta diff sync
|
||||
// 15. Meta diff sync
|
||||
// 16. Get all projects meta
|
||||
|
||||
// Project meta-API tests: info, visibility rules, CRUD, shared base
// lifecycle, meta-diff sync, audits, and aggregated meta info.
//
// Fix applied throughout: the original checks constructed `new Error(...)`
// without throwing (or `return`ed the Error from the async test). Mocha
// ignores a returned non-promise value, so those checks could never fail
// the test. All of them now `throw`. The handlebars `Exception` throws
// were also replaced with the standard `Error`.
function projectTest() {
  let context;
  let project;

  beforeEach(async function () {
    context = await init();

    project = await createProject(context);
  });

  it('Get project info', async () => {
    await request(context.app)
      .get(`/api/v1/db/meta/projects/${project.id}/info`)
      .set('xc-auth', context.token)
      .send({})
      .expect(200);
  });

  // todo: Test by creating models under project and check if the UCL is working
  it('UI ACL', async () => {
    await request(context.app)
      .get(`/api/v1/db/meta/projects/${project.id}/visibility-rules`)
      .set('xc-auth', context.token)
      .send({})
      .expect(200);
  });
  // todo: Test creating visibility set

  it('List projects', async () => {
    const response = await request(context.app)
      .get('/api/v1/db/meta/projects/')
      .set('xc-auth', context.token)
      .send({})
      .expect(200);

    if (response.body.list.length !== 1)
      throw new Error('Should list only 1 project');
    if (!response.body.pageInfo) throw new Error('Should have pagination info');
  });

  it('Create project', async () => {
    const response = await request(context.app)
      .post('/api/v1/db/meta/projects/')
      .set('xc-auth', context.token)
      .send({
        title: 'Title1',
      })
      .expect(200);

    const newProject = await Project.getByTitleOrId(response.body.id);
    if (!newProject) throw new Error('Project not created');
  });

  it('Create projects with existing title', async () => {
    await request(context.app)
      .post(`/api/v1/db/meta/projects/`)
      .set('xc-auth', context.token)
      .send({
        title: project.title,
      })
      .expect(400);
  });

  // todo: fix passport user role population bug, then add a
  // 'Delete project' test (create a throwaway project, DELETE it, and
  // verify Project.getByTitleOrId no longer resolves it).

  it('Read project', async () => {
    const response = await request(context.app)
      .get(`/api/v1/db/meta/projects/${project.id}`)
      .set('xc-auth', context.token)
      .send()
      .expect(200);

    if (response.body.id !== project.id)
      throw new Error('Got the wrong project');
  });

  it('Update projects', async () => {
    await request(context.app)
      .patch(`/api/v1/db/meta/projects/${project.id}`)
      .set('xc-auth', context.token)
      .send({
        title: 'NewTitle',
      })
      .expect(200);

    const newProject = await Project.getByTitleOrId(project.id);
    if (newProject.title !== 'NewTitle') {
      throw new Error('Project not updated');
    }
  });

  it('Update projects with existing title', async function () {
    const newProject = await createProject(context, {
      title: 'NewTitle1',
    });

    await request(context.app)
      .patch(`/api/v1/db/meta/projects/${project.id}`)
      .set('xc-auth', context.token)
      .send({
        title: newProject.title,
      })
      .expect(400);
  });

  it('Create project shared base', async () => {
    await request(context.app)
      .post(`/api/v1/db/meta/projects/${project.id}/shared`)
      .set('xc-auth', context.token)
      .send({
        roles: 'viewer',
        password: 'password123',
      })
      .expect(200);

    const updatedProject = await Project.getByTitleOrId(project.id);

    if (
      !updatedProject.uuid ||
      updatedProject.roles !== 'viewer' ||
      updatedProject.password !== 'password123'
    ) {
      throw new Error('Shared base not configured properly');
    }
  });

  it('Created project shared base should have only editor or viewer role', async () => {
    // 'commenter' is not a valid shared-base role; it must not be persisted
    await request(context.app)
      .post(`/api/v1/db/meta/projects/${project.id}/shared`)
      .set('xc-auth', context.token)
      .send({
        roles: 'commenter',
        password: 'password123',
      })
      .expect(200);

    const updatedProject = await Project.getByTitleOrId(project.id);

    if (updatedProject.roles === 'commenter') {
      throw new Error('Shared base not configured properly');
    }
  });

  it('Updated project shared base should have only editor or viewer role', async () => {
    await createSharedBase(context.app, context.token, project);

    await request(context.app)
      .patch(`/api/v1/db/meta/projects/${project.id}/shared`)
      .set('xc-auth', context.token)
      .send({
        roles: 'commenter',
        password: 'password123',
      })
      .expect(200);

    const updatedProject = await Project.getByTitleOrId(project.id);

    if (updatedProject.roles === 'commenter') {
      throw new Error('Shared base not updated properly');
    }
  });

  it('Updated project shared base', async () => {
    await createSharedBase(context.app, context.token, project);

    await request(context.app)
      .patch(`/api/v1/db/meta/projects/${project.id}/shared`)
      .set('xc-auth', context.token)
      .send({
        roles: 'editor',
        password: 'password123',
      })
      .expect(200);
    const updatedProject = await Project.getByTitleOrId(project.id);

    if (updatedProject.roles !== 'editor') {
      throw new Error('Shared base not updated properly');
    }
  });

  it('Get project shared base', async () => {
    await createSharedBase(context.app, context.token, project);

    await request(context.app)
      .get(`/api/v1/db/meta/projects/${project.id}/shared`)
      .set('xc-auth', context.token)
      .send()
      .expect(200);

    const updatedProject = await Project.getByTitleOrId(project.id);
    if (!updatedProject.uuid) {
      throw new Error('Shared base not created');
    }
  });

  it('Delete project shared base', async () => {
    await createSharedBase(context.app, context.token, project);

    await request(context.app)
      .delete(`/api/v1/db/meta/projects/${project.id}/shared`)
      .set('xc-auth', context.token)
      .send()
      .expect(200);
    const updatedProject = await Project.getByTitleOrId(project.id);
    if (updatedProject.uuid) {
      throw new Error('Shared base not deleted');
    }
  });

  // todo: Do compare api test

  it('Meta diff sync', async () => {
    await request(context.app)
      .get(`/api/v1/db/meta/projects/${project.id}/meta-diff`)
      .set('xc-auth', context.token)
      .send()
      .expect(200);
  });

  it('Meta diff sync', async () => {
    await request(context.app)
      .post(`/api/v1/db/meta/projects/${project.id}/meta-diff`)
      .set('xc-auth', context.token)
      .send()
      .expect(200);
  });

  // todo: improve test. Check whether the all the actions are present in the response and correct as well
  // renamed from a third duplicate 'Meta diff sync' — this hits the
  // audits endpoint, not meta-diff
  it('Audit list', async () => {
    await request(context.app)
      .get(`/api/v1/db/meta/projects/${project.id}/audits`)
      .set('xc-auth', context.token)
      .send()
      .expect(200);
  });

  it('Get all projects meta', async () => {
    await createTable(context, project, {
      table_name: 'table1',
      title: 'table1',
    });
    await createTable(context, project, {
      table_name: 'table2',
      title: 'table2',
    });
    await createTable(context, project, {
      table_name: 'table3',
      title: 'table3',
    });

    await request(context.app)
      .get(`/api/v1/aggregated-meta-info`)
      .set('xc-auth', context.token)
      .send({})
      .expect(200)
      .then((res) => {
        expect(res.body).to.have.all.keys(
          'userCount',
          'sharedBaseCount',
          'projectCount',
          'projects'
        );
        expect(res.body).to.have.property('projectCount').to.eq(1);
        expect(res.body).to.have.property('projects').to.be.an('array');
        expect(res.body.projects[0].tableCount.table).to.be.eq(3);
        expect(res.body)
          .to.have.nested.property('projects[0].tableCount.table')
          .to.be.a('number');
        expect(res.body)
          .to.have.nested.property('projects[0].tableCount.view')
          .to.be.a('number');
        expect(res.body)
          .to.have.nested.property('projects[0].viewCount')
          .to.be.an('object')
          .have.keys(
            'formCount',
            'gridCount',
            'galleryCount',
            'kanbanCount',
            'total',
            'sharedFormCount',
            'sharedGridCount',
            'sharedGalleryCount',
            'sharedKanbanCount',
            'sharedTotal',
            'sharedLockedCount'
          );
        expect(res.body.projects[0]).have.keys(
          'external',
          'webhookCount',
          'filterCount',
          'sortCount',
          'userCount',
          'rowCount',
          'tableCount',
          'viewCount'
        );
        expect(res.body)
          .to.have.nested.property('projects[0].rowCount')
          .to.be.an('array');
        expect(res.body)
          .to.have.nested.property('projects[0].external')
          .to.be.an('boolean');
      });
  });
}
||||
|
||||
// Register the project test suite with mocha.
export default function () {
  describe('Project', projectTest);
}
@ -0,0 +1,264 @@
|
||||
import 'mocha'; |
||||
import request from 'supertest'; |
||||
import init from '../../init'; |
||||
import { createTable, getAllTables } from '../../factory/table'; |
||||
import { createProject } from '../../factory/project'; |
||||
import { defaultColumns } from '../../factory/column'; |
||||
import Model from '../../../../src/models/Model'; |
||||
import { expect } from 'chai'; |
||||
|
||||
// Test case list
|
||||
// 1. Get table list
|
||||
// 2. Create table
|
||||
// 3. Create table with same table name
|
||||
// 4. Create table with same title
|
||||
// 5. Create table with title length more than the limit
|
||||
// 6. Create table with title having leading white space
|
||||
// 7. Update table
|
||||
// 8. Delete table
|
||||
// 9. Get table
|
||||
// 10. Reorder table
|
||||
|
||||
function tableTest() { |
||||
let context; |
||||
let project; |
||||
let table; |
||||
|
||||
beforeEach(async function () { |
||||
context = await init(); |
||||
|
||||
project = await createProject(context); |
||||
table = await createTable(context, project); |
||||
}); |
||||
|
||||
it('Get table list', async function () { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
expect(response.body.list).to.be.an('array').not.empty; |
||||
}); |
||||
|
||||
it('Create table', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'table2', |
||||
title: 'new_title_2', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(200); |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 2) { |
||||
return new Error('Tables is not be created'); |
||||
} |
||||
|
||||
if (response.body.columns.length !== defaultColumns(context)) { |
||||
return new Error('Columns not saved properly'); |
||||
} |
||||
|
||||
if ( |
||||
!( |
||||
response.body.table_name.startsWith(project.prefix) && |
||||
response.body.table_name.endsWith('table2') |
||||
) |
||||
) { |
||||
return new Error('table name not configured properly'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with no table name', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: undefined, |
||||
title: 'new_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if ( |
||||
!response.text.includes( |
||||
'Missing table name `table_name` property in request body' |
||||
) |
||||
) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
console.log(tables); |
||||
return new Error( |
||||
`Tables should not be created, tables.length:${tables.length}` |
||||
); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with same table name', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: table.table_name, |
||||
title: 'New_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if (!response.text.includes('Duplicate table name')) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with same title', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'New_table_name', |
||||
title: table.title, |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if (!response.text.includes('Duplicate table alias')) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with title length more than the limit', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'a'.repeat(256), |
||||
title: 'new_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if (!response.text.includes('Table name exceeds ')) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Create table with title having leading white space', async function () { |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/projects/${project.id}/tables`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
table_name: 'table_name_with_whitespace ', |
||||
title: 'new_title', |
||||
columns: defaultColumns(context), |
||||
}) |
||||
.expect(400); |
||||
|
||||
if ( |
||||
!response.text.includes( |
||||
'Leading or trailing whitespace not allowed in table names' |
||||
) |
||||
) { |
||||
console.error(response.text); |
||||
return new Error('Wrong api response'); |
||||
} |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
if (tables.length !== 1) { |
||||
return new Error('Tables should not be created'); |
||||
} |
||||
}); |
||||
|
||||
it('Update table', async function () { |
||||
const response = await request(context.app) |
||||
.patch(`/api/v1/db/meta/tables/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
project_id: project.id, |
||||
table_name: 'new_title', |
||||
}) |
||||
.expect(200); |
||||
const updatedTable = await Model.get(table.id); |
||||
|
||||
if (!updatedTable.table_name.endsWith('new_title')) { |
||||
return new Error('Table was not updated'); |
||||
} |
||||
}); |
||||
|
||||
it('Delete table', async function () { |
||||
const response = await request(context.app) |
||||
.delete(`/api/v1/db/meta/tables/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
const tables = await getAllTables({ project }); |
||||
|
||||
if (tables.length !== 0) { |
||||
return new Error('Table is not deleted'); |
||||
} |
||||
}); |
||||
|
||||
// todo: Check the condtion where the table being deleted is being refered by multiple tables
|
||||
// todo: Check the if views are also deleted
|
||||
|
||||
it('Get table', async function () { |
||||
const response = await request(context.app) |
||||
.get(`/api/v1/db/meta/tables/${table.id}`) |
||||
.set('xc-auth', context.token) |
||||
.send({}) |
||||
.expect(200); |
||||
|
||||
if (response.body.id !== table.id) new Error('Wrong table'); |
||||
}); |
||||
|
||||
// todo: flaky test, order condition is sometimes not met
|
||||
it('Reorder table', async function () { |
||||
const newOrder = table.order === 0 ? 1 : 0; |
||||
const response = await request(context.app) |
||||
.post(`/api/v1/db/meta/tables/${table.id}/reorder`) |
||||
.set('xc-auth', context.token) |
||||
.send({ |
||||
order: newOrder, |
||||
}) |
||||
.expect(200); |
||||
// .expect(200, async (err) => {
|
||||
// if (err) return new Error(err);
|
||||
|
||||
// const updatedTable = await Model.get(table.id);
|
||||
// console.log(Number(updatedTable.order), newOrder);
|
||||
// if (Number(updatedTable.order) !== newOrder) {
|
||||
// return new Error('Reordering failed');
|
||||
// }
|
||||
|
||||
// new Error();
|
||||
// });
|
||||
}); |
||||
} |
||||
|
||||
export default async function () { |
||||
describe('Table', tableTest); |
||||
} |
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,73 @@
|
||||
// tsconfig for the nocodb test suite — extends the repo root config.
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "skipLibCheck": true,
    "composite": true,
    "target": "es2017",
    "outDir": "build/main",
    "rootDir": "src",
    "moduleResolution": "node",
    "module": "commonjs",
    "declaration": true,
    "inlineSourceMap": true,
    "esModuleInterop": true
    /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
    "allowJs": false,
    // "strict": true /* Enable all strict type-checking options. */,

    /* Strict Type-Checking Options */
    // "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
    // "strictNullChecks": true /* Enable strict null checks. */,
    // "strictFunctionTypes": true /* Enable strict checking of function types. */,
    // "strictPropertyInitialization": true /* Enable strict checking of property initialization in classes. */,
    // "noImplicitThis": true /* Raise error on 'this' expressions with an implied 'any' type. */,
    // "alwaysStrict": true /* Parse in strict mode and emit "use strict" for each source file. */,
    "resolveJsonModule": true,
    /* Additional Checks */
    "noUnusedLocals": false
    /* Report errors on unused locals. */,
    "noUnusedParameters": false
    /* Report errors on unused parameters. */,
    "noImplicitReturns": false
    /* Report error when not all code paths in function return a value. */,
    "noFallthroughCasesInSwitch": false
    /* Report errors for fallthrough cases in switch statement. */,
    /* Debugging Options */
    "traceResolution": false
    /* Report module resolution log messages. */,
    "listEmittedFiles": false
    /* Print names of generated files part of the compilation. */,
    "listFiles": false
    /* Print names of files part of the compilation. */,
    "pretty": true
    /* Stylize errors and messages using color and context. */,
    /* Experimental Options */
    // "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */,
    // "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */,

    "lib": [
      "es2017"
    ],
    "types": [
      "mocha", "node"
    ],
    "typeRoots": [
      "node_modules/@types",
      "src/types"
    ]
  },
  // NOTE(review): "parserOptions" is an ESLint configuration key, not a
  // tsconfig option — tsc does not recognize it at the top level and it is
  // ignored here. Presumably pasted from .eslintrc; confirm and relocate.
  "parserOptions": {
    "sourceType": "module",
    "tsconfigRootDir": "./",
    "project": "./tsconfig.json"
  },
  "include": [
    "./tests/**/**/**.ts",
    "./tests/**/**.ts"
    // "**/*.ts",
    // "**/*.json"
  ],
  "exclude": [
  ],
  "compileOnSave": false
}
Loading…
Reference in new issue