diff --git a/packages/noco-docs/docs/020.getting-started/050.self-hosted/020.environment-variables.md b/packages/noco-docs/docs/020.getting-started/050.self-hosted/020.environment-variables.md
index e44215cc4f..aff753a97f 100644
--- a/packages/noco-docs/docs/020.getting-started/050.self-hosted/020.environment-variables.md
+++ b/packages/noco-docs/docs/020.getting-started/050.self-hosted/020.environment-variables.md
@@ -75,24 +75,34 @@ For production use cases, it is crucial to set all environment variables marked
| `NC_REDIS_URL` | Yes | Specifies the Redis URL used for caching. Eg: `redis://:authpassword@127.0.0.1:6380/4` | Caching layer of backend |
## Product Configuration
-| Variable | Mandatory | Description | If Not Set |
-| -------- | --------- |----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------|
-| `DB_QUERY_LIMIT_DEFAULT` | No | Default pagination limit for data tables. | Defaults to `25`. Maximum is `100` |
-| `DB_QUERY_LIMIT_GROUP_BY_GROUP` | No | Number of groups per page. | Defaults to `10`. |
-| `DB_QUERY_LIMIT_GROUP_BY_RECORD` | No | Number of records per group. | Defaults to `10`. |
-| `DB_QUERY_LIMIT_MAX` | No | Maximum allowable pagination limit. | Defaults to `1000`. |
-| `DB_QUERY_LIMIT_MIN` | No | Minimum allowable pagination limit. | Defaults to `10` |
-| `NC_CONNECT_TO_EXTERNAL_DB_DISABLED` | No | Disables the ability to create bases on external databases. | |
-| `NC_INVITE_ONLY_SIGNUP` | No | Disables public signup; signup is possible only via invitations. Integrated into the [super admin settings menu](/account-settings/oss-specific-details#enable--disable-signup) as of version 0.99.0. | |
-| `NC_REQUEST_BODY_SIZE` | No | Maximum bytes allowed in the request body, based on [ExpressJS limits](https://expressjs.com/en/resources/middleware/body-parser.html#limit). | Defaults to `1048576` (1 MB). |
-| `NC_EXPORT_MAX_TIMEOUT` | No | Sets a timeout in milliseconds for downloading CSVs in batches if not completed within this period. | Defaults to `5000` (5 seconds). |
-| `NC_ALLOW_LOCAL_HOOKS` | No | Allows webhooks to call local network links, posing potential security risks. Set to `true` to enable; all other values are considered `false`. | Defaults to `false`. |
-| `NC_SANITIZE_COLUMN_NAME` | No | Enables sanitization of column names during their creation to prevent SQL injection and other security issues. | Defaults to `true`. |
-| `NC_TOOL_DIR` | No | Specifies the directory to store metadata and app-related files. In Docker setups, this maps to `/usr/app/data/` for mounting volumes. | Defaults to the current working directory. |
-| `NC_MINIMAL_DBS` | No | Creates a new SQLite file for each base. All SQLite database files are stored in the `nc_minimal_dbs` folder. Enabling this also disables base creation on external databases. | |
-| `NC_MIGRATIONS_DISABLED` | No | Disables NocoDB migrations. | |
-| `NC_DISABLE_AUDIT` | No | Disables the audit log feature. | Defaults to `false`. |
-| `NC_AUTOMATION_LOG_LEVEL` | No | Configures logging levels for automation features. Possible values: `OFF`, `ERROR`, `ALL`. More details can be found under [Webhooks](/automation/webhook/create-webhook). | Defaults to `OFF`. |
+| Variable | Mandatory | Description | If Not Set |
+| -------- | --------- |------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------|
+| `DB_QUERY_LIMIT_DEFAULT` | No | Default pagination limit for data tables. | Defaults to `25`. Maximum is `100` |
+| `DB_QUERY_LIMIT_GROUP_BY_GROUP` | No | Number of groups per page. | Defaults to `10`. |
+| `DB_QUERY_LIMIT_GROUP_BY_RECORD` | No | Number of records per group. | Defaults to `10`. |
+| `DB_QUERY_LIMIT_MAX` | No | Maximum allowable pagination limit. | Defaults to `1000`. |
+| `DB_QUERY_LIMIT_MIN` | No | Minimum allowable pagination limit. | Defaults to `10` |
+| `NC_CONNECT_TO_EXTERNAL_DB_DISABLED` | No | Disables the ability to create bases on external databases. | |
+| `NC_INVITE_ONLY_SIGNUP` | No | Disables public signup; signup is possible only via invitations. Integrated into the [super admin settings menu](/account-settings/oss-specific-details#enable--disable-signup) as of version 0.99.0. | |
+| `NC_REQUEST_BODY_SIZE` | No | Maximum bytes allowed in the request body, based on [ExpressJS limits](https://expressjs.com/en/resources/middleware/body-parser.html#limit). | Defaults to `1048576` (1 MB). |
+| `NC_EXPORT_MAX_TIMEOUT` | No | Sets a timeout in milliseconds for downloading CSVs in batches if not completed within this period. | Defaults to `5000` (5 seconds). |
+| `NC_ALLOW_LOCAL_HOOKS` | No | Allows webhooks to call local network links, posing potential security risks. Set to `true` to enable; all other values are considered `false`. | Defaults to `false`. |
+| `NC_SANITIZE_COLUMN_NAME` | No | Enables sanitization of column names during their creation to prevent SQL injection and other security issues. | Defaults to `true`. |
+| `NC_TOOL_DIR` | No | Specifies the directory to store metadata and app-related files. In Docker setups, this maps to `/usr/app/data/` for mounting volumes. | Defaults to the current working directory. |
+| `NC_MINIMAL_DBS` | No | Enables the minimal database feature of NocoDB. For more details, see [Minimal Database behavior](#minimal-database). | Enabled by default for PostgreSQL when the database user has schema creation permission. Set to `false` to disable. |
+| `NC_MIGRATIONS_DISABLED` | No | Disables NocoDB migrations. | |
+| `NC_DISABLE_AUDIT` | No | Disables the audit log feature. | Defaults to `false`. |
+| `NC_AUTOMATION_LOG_LEVEL` | No | Configures logging levels for automation features. Possible values: `OFF`, `ERROR`, `ALL`. More details can be found under [Webhooks](/automation/webhook/create-webhook). | Defaults to `OFF`. |
+
+### Minimal Database
+
+When enabled, this feature isolates each base's data in its own storage unit on PostgreSQL and SQLite:
+
+- **SQLite**: Each base's data is stored as a separate SQLite file, ensuring isolated storage.
+- **PostgreSQL**: A unique schema is created for each base, providing logical separation within the database.
+
+**Note**: For PostgreSQL, this feature is enabled by default if the database user has the `CREATE` privilege on the database (i.e., can create schemas). To disable it, set the `NC_MINIMAL_DBS` environment variable to `false`.
+
## Logging & Monitoring
| Variable | Mandatory | Description | If Not Set |
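The PostgreSQL default described in the Minimal Database section above is decided at startup by checking whether the configured database user can create schemas. Below is a minimal sketch of that check, assuming a knex PostgreSQL connection; the helper name and config shape are illustrative, and the actual logic lives in `initBaseBehavior` further down in this patch.

```ts
import knex, { Knex } from 'knex';

// Returns true when the connected user holds CREATE on the database,
// i.e. when per-base schemas can be created automatically.
async function canDefaultToMinimalDbs(config: Knex.Config): Promise<boolean> {
  const db = knex(config);
  try {
    const conn = config.connection as Knex.PgConnectionConfig;
    const res = await db.raw(
      "SELECT has_database_privilege(:user, :database, 'CREATE') AS allowed",
      { user: conn.user, database: conn.database },
    );
    return !!res.rows?.[0]?.allowed;
  } finally {
    await db.destroy();
  }
}
```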
diff --git a/packages/nocodb/src/controllers/filters.controller.ts b/packages/nocodb/src/controllers/filters.controller.ts
index 95c5f5b2b1..9e7d6a0652 100644
--- a/packages/nocodb/src/controllers/filters.controller.ts
+++ b/packages/nocodb/src/controllers/filters.controller.ts
@@ -38,7 +38,7 @@ export class FiltersController {
return new PagedResponseImpl(
await this.filtersService.filterList(context, {
viewId,
- includeAllFilters: includeAllFilters === 'true'
+ includeAllFilters: includeAllFilters === 'true',
}),
);
}
diff --git a/packages/nocodb/src/db/BaseModelSqlv2.ts b/packages/nocodb/src/db/BaseModelSqlv2.ts
index ddb1172031..dc4247f560 100644
--- a/packages/nocodb/src/db/BaseModelSqlv2.ts
+++ b/packages/nocodb/src/db/BaseModelSqlv2.ts
@@ -210,6 +210,7 @@ class BaseModelSqlv2 {
protected source: Source;
public model: Model;
public context: NcContext;
+ public schema?: string;
public static config: any = defaultLimitConfig;
@@ -222,14 +223,17 @@ class BaseModelSqlv2 {
model,
viewId,
context,
+ schema,
}: {
[key: string]: any;
model: Model;
+ schema?: string;
}) {
this._dbDriver = dbDriver;
this.model = model;
this.viewId = viewId;
this.context = context;
+ this.schema = schema;
autoBind(this);
}
@@ -4854,7 +4858,9 @@ class BaseModelSqlv2 {
public getTnPath(tb: { table_name: string } | string, alias?: string) {
const tn = typeof tb === 'string' ? tb : tb.table_name;
const schema = (this.dbDriver as any).searchPath?.();
- if (this.isMssql && schema) {
+ if (this.isPg && this.schema) {
+ return `${this.schema}.${tn}${alias ? ` as ${alias}` : ``}`;
+ } else if (this.isMssql && schema) {
return this.dbDriver.raw(`??.??${alias ? ' as ??' : ''}`, [
schema,
tn,
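With the new `schema` member in place, `getTnPath` qualifies Postgres table names with the per-base schema before falling through to the existing MSSQL/search-path handling. A simplified, standalone sketch of just that branch (an assumed simplification; alias handling mirrors the template string above):

```ts
// Qualify a table name with an optional per-base Postgres schema.
function qualifyTable(
  tn: string,
  opts: { isPg: boolean; schema?: string },
  alias?: string,
): string {
  if (opts.isPg && opts.schema) {
    return `${opts.schema}.${tn}${alias ? ` as ${alias}` : ''}`;
  }
  return alias ? `${tn} as ${alias}` : tn;
}

// qualifyTable('orders', { isPg: true, schema: 'p_abc123' }, 'o') → 'p_abc123.orders as o'
```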
diff --git a/packages/nocodb/src/db/sql-client/lib/pg/PgClient.ts b/packages/nocodb/src/db/sql-client/lib/pg/PgClient.ts
index dd61695ef1..a8e4c805c3 100644
--- a/packages/nocodb/src/db/sql-client/lib/pg/PgClient.ts
+++ b/packages/nocodb/src/db/sql-client/lib/pg/PgClient.ts
@@ -493,7 +493,7 @@ class PGClient extends KnexClient {
]);
}
- const schemaName = this.connectionConfig.searchPath?.[0] || 'public';
+ const schemaName = this.getEffectiveSchema(args);
// Check schemaExists because `CREATE SCHEMA IF NOT EXISTS` requires permissions of `CREATE ON DATABASE`
const schemaExists = !!(
@@ -524,6 +524,10 @@ class PGClient extends KnexClient {
return result;
}
+ protected getEffectiveSchema(args: { schema?: string } = {}) {
+ return args?.schema || this.schema;
+ }
+
async dropDatabase(args) {
const _func = this.dropDatabase.name;
const result = new Result();
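`getEffectiveSchema` lets callers override the schema per call while keeping the old behaviour as the fallback. The precedence, as a standalone sketch (the `public` default comes from the simplified `schema` getter later in this file):

```ts
// explicit args.schema → first searchPath entry → 'public'
function effectiveSchema(
  args: { schema?: string } = {},
  connectionConfig?: { searchPath?: string[] },
): string {
  return args.schema || connectionConfig?.searchPath?.[0] || 'public';
}

// effectiveSchema({ schema: 'p_abc123' })          → 'p_abc123'
// effectiveSchema({}, { searchPath: ['tenant1'] }) → 'tenant1'
// effectiveSchema({})                              → 'public'
```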
@@ -575,7 +579,11 @@ class PGClient extends KnexClient {
const exists = await this.sqlClient.raw(
`SELECT table_schema,table_name as tn, table_catalog FROM information_schema.tables where table_schema=? and
table_name = ? and table_catalog = ?`,
- [this.schema, args.tn, this.connectionConfig.connection.database],
+ [
+ this.getEffectiveSchema(args),
+ args.tn,
+ this.connectionConfig.connection.database,
+ ],
);
if (exists.rows.length === 0) {
@@ -638,7 +646,11 @@ class PGClient extends KnexClient {
try {
const { rows } = await this.sqlClient.raw(
`SELECT table_schema,table_name as tn, table_catalog FROM information_schema.tables where table_schema=? and table_name = ? and table_catalog = ?'`,
- [this.schema, args.tn, this.connectionConfig.connection.database],
+ [
+ this.getEffectiveSchema(args),
+ args.tn,
+ this.connectionConfig.connection.database,
+ ],
);
result.data.value = rows.length > 0;
} catch (e) {
@@ -716,7 +728,7 @@ class PGClient extends KnexClient {
FROM information_schema.tables
where table_schema = ?
ORDER BY table_schema, table_name`,
- [this.schema],
+ [this.getEffectiveSchema(args)],
);
result.data.list = rows.filter(
@@ -852,7 +864,7 @@ class PGClient extends KnexClient {
where c.table_catalog=:database and c.table_schema=:schema and c.table_name=:table
order by c.table_name, c.ordinal_position`,
{
- schema: this.schema,
+ schema: this.getEffectiveSchema(args),
database: args.databaseName,
table: args.tn,
},
@@ -933,7 +945,6 @@ class PGClient extends KnexClient {
return result;
}
-
/**
*
* @param {Object} - args - Input arguments
@@ -994,7 +1005,7 @@ class PGClient extends KnexClient {
and i.oid<>0
AND f.attnum > 0
ORDER BY i.relname, f.attnum;`,
- [this.schema, args.tn],
+ [this.getEffectiveSchema(args), args.tn],
);
result.data.list = rows;
} catch (e) {
@@ -1024,9 +1035,13 @@ class PGClient extends KnexClient {
const foreignKeyName = args.foreignKeyName || null;
- args.childTableWithSchema = args.childTable;
+ args.childTableWithSchema = args.schema
+ ? `${args.schema}.${args.childTable}`
+ : args.childTable;
- args.parentTableWithSchema = args.parentTable;
+ args.parentTableWithSchema = args.schema
+ ? `${args.schema}.${args.parentTable}`
+ : args.parentTable;
try {
// const self = this;
@@ -1185,7 +1200,7 @@ class PGClient extends KnexClient {
on pc.conname = tc.constraint_name
WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_schema=:schema and tc.table_name=:table
order by tc.table_name;`,
- { schema: this.schema, table: args.tn },
+ { schema: this.getEffectiveSchema(args), table: args.tn },
);
const ruleMapping = {
@@ -1212,6 +1227,74 @@ class PGClient extends KnexClient {
return result;
}
+ /**
+ *
+ * @param {Object} - args
+ * @param {String} - args.parentTable
+ * @param {String} - args.parentColumn
+ * @param {String} - args.childColumn
+ * @param {String} - args.childTable
+ * @returns {Promise<{upStatement, downStatement}>}
+ */
+ async relationCreate(args) {
+ const _func = this.relationCreate.name;
+ const result = new Result();
+ log.api(`${_func}:args:`, args);
+
+ const foreignKeyName = args.foreignKeyName || null;
+
+ args.childTableWithSchema = args.schema
+ ? `${args.schema}.${args.childTable}`
+ : args.childTable;
+
+ args.parentTableWithSchema = args.schema
+ ? `${args.schema}.${args.parentTable}`
+ : args.parentTable;
+
+ try {
+ const upQb = this.sqlClient.schema.table(
+ args.childTableWithSchema,
+ function (table) {
+ table = table
+ .foreign(args.childColumn, foreignKeyName)
+ .references(args.parentColumn)
+ .on(args.parentTableWithSchema);
+
+ if (args.onUpdate) {
+ table = table.onUpdate(args.onUpdate);
+ }
+ if (args.onDelete) {
+ table.onDelete(args.onDelete);
+ }
+ },
+ );
+
+ await this.sqlClient.raw(upQb.toQuery());
+
+ const upStatement = this.querySeparator() + upQb.toQuery();
+
+ this.emit(`Success : ${upStatement}`);
+
+ const downStatement =
+ this.querySeparator() +
+ this.sqlClient.schema
+ .table(args.childTableWithSchema, function (table) {
+ table.dropForeign(args.childColumn, foreignKeyName);
+ })
+ .toQuery();
+
+ result.data.object = {
+ upStatement: [{ sql: upStatement }],
+ downStatement: [{ sql: downStatement }],
+ };
+ } catch (e) {
+ log.ppe(e, _func);
+ throw e;
+ }
+
+ return result;
+ }
+
/**
*
* @param {Object} - args - Input arguments
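The added `relationCreate` prefixes the child and parent tables with `args.schema` before handing them to knex, which splits dotted names into quoted schema and table identifiers. A standalone illustration of that behaviour, not NocoDB code (table and constraint names are made up; exact quoting may vary by knex version):

```ts
import knex from 'knex';

const db = knex({ client: 'pg' });

const sql = db.schema
  .table('p_abc123.child', (table) => {
    table
      .foreign('parent_id', 'fk_child_parent')
      .references('id')
      .on('p_abc123.parent');
  })
  .toQuery();

// Roughly: alter table "p_abc123"."child" add constraint "fk_child_parent"
//          foreign key ("parent_id") references "p_abc123"."parent" ("id")
console.log(sql);
```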
@@ -1256,7 +1339,7 @@ class PGClient extends KnexClient {
WHERE tc.constraint_type = 'FOREIGN KEY'
AND tc.table_schema = ?
order by tc.table_name;`,
- [this.schema],
+ [this.getEffectiveSchema(args)],
);
const ruleMapping = {
@@ -1309,7 +1392,7 @@ class PGClient extends KnexClient {
const { rows } = await this.sqlClient.raw(
`select * from information_schema.triggers where trigger_schema=? and event_object_table=?`,
- [this.schema, args.tn],
+ [this.getEffectiveSchema(args), args.tn],
);
for (let i = 0; i < rows.length; ++i) {
@@ -1353,13 +1436,13 @@ class PGClient extends KnexClient {
try {
args.databaseName = this.connectionConfig.connection.database;
- const { rows } = await this.raw(
+ const { rows } = await this.sqlClient.raw(
`SELECT *
FROM pg_catalog.pg_namespace n
JOIN pg_catalog.pg_proc p
ON pronamespace = n.oid
WHERE nspname = ?;`,
- [this.schema],
+ [this.getEffectiveSchema(args)],
);
const functionRows = [];
for (let i = 0; i < rows.length; ++i) {
@@ -1408,13 +1491,13 @@ class PGClient extends KnexClient {
try {
args.databaseName = this.connectionConfig.connection.database;
- const { rows } = await this.raw(
+ const { rows } = await this.sqlClient.raw(
`SELECT *
FROM pg_catalog.pg_namespace n
JOIN pg_catalog.pg_proc p
ON pronamespace = n.oid
WHERE nspname = ?;`,
- [this.schema],
+ [this.getEffectiveSchema(args)],
);
const procedureRows = [];
for (let i = 0; i < rows.length; ++i) {
@@ -1456,7 +1539,7 @@ class PGClient extends KnexClient {
`select *
from INFORMATION_SCHEMA.views
WHERE table_schema = ?;`,
- [this.schema],
+ [this.getEffectiveSchema(args)],
);
for (let i = 0; i < rows.length; ++i) {
@@ -1494,7 +1577,7 @@ class PGClient extends KnexClient {
`SELECT format('%I.%I(%s)', ns.nspname, p.proname, oidvectortypes(p.proargtypes)) as function_declaration, pg_get_functiondef(p.oid) as create_function
FROM pg_proc p INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = ? and p.proname = ?;`,
- [this.schema, args.function_name],
+ [this.getEffectiveSchema(args), args.function_name],
);
// log.debug(response);
@@ -2229,8 +2312,12 @@ class PGClient extends KnexClient {
for (let i = 0; i < args.columns.length; i++) {
const column = args.columns[i];
if (column.au) {
- const triggerFnName = `xc_au_${args.tn}_${column.cn}`;
- const triggerName = `xc_trigger_${args.tn}_${column.cn}`;
+ const triggerFnName = args.schema
+ ? `xc_au_${args.schema}_${args.tn}_${column.cn}`
+ : `xc_au_${args.tn}_${column.cn}`;
+ const triggerName = args.schema
+ ? `xc_trigger_${args.schema}_${args.tn}_${column.cn}`
+ : `xc_trigger_${args.tn}_${column.cn}`;
const triggerFnQuery = this.genQuery(
`CREATE OR REPLACE FUNCTION ??()
@@ -2252,14 +2339,18 @@ class PGClient extends KnexClient {
BEFORE UPDATE ON ??
FOR EACH ROW
EXECUTE PROCEDURE ??();`,
- [triggerName, args.tn, triggerFnName],
+ [
+ triggerName,
+ args.schema ? `${args.schema}.${args.tn}` : args.tn,
+ triggerFnName,
+ ],
);
downQuery +=
this.querySeparator() +
this.genQuery(`DROP TRIGGER IF EXISTS ?? ON ??;`, [
triggerName,
- args.tn,
+ args.schema ? `${args.schema}.${args.tn}` : args.tn,
]) +
this.querySeparator() +
this.genQuery(`DROP FUNCTION IF EXISTS ??()`, [triggerFnName]);
@@ -2280,8 +2371,12 @@ class PGClient extends KnexClient {
for (let i = 0; i < args.columns.length; i++) {
const column = args.columns[i];
if (column.au && column.altered === 1) {
- const triggerFnName = `xc_au_${args.tn}_${column.cn}`;
- const triggerName = `xc_trigger_${args.tn}_${column.cn}`;
+ const triggerFnName = args.schema
+ ? `xc_au_${args.schema}_${args.tn}_${column.cn}`
+ : `xc_au_${args.tn}_${column.cn}`;
+ const triggerName = args.schema
+ ? `xc_trigger_${args.schema}_${args.tn}_${column.cn}`
+ : `xc_trigger_${args.tn}_${column.cn}`;
const triggerFnQuery = this.genQuery(
`CREATE OR REPLACE FUNCTION ??()
@@ -2303,7 +2398,11 @@ class PGClient extends KnexClient {
BEFORE UPDATE ON ??
FOR EACH ROW
EXECUTE PROCEDURE ??();`,
- [triggerName, args.tn, triggerFnName],
+ [
+ triggerName,
+ args.schema ? `${args.schema}.${args.tn}` : args.tn,
+ triggerFnName,
+ ],
);
downQuery +=
@@ -2356,7 +2455,7 @@ class PGClient extends KnexClient {
log.api(`${_func}:args:`, args);
try {
- args.table = args.tn;
+ args.table = args.schema ? `${args.schema}.${args.tn}` : args.tn;
const originalColumns = args.originalColumns;
args.connectionConfig = this._connectionConfig;
args.sqlClient = this.sqlClient;
@@ -2477,7 +2576,9 @@ class PGClient extends KnexClient {
/** ************** create up & down statements *************** */
const upStatement =
this.querySeparator() +
- this.sqlClient.schema.dropTable(args.tn).toString();
+ this.sqlClient.schema
+ .dropTable(args.schema ? `${args.schema}.${args.tn}` : args.tn)
+ .toString();
let downQuery = this.createTable(args.tn, args);
/**
@@ -2563,7 +2664,9 @@ class PGClient extends KnexClient {
/** ************** drop tn *************** */
await this.sqlClient.raw(
- this.sqlClient.schema.dropTable(args.tn).toQuery(),
+ this.sqlClient.schema
+ .dropTable(args.schema ? `${args.schema}.${args.tn}` : args.tn)
+ .toQuery(),
);
/** ************** return files *************** */
@@ -2838,7 +2941,9 @@ class PGClient extends KnexClient {
query += this.alterTablePK(table, args.columns, [], query, true);
- query = this.genQuery(`CREATE TABLE ?? (${query});`, [args.tn]);
+ query = this.genQuery(`CREATE TABLE ?? (${query});`, [
+ args.schema ? `${args.schema}.${args.tn}` : args.tn,
+ ]);
return query;
}
@@ -2954,12 +3059,7 @@ class PGClient extends KnexClient {
}
get schema() {
- return (
- (this.connectionConfig &&
- this.connectionConfig.searchPath &&
- this.connectionConfig.searchPath[0]) ||
- 'public'
- );
+ return this.connectionConfig?.searchPath?.[0] || 'public';
}
/**
@@ -3025,7 +3125,10 @@ class PGClient extends KnexClient {
await this.sqlClient.raw(
this.sqlClient.schema
.renameTable(
- this.sqlClient.raw('??.??', [this.schema, args.tn_old]),
+ this.sqlClient.raw('??.??', [
+ this.getEffectiveSchema(args),
+ args.tn_old,
+ ]),
args.tn,
)
.toQuery(),
@@ -3036,7 +3139,10 @@ class PGClient extends KnexClient {
this.querySeparator() +
this.sqlClient.schema
.renameTable(
- this.sqlClient.raw('??.??', [this.schema, args.tn]),
+ this.sqlClient.raw('??.??', [
+ this.getEffectiveSchema(args),
+ args.tn,
+ ]),
args.tn_old,
)
.toQuery();
@@ -3047,7 +3153,10 @@ class PGClient extends KnexClient {
this.querySeparator() +
this.sqlClient.schema
.renameTable(
- this.sqlClient.raw('??.??', [this.schema, args.tn_old]),
+ this.sqlClient.raw('??.??', [
+ this.getEffectiveSchema(args),
+ args.tn_old,
+ ]),
args.tn,
)
.toQuery();
@@ -3064,6 +3173,151 @@ class PGClient extends KnexClient {
return result;
}
+
+ /**
+ *
+ * @param {Object} - args
+ * @param {String} - args.tn
+ * @param {String} - args.indexName
+ * @param {String} - args.non_unique
+ * @param {String[]} - args.columns
+ * @returns {Promise<{upStatement, downStatement}>}
+ */
+ async indexCreate(args) {
+ const _func = this.indexCreate.name;
+ const result = new Result();
+ log.api(`${_func}:args:`, args);
+
+ const indexName = args.indexName || null;
+
+ try {
+ args.table = args.schema ? `${args.schema}.${args.tn}` : args.tn;
+
+ // s = await this.sqlClient.schema.index(Object.keys(args.columns));
+ await this.sqlClient.raw(
+ this.sqlClient.schema
+ .table(args.table, function (table) {
+ if (args.non_unique) {
+ table.index(args.columns, indexName);
+ } else {
+ table.unique(args.columns, indexName);
+ }
+ })
+ .toQuery(),
+ );
+
+ const upStatement =
+ this.querySeparator() +
+ this.sqlClient.schema
+ .table(args.table, function (table) {
+ if (args.non_unique) {
+ table.index(args.columns, indexName);
+ } else {
+ table.unique(args.columns, indexName);
+ }
+ })
+ .toQuery();
+
+ this.emit(`Success : ${upStatement}`);
+
+ const downStatement =
+ this.querySeparator() +
+ this.sqlClient.schema
+ .table(args.table, function (table) {
+ if (args.non_unique) {
+ table.dropIndex(args.columns, indexName);
+ } else {
+ table.dropUnique(args.columns, indexName);
+ }
+ })
+ .toQuery();
+
+ result.data.object = {
+ upStatement: [{ sql: upStatement }],
+ downStatement: [{ sql: downStatement }],
+ };
+
+ // result.data.object = {
+ // upStatement,
+ // downStatement
+ // };
+ } catch (e) {
+ log.ppe(e, _func);
+ throw e;
+ }
+
+ return result;
+ }
+
+ /**
+ *
+ * @param {Object} - args
+ * @param {String} - args.tn
+ * @param {String[]} - args.columns
+ * @param {String} - args.indexName
+ * @param {String} - args.non_unique
+ * @returns {Promise<{upStatement, downStatement}>}
+ */
+ async indexDelete(args) {
+ const _func = this.indexDelete.name;
+ const result = new Result();
+ log.api(`${_func}:args:`, args);
+
+ const indexName = args.indexName || null;
+
+ try {
+ args.table = args.schema ? `${args.schema}.${args.tn}` : args.tn;
+
+ // s = await this.sqlClient.schema.index(Object.keys(args.columns));
+ await this.sqlClient.raw(
+ this.sqlClient.schema
+ .table(args.table, function (table) {
+ if (args.non_unique_original) {
+ table.dropIndex(args.columns, indexName);
+ } else {
+ table.dropUnique(args.columns, indexName);
+ }
+ })
+ .toQuery(),
+ );
+
+ const upStatement =
+ this.querySeparator() +
+ this.sqlClient.schema
+ .table(args.table, function (table) {
+ if (args.non_unique_original) {
+ table.dropIndex(args.columns, indexName);
+ } else {
+ table.dropUnique(args.columns, indexName);
+ }
+ })
+ .toQuery();
+
+ this.emit(`Success : ${upStatement}`);
+
+ const downStatement =
+ this.querySeparator() +
+ this.sqlClient.schema
+ .table(args.table, function (table) {
+ if (args.non_unique_original) {
+ table.index(args.columns, indexName);
+ } else {
+ table.unique(args.columns, indexName);
+ }
+ })
+ .toQuery();
+
+ result.data.object = {
+ upStatement: [{ sql: upStatement }],
+ downStatement: [{ sql: downStatement }],
+ };
+ } catch (e) {
+ log.ppe(e, _func);
+ throw e;
+ }
+
+ return result;
+ }
}
export default PGClient;
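The new `indexCreate`/`indexDelete` helpers build their up and down statements against the schema-qualified table in the same way. A standalone sketch of the statements produced for the non-unique case (schema, table, and index names are illustrative):

```ts
import knex from 'knex';

const db = knex({ client: 'pg' });
const table = 'p_abc123.orders';
const columns = ['customer_id'];
const indexName = 'orders_customer_id_index';

// Mirrors the up statement (unique(...) would be used when non_unique is falsy).
const up = db.schema
  .table(table, (t) => {
    t.index(columns, indexName);
  })
  .toQuery();

// Mirrors the down statement.
const down = db.schema
  .table(table, (t) => {
    t.dropIndex(columns, indexName);
  })
  .toQuery();

console.log(up);   // roughly: create index "orders_customer_id_index" on "p_abc123"."orders" ("customer_id")
console.log(down); // roughly the matching drop index statement
```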
diff --git a/packages/nocodb/src/helpers/initBaseBehaviour.ts b/packages/nocodb/src/helpers/initBaseBehaviour.ts
new file mode 100644
index 0000000000..8117f61495
--- /dev/null
+++ b/packages/nocodb/src/helpers/initBaseBehaviour.ts
@@ -0,0 +1,57 @@
+import { Logger } from '@nestjs/common';
+import { Knex } from 'knex';
+import PgConnectionConfig = Knex.PgConnectionConfig;
+import NcConnectionMgrv2 from '~/utils/common/NcConnectionMgrv2';
+import CustomKnex from '~/db/CustomKnex';
+const logger = new Logger('initBaseBehavior');
+
+export async function initBaseBehavior() {
+ const dataConfig = await NcConnectionMgrv2.getDataConfig();
+
+ // return if client is not postgres
+ if (dataConfig.client !== 'pg') {
+ return;
+ }
+
+ // respect an explicit opt-out: NC_MINIMAL_DBS === 'false' disables the feature
+ if (process.env.NC_MINIMAL_DBS === 'false') {
+ return;
+ }
+
+ let tempConnection: Knex | undefined;
+
+ try {
+ tempConnection = CustomKnex(dataConfig);
+
+ // check if database user have permission to create new schema
+ const schemaCreateAllowed = await tempConnection.raw(
+ "SELECT has_database_privilege(:user, :database, 'CREATE') as has_database_privilege",
+ {
+ database: (dataConfig.connection as PgConnectionConfig).database,
+ user: (dataConfig.connection as PgConnectionConfig).user,
+ },
+ );
+
+ // if schema creation is not allowed, return
+ if (!schemaCreateAllowed.rows[0]?.has_database_privilege) {
+ // set NC_MINIMAL_DBS to false if it's set to true and log warning
+ if (process.env.NC_MINIMAL_DBS === 'true') {
+ process.env.NC_MINIMAL_DBS = 'false';
+ }
+ logger.warn(
+ `User ${(dataConfig.connection as PgConnectionConfig)?.user} does not have permission to create schema, minimal databases feature will be disabled`,
+ );
+ return;
+ }
+
+ // set NC_MINIMAL_DBS to true
+ process.env.NC_MINIMAL_DBS = 'true';
+ } catch (error) {
+ logger.warn(
+ `Error while checking schema creation permission: ${error.message}`,
+ );
+ } finally {
+ // close the connection since it's only used to verify permission
+ await tempConnection?.destroy();
+ }
+}
diff --git a/packages/nocodb/src/meta/migrations/XcMigrationSourcev2.ts b/packages/nocodb/src/meta/migrations/XcMigrationSourcev2.ts
index 733f7d64e3..f5f51c0185 100644
--- a/packages/nocodb/src/meta/migrations/XcMigrationSourcev2.ts
+++ b/packages/nocodb/src/meta/migrations/XcMigrationSourcev2.ts
@@ -50,6 +50,7 @@ import * as nc_060_descriptions from '~/meta/migrations/v2/nc_060_descriptions';
import * as nc_061_integration_is_default from '~/meta/migrations/v2/nc_061_integration_is_default';
import * as nc_062_integration_store from '~/meta/migrations/v2/nc_062_integration_store';
import * as nc_063_form_field_filter from '~/meta/migrations/v2/nc_063_form_field_filter';
+import * as nc_064_pg_minimal_dbs from '~/meta/migrations/v2/nc_064_pg_minimal_dbs';
// Create a custom migration source class
export default class XcMigrationSourcev2 {
@@ -111,6 +112,7 @@ export default class XcMigrationSourcev2 {
'nc_061_integration_is_default',
'nc_062_integration_store',
'nc_063_form_field_filter',
+ 'nc_064_pg_minimal_dbs',
]);
}
@@ -224,6 +226,8 @@ export default class XcMigrationSourcev2 {
return nc_062_integration_store;
case 'nc_063_form_field_filter':
return nc_063_form_field_filter;
+ case 'nc_064_pg_minimal_dbs':
+ return nc_064_pg_minimal_dbs;
}
}
}
diff --git a/packages/nocodb/src/meta/migrations/v2/nc_064_pg_minimal_dbs.ts b/packages/nocodb/src/meta/migrations/v2/nc_064_pg_minimal_dbs.ts
new file mode 100644
index 0000000000..4a97d9561f
--- /dev/null
+++ b/packages/nocodb/src/meta/migrations/v2/nc_064_pg_minimal_dbs.ts
@@ -0,0 +1,16 @@
+import type { Knex } from 'knex';
+import { MetaTable } from '~/utils/globals';
+
+const up = async (knex: Knex) => {
+ await knex.schema.alterTable(MetaTable.BASES, (table) => {
+ table.boolean('is_local').defaultTo(false);
+ });
+};
+
+const down = async (knex: Knex) => {
+ await knex.schema.alterTable(MetaTable.BASES, (table) => {
+ table.dropColumn('is_local');
+ });
+};
+
+export { up, down };
diff --git a/packages/nocodb/src/models/Model.ts b/packages/nocodb/src/models/Model.ts
index 307185cfec..21eee81600 100644
--- a/packages/nocodb/src/models/Model.ts
+++ b/packages/nocodb/src/models/Model.ts
@@ -34,6 +34,7 @@ import {
prepareForDb,
prepareForResponse,
} from '~/utils/modelUtils';
+import { Source } from '~/models';
const logger = new Logger('Model');
@@ -471,21 +472,33 @@ export default class Model implements TableType {
dbDriver: XKnex;
model?: Model;
extractDefaultView?: boolean;
+ source?: Source;
},
ncMeta = Noco.ncMeta,
): Promise<BaseModelSqlv2> {
const model = args?.model || (await this.get(context, args.id, ncMeta));
+ const source =
+ args.source ||
+ (await Source.get(context, model.source_id, false, ncMeta));
if (!args?.viewId && args.extractDefaultView) {
const view = await View.getDefaultView(context, model.id, ncMeta);
args.viewId = view.id;
}
+ let schema: string;
+
+ if (source?.isMeta(true, 1)) {
+ schema = source.getConfig()?.schema;
+ } else if (source?.type === 'pg') {
+ schema = source.getConfig()?.searchPath?.[0];
+ }
return new BaseModelSqlv2({
context,
dbDriver: args.dbDriver,
viewId: args.viewId,
model,
+ schema,
});
}
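`getBaseModelSQL` now works out which schema to hand to `BaseModelSqlv2`: minimal-DB Postgres sources carry it directly in their config, while external Postgres sources fall back to the first `searchPath` entry. A condensed sketch of that resolution (the `SourceLike` shape is an assumption made for illustration):

```ts
type SourceLike = {
  type?: string;
  isMeta: (_only?: boolean, _mode?: number) => boolean;
  getConfig: () => any;
};

function resolveSchema(source?: SourceLike): string | undefined {
  if (source?.isMeta(true, 1)) {
    // minimal-DB Postgres source: schema stored directly on the config
    return source.getConfig()?.schema;
  }
  if (source?.type === 'pg') {
    // external Postgres source: use the connection's search path
    return source.getConfig()?.searchPath?.[0];
  }
  return undefined;
}
```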
diff --git a/packages/nocodb/src/models/Source.ts b/packages/nocodb/src/models/Source.ts
index 0fb43f378a..8714eb50fe 100644
--- a/packages/nocodb/src/models/Source.ts
+++ b/packages/nocodb/src/models/Source.ts
@@ -34,6 +34,7 @@ export default class Source implements SourceType {
alias?: string;
type?: DriverClient;
is_meta?: BoolType;
+ is_local?: BoolType;
is_schema_readonly?: BoolType;
is_data_readonly?: BoolType;
config?: string;
@@ -71,6 +72,7 @@ export default class Source implements SourceType {
'config',
'type',
'is_meta',
+ 'is_local',
'inflection_column',
'inflection_table',
'order',
@@ -131,6 +133,7 @@ export default class Source implements SourceType {
'config',
'type',
'is_meta',
+ 'is_local',
'inflection_column',
'inflection_table',
'order',
@@ -297,6 +300,15 @@ export default class Source implements SourceType {
}
public async getConnectionConfig(): Promise<any> {
+ if (this.is_meta || this.is_local) {
+ const metaConfig = await NcConnectionMgrv2.getDataConfig();
+ const config = { ...metaConfig };
+ if (config.client === 'sqlite3') {
+ config.connection = metaConfig;
+ }
+ return config;
+ }
+
const config = this.getConfig();
// todo: update sql-client args
@@ -307,7 +319,6 @@ export default class Source implements SourceType {
return config;
}
-
public getConfig(skipIntegrationConfig = false): any {
if (this.is_meta) {
const metaConfig = Noco.getConfig()?.meta?.db;
@@ -556,9 +567,9 @@ export default class Source implements SourceType {
if (_mode === 0) {
return this.is_meta;
}
- return false;
+ return this.is_local;
} else {
- return this.is_meta;
+ return this.is_meta || this.is_local;
}
}
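The `isMeta` change extends the meaning of the mode argument: with `_only` set, mode `0` still means "meta source" while mode `1` now selects "local" (minimal-DB) sources; without `_only`, either flag qualifies. A condensed, standalone reading of those branches (assuming the surrounding `_only` check this hunk sits inside):

```ts
function isMetaLike(
  src: { is_meta?: boolean; is_local?: boolean },
  _only = false,
  _mode = 0,
): boolean {
  if (_only) {
    // _mode 0 → meta sources only; _mode 1 → local (minimal-DB) sources only
    return _mode === 0 ? !!src.is_meta : !!src.is_local;
  }
  // default: either flag marks the source as internally managed
  return !!src.is_meta || !!src.is_local;
}
```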
diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/at-import.processor.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/at-import.processor.ts
index c79cb42792..d3a9e1acdd 100644
--- a/packages/nocodb/src/modules/jobs/jobs/at-import/at-import.processor.ts
+++ b/packages/nocodb/src/modules/jobs/jobs/at-import/at-import.processor.ts
@@ -466,12 +466,7 @@ export class AtImportProcessor {
(value as any).name = 'nc_empty';
}
// skip duplicates (we don't allow them)
- if (
- options.find(
- (el) =>
- el.title === (value as any).name,
- )
- ) {
+ if (options.find((el) => el.title === (value as any).name)) {
logWarning(
`Duplicate select option found: ${col.name} :: ${
(value as any).name
diff --git a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/fetchAT.ts b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/fetchAT.ts
index e685c511cf..e3dcad68f4 100644
--- a/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/fetchAT.ts
+++ b/packages/nocodb/src/modules/jobs/jobs/at-import/helpers/fetchAT.ts
@@ -243,7 +243,6 @@ async function readView(viewId) {
resolve(fullObject);
});
});
-
if (data?.data) {
return { view: data.data };
diff --git a/packages/nocodb/src/providers/init-meta-service.provider.ts b/packages/nocodb/src/providers/init-meta-service.provider.ts
index 241bd75529..3cb91d0068 100644
--- a/packages/nocodb/src/providers/init-meta-service.provider.ts
+++ b/packages/nocodb/src/providers/init-meta-service.provider.ts
@@ -13,6 +13,7 @@ import { User } from '~/models';
import { NcConfig, prepareEnv } from '~/utils/nc-config';
import { MetaTable, RootScopes } from '~/utils/globals';
import { updateMigrationJobsState } from '~/helpers/migrationJobs';
+import { initBaseBehavior } from '~/helpers/initBaseBehaviour';
export const InitMetaServiceProvider: FactoryProvider = {
// initialize app,
@@ -83,6 +84,9 @@ export const InitMetaServiceProvider: FactoryProvider = {
Noco.config = config;
Noco.eventEmitter = eventEmitter;
+ // decide base behavior based on env and database permissions
+ await initBaseBehavior();
+
if (!instanceConfig) {
// bump to latest version for fresh install
await updateMigrationJobsState({
diff --git a/packages/nocodb/src/services/bases.service.ts b/packages/nocodb/src/services/bases.service.ts
index ec904f3679..61137c2a24 100644
--- a/packages/nocodb/src/services/bases.service.ts
+++ b/packages/nocodb/src/services/bases.service.ts
@@ -163,38 +163,56 @@ export class BasesService {
baseBody.prefix = `nc_${ranId}__`;
baseBody.is_meta = true;
if (process.env.NC_MINIMAL_DBS === 'true') {
- // if env variable NC_MINIMAL_DBS is set, then create a SQLite file/connection for each base
- // each file will be named as nc_<id>.db
- const fs = require('fs');
- const toolDir = getToolDir();
- const nanoidv2 = customAlphabet(
- '1234567890abcdefghijklmnopqrstuvwxyz',
- 14,
- );
- if (!(await promisify(fs.exists)(`${toolDir}/nc_minimal_dbs`))) {
- await promisify(fs.mkdir)(`${toolDir}/nc_minimal_dbs`);
- }
- const dbId = nanoidv2();
- const baseTitle = DOMPurify.sanitize(baseBody.title);
- baseBody.prefix = '';
- baseBody.sources = [
- {
- type: 'sqlite3',
- is_meta: false,
- config: {
- client: 'sqlite3',
- connection: {
+ const dataConfig = await Noco.getConfig()?.meta?.db;
+ if (dataConfig?.client === 'pg') {
+ baseBody.prefix = '';
+ baseBody.sources = [
+ {
+ type: 'pg',
+ is_local: true,
+ is_meta: false,
+ config: {
+ schema: baseId,
+ },
+ inflection_column: 'camelize',
+ inflection_table: 'camelize',
+ },
+ ];
+ } else {
+ // if env variable NC_MINIMAL_DBS is set, then create a SQLite file/connection for each base
+ // each file will be named as nc_<id>.db
+ const fs = require('fs');
+ const toolDir = getToolDir();
+ const nanoidv2 = customAlphabet(
+ '1234567890abcdefghijklmnopqrstuvwxyz',
+ 14,
+ );
+ if (!(await promisify(fs.exists)(`${toolDir}/nc_minimal_dbs`))) {
+ await promisify(fs.mkdir)(`${toolDir}/nc_minimal_dbs`);
+ }
+ const dbId = nanoidv2();
+ const baseTitle = DOMPurify.sanitize(baseBody.title);
+ baseBody.prefix = '';
+ baseBody.sources = [
+ {
+ type: 'sqlite3',
+ is_meta: false,
+ is_local: true,
+ config: {
client: 'sqlite3',
- database: baseTitle,
connection: {
- filename: `${toolDir}/nc_minimal_dbs/${baseTitle}_${dbId}.db`,
+ client: 'sqlite3',
+ database: baseTitle,
+ connection: {
+ filename: `${toolDir}/nc_minimal_dbs/${baseTitle}_${dbId}.db`,
+ },
},
},
+ inflection_column: 'camelize',
+ inflection_table: 'camelize',
},
- inflection_column: 'camelize',
- inflection_table: 'camelize',
- },
- ];
+ ];
+ }
} else {
const db = Noco.getConfig().meta?.db;
baseBody.sources = [
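Under `NC_MINIMAL_DBS`, base creation now branches on the data database client: Postgres bases get a schema-per-base source whose schema is the base id, everything else keeps the SQLite file-per-base source. Illustrative shapes of the two generated source entries (the id, title, and path values are examples only):

```ts
const pgMinimalSource = {
  type: 'pg',
  is_local: true,
  is_meta: false,
  config: { schema: 'p_1234567890abcd' }, // schema named after the base id
  inflection_column: 'camelize',
  inflection_table: 'camelize',
};

const sqliteMinimalSource = {
  type: 'sqlite3',
  is_local: true,
  is_meta: false,
  config: {
    client: 'sqlite3',
    connection: {
      client: 'sqlite3',
      database: 'My Base',
      connection: { filename: '/usr/app/data/nc_minimal_dbs/My Base_ab12cd34ef56gh.db' },
    },
  },
  inflection_column: 'camelize',
  inflection_table: 'camelize',
};
```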
diff --git a/packages/nocodb/src/services/data-alias-nested.service.ts b/packages/nocodb/src/services/data-alias-nested.service.ts
index 1537f6b03e..900fe3fb0a 100644
--- a/packages/nocodb/src/services/data-alias-nested.service.ts
+++ b/packages/nocodb/src/services/data-alias-nested.service.ts
@@ -32,6 +32,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
@@ -80,6 +81,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
@@ -123,6 +125,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
@@ -166,6 +169,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
@@ -208,6 +212,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
@@ -253,6 +258,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
@@ -300,6 +306,7 @@ export class DataAliasNestedService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const column = await getColumnByIdOrName(context, param.columnName, model);
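Every data-service change in the rest of this patch is the same one-line addition: the already-loaded `source` is forwarded to `Model.getBaseModelSQL`, so the per-base schema can be resolved without a second `Source.get` lookup. The recurring call shape, excerpted from the hunks above:

```ts
const baseModel = await Model.getBaseModelSQL(context, {
  id: model.id,
  viewId: view?.id,
  dbDriver: await NcConnectionMgrv2.get(source),
  source, // lets getBaseModelSQL pick the schema without refetching the source
});
```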
diff --git a/packages/nocodb/src/services/data-table.service.ts b/packages/nocodb/src/services/data-table.service.ts
index 1671bfda41..7229885660 100644
--- a/packages/nocodb/src/services/data-table.service.ts
+++ b/packages/nocodb/src/services/data-table.service.ts
@@ -52,6 +52,7 @@ export class DataTableService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const row = await baseModel.readByPk(param.rowId, false, param.query, {
@@ -82,6 +83,7 @@ export class DataTableService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
if (view.type !== ViewTypes.GRID) {
diff --git a/packages/nocodb/src/services/datas.service.ts b/packages/nocodb/src/services/datas.service.ts
index 0894a1c05a..1d6b1f69d2 100644
--- a/packages/nocodb/src/services/datas.service.ts
+++ b/packages/nocodb/src/services/datas.service.ts
@@ -96,6 +96,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const countArgs: any = { ...param.query, throwErrorIfInvalidParams: true };
@@ -124,6 +125,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
return await baseModel.nestedInsert(param.body, null, param.cookie);
@@ -145,6 +147,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
return await baseModel.updateByPk(
@@ -166,6 +169,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
// if xcdb base skip checking for LTAR
@@ -209,6 +213,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
}));
const { ast, dependencyFields } = await getAst(context, {
@@ -273,6 +278,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const args: any = { ...query };
@@ -305,6 +311,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const listArgs: any = { ...query };
@@ -342,6 +349,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const row = await baseModel.readByPk(param.rowId, false, param.query, {
getHiddenColumn: param.getHiddenColumn,
@@ -366,6 +374,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
return await baseModel.exist(param.rowId);
@@ -403,6 +412,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { ast, dependencyFields } = await getAst(context, {
@@ -487,6 +497,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = `${model.title}List`;
@@ -551,6 +562,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = 'List';
@@ -615,6 +627,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = 'List';
@@ -679,6 +692,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = 'List';
@@ -743,6 +757,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = `${model.title}List`;
@@ -797,6 +812,7 @@ export class DatasService {
const baseModel = await Model.getBaseModelSQL(context, {
id: model.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { ast, dependencyFields } = await getAst(context, {
@@ -830,6 +846,7 @@ export class DatasService {
const baseModel = await Model.getBaseModelSQL(context, {
id: model.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
return await baseModel.insert(param.body, null, param.cookie);
@@ -854,6 +871,7 @@ export class DatasService {
const baseModel = await Model.getBaseModelSQL(context, {
id: model.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
return await baseModel.updateByPk(
@@ -882,6 +900,7 @@ export class DatasService {
const baseModel = await Model.getBaseModelSQL(context, {
id: model.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
return await baseModel.delByPk(param.rowId, null, param.cookie);
@@ -911,6 +930,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
await baseModel.removeChild({
@@ -947,6 +967,7 @@ export class DatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
await baseModel.addChild({
@@ -1010,6 +1031,7 @@ export class DatasService {
id: view.model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { offset, dbRows, elapsed } = await getDbRows(context, {
@@ -1048,6 +1070,7 @@ export class DatasService {
id: view.model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { offset, dbRows, elapsed } = await getDbRows(context, {
diff --git a/packages/nocodb/src/services/public-datas.service.ts b/packages/nocodb/src/services/public-datas.service.ts
index 0aba33ef4b..d74d00ffe4 100644
--- a/packages/nocodb/src/services/public-datas.service.ts
+++ b/packages/nocodb/src/services/public-datas.service.ts
@@ -65,6 +65,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { ast, dependencyFields } = await getAst(context, {
@@ -129,6 +130,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const listArgs: any = { ...param.query };
@@ -198,6 +200,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { ast } = await getAst(context, { model, query: param.query, view });
@@ -293,6 +296,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const listArgs: any = { ...query };
@@ -350,6 +354,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
await view.getViewWithInfo(context);
@@ -470,6 +475,7 @@ export class PublicDatasService {
id: model.id,
viewId: colOptions.fk_target_view_id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const { ast, dependencyFields } = await getAst(context, {
@@ -558,6 +564,7 @@ export class PublicDatasService {
id: view.fk_model_id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = `List`;
@@ -637,6 +644,7 @@ export class PublicDatasService {
id: view.fk_model_id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const key = `List`;
@@ -711,6 +719,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const row = await baseModel.readByPk(rowId, false, query);
@@ -813,6 +822,7 @@ export class PublicDatasService {
id: model.id,
viewId: view?.id,
dbDriver: await NcConnectionMgrv2.get(source),
+ source,
});
const listArgs: any = { ...param.query };
diff --git a/packages/nocodb/src/utils/common/NcConnectionMgrv2.ts b/packages/nocodb/src/utils/common/NcConnectionMgrv2.ts
index 5afba858c3..d030fc2c4a 100644
--- a/packages/nocodb/src/utils/common/NcConnectionMgrv2.ts
+++ b/packages/nocodb/src/utils/common/NcConnectionMgrv2.ts
@@ -111,4 +111,8 @@ export default class NcConnectionMgrv2 {
...(await source.getConnectionConfig()),
});
}
+
+ public static async getDataConfig?() {
+ return Noco.getConfig()?.meta?.db;
+ }
}