mirror of https://github.com/nocodb/nocodb
Commit by Pranav C, committed by GitHub, 2 months ago
69 changed files with 2527 additions and 588 deletions
@ -0,0 +1,62 @@
name: "NestJS Unit Test"
on:
  push:
    branches: [develop]
    paths:
      - "packages/nocodb/**"
      - ".github/workflows/jest-unit-test.yml"
  pull_request:
    types: [opened, reopened, synchronize, ready_for_review, labeled]
    branches: [develop]
    paths:
      - "packages/nocodb/**"
      - ".github/workflows/jest-unit-test.yml"
  workflow_call:
  # Triggered manually
  workflow_dispatch:
jobs:
  jest-unit-test:
    runs-on: [self-hosted, aws]
    timeout-minutes: 20
    if: ${{ github.event_name == 'push' || contains(github.event.pull_request.labels.*.name, 'trigger-CI') || !github.event.pull_request.draft || inputs.force == true }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.19.1
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 8
      - name: Get pnpm store directory
        shell: bash
        timeout-minutes: 1
        run: |
          echo "STORE_PATH=/root/setup-pnpm/node_modules/.bin/store/v3" >> $GITHUB_ENV
      - uses: actions/cache@v3
        name: Setup pnpm cache
        with:
          path: ${{ env.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
      - name: Set CI env
        run: export CI=true
      - name: Set NC Edition
        run: export EE=true
      - name: remove use-node-version line from .npmrc
        run: sed -i '/^use-node-version/d' .npmrc
      - name: install dependencies
        run: pnpm bootstrap
      - name: build nocodb-sdk
        working-directory: ./packages/nocodb-sdk
        run: |
          pnpm run generate:sdk
          pnpm run build:main
      - name: run unit tests
        working-directory: ./packages/nocodb
        run: pnpm run test
@ -0,0 +1,156 @@
|
||||
name: "Release : Secret CLI Executables" |
||||
|
||||
on: |
||||
# Triggered manually |
||||
workflow_dispatch: |
||||
inputs: |
||||
tag: |
||||
description: "Tag name" |
||||
required: true |
||||
secrets: |
||||
NC_GITHUB_TOKEN: |
||||
required: true |
||||
jobs: |
||||
build-and-publish: |
||||
runs-on: [self-hosted, aws] |
||||
steps: |
||||
- uses: actions/checkout@v3 |
||||
- name: Cache node modules |
||||
id: cache-npm |
||||
uses: actions/cache@v3 |
||||
env: |
||||
cache-name: cache-node-modules |
||||
with: |
||||
# npm cache files are stored in `~/.npm` on Linux/macOS |
||||
path: ~/.npm |
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} |
||||
restore-keys: | |
||||
${{ runner.os }}-build-${{ env.cache-name }}- |
||||
${{ runner.os }}-build- |
||||
${{ runner.os }}- |
||||
|
||||
- name: Cache pkg modules |
||||
id: cache-pkg |
||||
uses: actions/cache@v3 |
||||
env: |
||||
cache-name: cache-pkg |
||||
with: |
||||
# pkg cache files are stored in `~/.pkg-cache` |
||||
path: ~/.pkg-cache |
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} |
||||
restore-keys: | |
||||
${{ runner.os }}-build-${{ env.cache-name }}- |
||||
${{ runner.os }}-build- |
||||
${{ runner.os }}- |
||||
|
||||
- name: Build executables |
||||
run: | |
||||
pnpm bootstrap |
||||
cd ./packages/nocodb |
||||
pnpm run build:cli |
||||
cd ../nc-secret-mgr |
||||
targetVersion=${{ github.event.inputs.tag || inputs.tag }} node ../../scripts/updateVersion.js |
||||
pnpm run build && pnpm run publish |
||||
|
||||
# for building images for all platforms these libraries are required in Linux |
||||
- name: Install QEMU and ldid |
||||
run: | |
||||
sudo apt update |
||||
# Install qemu |
||||
sudo apt install qemu binfmt-support qemu-user-static |
||||
# install ldid |
||||
git clone https://github.com/daeken/ldid.git |
||||
cd ./ldid |
||||
./make.sh |
||||
sudo cp ./ldid /usr/local/bin |
||||
|
||||
- uses: actions/setup-node@v3 |
||||
with: |
||||
node-version: 16 |
||||
|
||||
- name: Install nocodb, other dependencies and build executables
||||
run: | |
||||
cd ./packages/nc-secret-mgr |
||||
|
||||
# install npm dependencies
||||
pnpm i |
||||
|
||||
# Build sqlite binaries for all platforms |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=win32 --fallback-to-build --target_arch=x64 --target_libc=unknown |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=win32 --fallback-to-build --target_arch=ia32 --target_libc=unknown |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=darwin --fallback-to-build --target_arch=x64 --target_libc=unknown |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=darwin --fallback-to-build --target_arch=arm64 --target_libc=unknown |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=linux --fallback-to-build --target_arch=x64 --target_libc=glibc |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=linux --fallback-to-build --target_arch=arm64 --target_libc=glibc |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=linux --fallback-to-build --target_arch=x64 --target_libc=musl |
||||
./node_modules/.bin/node-pre-gyp install --directory=./node_modules/sqlite3 --target_platform=linux --fallback-to-build --target_arch=arm64 --target_libc=musl |
||||
|
||||
# clean up code to optimize size |
||||
npx modclean --patterns="default:*" --run |
||||
|
||||
# build executables |
||||
npm run build |
||||
|
||||
ls ./dist |
||||
|
||||
# Move macOS executables for signing |
||||
mkdir ./mac-dist |
||||
mv ./dist/nc-secret-arm64 ./mac-dist/ |
||||
mv ./dist/nc-secret-x64 ./mac-dist/ |
||||
|
||||
- name: Upload executables (except mac executables) to release
||||
uses: svenstaro/upload-release-action@v2 |
||||
with: |
||||
repo_token: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
file: dist/** |
||||
tag: ${{ github.event.inputs.tag || inputs.tag }} |
||||
overwrite: true |
||||
file_glob: true |
||||
repo_name: nocodb/nc-secret-mgr |
||||
|
||||
- uses: actions/upload-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
retention-days: 1 |
||||
sign-mac-executables: |
||||
runs-on: macos-latest |
||||
needs: build-and-publish
||||
steps: |
||||
|
||||
- uses: actions/download-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
|
||||
- name: Sign macOS executables |
||||
run: | |
||||
/usr/bin/codesign --force -s - ./scripts/pkg-executable/mac-dist/nc-secret-arm64 -v |
||||
/usr/bin/codesign --force -s - ./scripts/pkg-executable/mac-dist/nc-secret-x64 -v |
||||
|
||||
- uses: actions/upload-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
retention-days: 1 |
||||
|
||||
|
||||
publish-mac-executables: |
||||
needs: [sign-mac-executables, build-and-publish]
||||
runs-on: [self-hosted, aws] |
||||
steps: |
||||
- uses: actions/download-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
|
||||
- name: Upload mac executables to release
||||
uses: svenstaro/upload-release-action@v2 |
||||
with: |
||||
repo_token: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
file: mac-dist/** |
||||
tag: ${{ github.event.inputs.tag || inputs.tag }} |
||||
overwrite: true |
||||
file_glob: true |
||||
repo_name: nocodb/nc-secret-mgr |
||||
|
@ -0,0 +1,5 @@
tsconfig.json
webpack.config.js
src
node_modules
scripts
File diff suppressed because one or more lines are too long
@ -0,0 +1,68 @@
|
||||
/*! |
||||
* mime-db |
||||
* Copyright(c) 2014 Jonathan Ong |
||||
* Copyright(c) 2015-2022 Douglas Christopher Wilson |
||||
* MIT Licensed |
||||
*/ |
||||
|
||||
/*! |
||||
* mime-types |
||||
* Copyright(c) 2014 Jonathan Ong |
||||
* Copyright(c) 2015 Douglas Christopher Wilson |
||||
* MIT Licensed |
||||
*/ |
||||
|
||||
/*! ***************************************************************************** |
||||
Copyright (C) Microsoft. All rights reserved. |
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use |
||||
this file except in compliance with the License. You may obtain a copy of the |
||||
License at http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED |
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, |
||||
MERCHANTABLITY OR NON-INFRINGEMENT. |
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions |
||||
and limitations under the License. |
||||
***************************************************************************** */ |
||||
|
||||
/** |
||||
* @copyright Copyright (c) 2021, Xgene Cloud Ltd |
||||
* |
||||
* @author Naveen MR <oof1lab@gmail.com> |
||||
* @author Pranav C Balan <pranavxc@gmail.com> |
||||
* |
||||
* @license GNU AGPL version 3 or any later version |
||||
* |
||||
* This program is free software: you can redistribute it and/or modify |
||||
* it under the terms of the GNU Affero General Public License as |
||||
* published by the Free Software Foundation, either version 3 of the |
||||
* License, or (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU Affero General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Affero General Public License |
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>. |
||||
* |
||||
*/ |
||||
|
||||
/** @preserve |
||||
* Counter block mode compatible with Dr Brian Gladman fileenc.c |
||||
* derived from CryptoJS.mode.CTR |
||||
* Jan Hruby jhruby.web@gmail.com |
||||
*/ |
||||
|
||||
/** @preserve |
||||
(c) 2012 by Cédric Mesnil. All rights reserved. |
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: |
||||
|
||||
- Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. |
||||
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. |
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
*/ |
@ -0,0 +1,50 @@
{
  "name": "nc-secret",
  "version": "0.0.1",
  "description": "",
  "main": "dist/cli.js",
  "bin": "dist/cli.js",
  "scripts": {
    "build": "webpack --config webpack.config.js",
    "dev": "cross-env NC_DB=\"pg://localhost:5432?u=postgres&p=password&d=meta_2024_09_07\" nodemon --watch 'src/**/*.ts' --exec 'ts-node --project tsconfig.json' src/index.ts -- a b --nc-db abc",
    "test": "mocha --require ts-node/register src/**/*.spec.ts",
    "build:pkg": "npx pkg . --out-path dist --compress GZip",
    "publish": "npm publish ."
  },
  "pkg": {
    "assets": [
      "node_modules/**/*"
    ],
    "targets": [
      "node16-linux-arm64",
      "node16-macos-arm64",
      "node16-win-arm64",
      "node16-linux-x64",
      "node16-macos-x64",
      "node16-win-x64"
    ]
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "chalk": "^5.3.0",
    "commander": "^12.1.0",
    "enquirer": "^2.4.1",
    "figlet": "^1.7.0",
    "knex": "^3.1.0",
    "mysql": "^2.18.1",
    "parse-database-url": "^0.3.0",
    "pg": "^8.12.0",
    "sqlite3": "^5.1.7"
  },
  "devDependencies": {
    "@types/figlet": "^1.5.8",
    "chai": "^4.4.1",
    "class-transformer": "0.3.1",
    "cross-env": "^7.0.3",
    "mocha": "^10.3.0",
    "nodemon": "^3.0.3",
    "pkg": "^5.8.0"
  }
}
@ -0,0 +1,97 @@
import * as path from 'path';
import fs from 'fs';
import { promisify } from 'util';

const { DriverClient, getToolDir, metaUrlToDbConfig, prepareEnv } = require('../nocodb/cli');

export class NcConfig {
  meta: {
    db: any;
  } = {
    db: {
      client: DriverClient.SQLITE,
      connection: {
        filename: 'noco.db',
      },
    },
  };

  toolDir: string;

  private constructor() {
    this.toolDir = getToolDir();
  }

  public static async create(param: {
    meta: {
      metaUrl?: string;
      metaJson?: string;
      metaJsonFile?: string;
      databaseUrlFile?: string;
      databaseUrl?: string;
    };
    secret?: string;
  }): Promise<NcConfig> {
    const { meta, secret } = param;

    const ncConfig = new NcConfig();

    if (ncConfig.meta?.db?.connection?.filename) {
      ncConfig.meta.db.connection.filename = path.join(
        ncConfig.toolDir,
        ncConfig.meta.db.connection.filename,
      );
    }

    if (meta?.metaUrl) {
      ncConfig.meta.db = await metaUrlToDbConfig(meta.metaUrl);
    } else if (meta?.metaJson) {
      ncConfig.meta.db = JSON.parse(meta.metaJson);
    } else if (meta?.metaJsonFile) {
      if (!(await promisify(fs.exists)(meta.metaJsonFile))) {
        throw new Error(`NC_DB_JSON_FILE not found: ${meta.metaJsonFile}`);
      }
      const fileContent = await promisify(fs.readFile)(meta.metaJsonFile, {
        encoding: 'utf8',
      });
      ncConfig.meta.db = JSON.parse(fileContent);
    }

    return ncConfig;
  }

  public static async createByEnv(): Promise<NcConfig> {
    return NcConfig.create({
      meta: {
        metaUrl: process.env.NC_DB,
        metaJson: process.env.NC_DB_JSON,
        metaJsonFile: process.env.NC_DB_JSON_FILE,
      },
      secret: process.env.NC_AUTH_JWT_SECRET,
    });
  }
}

export const getNocoConfig = async (options: {
  ncDb?: string;
  ncDbJson?: string;
  ncDbJsonFile?: string;
  databaseUrl?: string;
  databaseUrlFile?: string;
} = {}) => {
  // check for a JDBC url specified via env variables or options
  await prepareEnv({
    databaseUrl: options.databaseUrl || process.env.NC_DATABASE_URL || process.env.DATABASE_URL,
    databaseUrlFile: options.databaseUrlFile || process.env.NC_DATABASE_URL_FILE || process.env.DATABASE_URL_FILE,
  });

  // create an NcConfig using the utility above, which mirrors NocoDB's NcConfig but only carries the meta db config
  return NcConfig.create({
    meta: {
      metaUrl: process.env.NC_DB || options.ncDb,
      metaJson: process.env.NC_DB_JSON || options.ncDbJson,
      metaJsonFile: process.env.NC_DB_JSON_FILE || options.ncDbJsonFile,
    },
  });
};
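As a quick orientation for the config resolution above, here is a minimal usage sketch (not part of the commit). It assumes the code runs inside the nc-secret-mgr package, where `'../nocodb/cli'` resolves and the barrel export `./core` exists; the connection URL is illustrative, in the same `pg://` form as the package's `dev` script.

```ts
import { getNocoConfig } from './core';

// Env variables win over CLI-style options (see the `process.env.NC_DB || options.ncDb`
// fallbacks above), so the option below only takes effect when NC_DB is unset.
async function loadConfig() {
  const config = await getNocoConfig({
    ncDb: 'pg://localhost:5432?u=postgres&p=password&d=meta', // illustrative URL
  });
  console.log(config.meta.db); // resolved meta-db connection config
}

loadConfig().catch(console.error);
```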
@ -0,0 +1,3 @@
export class NcError extends Error {

}
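NcError exists so the CLI can tell expected, user-facing failures apart from unexpected crashes. A small sketch of that pattern, mirroring the catch block in `index.ts` later in this diff (the message is illustrative):

```ts
import { NcError, logger } from './core';

try {
  throw new NcError('SQLite database file not found at path: ./noco.db');
} catch (e) {
  if (e instanceof NcError) {
    // expected failure: print only the message
    logger.error(e.message);
  } else {
    // unexpected failure: dump the full error
    console.error(e);
  }
}
```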
@ -0,0 +1,138 @@
import { existsSync } from 'fs';
import { NcError } from './NcError';
import * as logger from './logger';

const { SqlClientFactory, MetaTable, decryptPropIfRequired, encryptPropIfRequired } = require('../nocodb/cli');

export class SecretManager {

  private sqlClient;

  constructor(private prevSecret: string, private newSecret: string, private config: any) {
    this.sqlClient = SqlClientFactory.create(this.config.meta.db);
  }

  // validate config by checking if the database config is valid
  async validateConfig() {
    // if sqlite, check that the file exists at the provided path
    if (this.config.meta.db.client === 'sqlite3') {
      if (!existsSync(this.config.meta.db.connection.filename)) {
        throw new NcError('SQLite database file not found at path: ' + this.config.meta.db.connection.filename);
      }
    }

    // use the sql client created via SqlClientFactory to test the connection
    const isValid = await this.sqlClient.testConnection();
    if (!isValid) {
      throw new NcError('Invalid database configuration. Please verify your database settings and ensure the database is reachable.');
    }
  }

  async validateAndExtract() {
    // check if the required tables are present in the database
    if (!(await this.sqlClient.knex.schema.hasTable(MetaTable.SOURCES))) {
      throw new NcError('Sources table not found');
    }

    if (!(await this.sqlClient.knex.schema.hasTable(MetaTable.INTEGRATIONS))) {
      throw new NcError('Integrations table not found');
    }

    // if the is_encrypted column is missing from either table, throw an error
    if (
      !(await this.sqlClient.knex.schema.hasColumn(MetaTable.SOURCES, 'is_encrypted')) ||
      !(await this.sqlClient.knex.schema.hasColumn(MetaTable.INTEGRATIONS, 'is_encrypted'))
    ) {
      throw new NcError('Looks like you are using an older version of NocoDB. Please upgrade to the latest version and try again.');
    }

    const sources = await this.sqlClient.knex(MetaTable.SOURCES).where(qb => {
      qb.where('is_meta', false).orWhere('is_meta', null)
    });

    const integrations = await this.sqlClient.knex(MetaTable.INTEGRATIONS);

    const sourcesToUpdate: Record<string, any>[] = [];
    const integrationsToUpdate: Record<string, any>[] = [];

    let isValid = false;
    for (const source of sources) {
      try {
        const decrypted = decryptPropIfRequired({
          data: source,
          secret: this.prevSecret,
          prop: 'config'
        });
        isValid = true;
        sourcesToUpdate.push({ ...source, config: decrypted });
      } catch (e) {
        logger.error('Failed to decrypt source configuration: ' + e.message);
      }
    }

    for (const integration of integrations) {
      try {
        const decrypted = decryptPropIfRequired({
          data: integration,
          secret: this.prevSecret,
          prop: 'config'
        });
        isValid = true;
        integrationsToUpdate.push({ ...integration, config: decrypted });
      } catch (e) {
        logger.error('Failed to decrypt integration configuration: ' + e.message);
      }
    }

    // if all decryptions failed, throw an error
    if (!isValid) {
      throw new NcError('Invalid old secret or no sources/integrations found');
    }

    return { sourcesToUpdate, integrationsToUpdate };
  }

  async updateSecret(
    sourcesToUpdate: Record<string, any>[],
    integrationsToUpdate: Record<string, any>[]
  ) {
    // start transaction
    const transaction = await this.sqlClient.transaction();

    try {
      // update sources
      for (const source of sourcesToUpdate) {
        await transaction(MetaTable.SOURCES).update({
          config: encryptPropIfRequired({
            data: source,
            secret: this.newSecret,
            prop: 'config'
          })
        }).where('id', source.id);
      }

      // update integrations
      for (const integration of integrationsToUpdate) {
        await transaction(MetaTable.INTEGRATIONS).update({
          config: encryptPropIfRequired({
            data: integration,
            secret: this.newSecret,
            prop: 'config'
          })
        }).where('id', integration.id);
      }

      await transaction.commit();
    } catch (e) {
      logger.error('Failed to update configurations with the new secret: ' + e.message);
      await transaction.rollback();
      throw e;
    }
  }
}
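The intended flow of the class above is: validate the connection, decrypt everything with the old secret, then re-encrypt inside a transaction. A condensed sketch of that flow, which is essentially what `index.ts` below drives (the secret values are illustrative):

```ts
import { getNocoConfig, SecretManager } from './core';

async function rotateSecret(prevSecret: string, newSecret: string) {
  const config = await getNocoConfig(); // resolved from NC_DB / NC_DB_JSON / etc.
  const manager = new SecretManager(prevSecret, newSecret, config);

  await manager.validateConfig(); // check the meta DB is reachable
  const { sourcesToUpdate, integrationsToUpdate } =
    await manager.validateAndExtract(); // decrypt configs with the old secret
  await manager.updateSecret(sourcesToUpdate, integrationsToUpdate); // re-encrypt with the new secret
}

rotateSecret('old-secret', 'new-secret').catch(console.error); // illustrative values
```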
@ -0,0 +1,4 @@
export * from './NcConfig';
export * from './NcError';
export * as logger from './logger';
export * from './SecretManager';
@ -0,0 +1,25 @@
import chalk from 'chalk';

export function log(message: string) {
  console.log(chalk.white(message));
}

export function error(message: string) {
  console.error(chalk.red('Error: ' + message));
}

export function warn(message: string) {
  console.warn(chalk.yellow('Warning: ' + message));
}

export function info(message: string) {
  console.info(chalk.green('Info: ' + message));
}

export function success(message: string) {
  console.log(chalk.green('Success: ' + message));
}

export function debug(message: string) {
  console.debug(chalk.blue('Debug: ' + message));
}
@ -0,0 +1,17 @@
import { describe, it } from 'mocha';
import { expect } from 'chai';
import { program } from 'commander';

describe('Index', () => {
  describe('index.ts', () => {
    it('should parse the arguments and options correctly', () => {
      const argv = ['node', 'index.ts', 'prevSecret', 'newSecret', '--nc-db', 'test_db_url', '--database-url', 'test_db_url', '-o', 'prevSecret', '-n', 'newSecret'];
      program.parse(argv);
      expect(program.opts().prev).to.equal('prevSecret');
      expect(program.opts().new).to.equal('newSecret');
      expect(program.opts().ncDb).to.equal('test_db_url');
      expect(program.opts().databaseUrl).to.equal('test_db_url');
    });
  });
});
@ -0,0 +1,72 @@
import figlet from 'figlet';
import { Command } from 'commander';
import { getNocoConfig } from './core';
import { SecretManager } from './core';
import { NcError } from './core';
import { logger } from './core';

console.log(figlet.textSync('NocoDB Secret CLI'));

const program = new Command();

program
  .version('1.0.0')
  .description('NocoDB Secret CLI')
  .arguments('<prevSecret> <newSecret>')
  .option('--nc-db <char>', 'NocoDB connection database url, equivalent to NC_DB env variable')
  .option('--nc-db-json <char>', 'NocoDB connection database json, equivalent to NC_DB_JSON env variable')
  .option('--nc-db-json-file <char>', 'NocoDB connection database json file path, equivalent to NC_DB_JSON_FILE env variable')
  .option('--database-url <char>', 'JDBC database url, equivalent to DATABASE_URL env variable')
  .option('--database-url-file <char>', 'JDBC database url file path, equivalent to DATABASE_URL_FILE env variable')
  .option('-p, --prev <char>', 'old secret string to decrypt sources and integrations')
  .option('-n, --new <char>', 'new secret string to encrypt sources and integrations')
  .action(async (prevVal, newVal) => {
    try {
      // extract options
      const options = program.opts();
      const config = await getNocoConfig(options);
      // prefer the -p/-n options, falling back to the positional arguments
      const prevSecret = options.prev || prevVal;
      const newSecret = options.new || newVal;

      if (!prevSecret || !newSecret) {
        console.error('Error: Both prevSecret and newSecret are required.');
        program.help();
      } else {
        const secretManager = new SecretManager(prevSecret, newSecret, config);

        // validate meta db config which is resolved from env variables / options
        await secretManager.validateConfig();

        // validate the old secret and extract decrypted configs
        const { sourcesToUpdate, integrationsToUpdate } = await secretManager.validateAndExtract();

        // re-encrypt and update sources and integrations
        await secretManager.updateSecret(sourcesToUpdate, integrationsToUpdate);
      }
    } catch (e) {
      if (e instanceof NcError) {
        // print the error message in a readable way
        logger.error(e.message);
        process.exit(1);
      }
      console.error(e);
      process.exit(1);
    }
  });

// Add error handling
program.exitOverride((err) => {
  console.error(err.message);
  process.exit(1);
});

program.parse(process.argv);
File diff suppressed because one or more lines are too long
@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "rootDir": "src",
    "outDir": "dist",
    "strict": true,
    "target": "es6",
    "module": "commonjs",
    "sourceMap": true,
    "esModuleInterop": true,
    "moduleResolution": "node",
    "skipLibCheck": true,
    "noImplicitAny": false,
    "allowJs": true,
    "experimentalDecorators": true
  }
}
@ -0,0 +1,46 @@
|
||||
const nodeExternals = require('webpack-node-externals'); |
||||
const TerserPlugin = require('terser-webpack-plugin'); |
||||
const webpack = require('webpack'); |
||||
const path = require('path'); |
||||
module.exports = { |
||||
entry: './src/index.ts', |
||||
module: { |
||||
rules: [ |
||||
{ |
||||
test: /\.tsx?$/, |
||||
exclude: /node_modules/, |
||||
use: { |
||||
loader: 'ts-loader', |
||||
options: { |
||||
transpileOnly: true |
||||
} |
||||
}, |
||||
}, |
||||
], |
||||
}, |
||||
|
||||
optimization: { |
||||
minimize: true, |
||||
minimizer: [new TerserPlugin()], |
||||
nodeEnv: false |
||||
}, |
||||
externals: [nodeExternals()], |
||||
resolve: { |
||||
extensions: ['.tsx', '.ts', '.js', '.json'], |
||||
}, |
||||
output: { |
||||
filename: 'cli.js', |
||||
path: path.resolve(__dirname, 'dist'), |
||||
library: 'libs', |
||||
libraryTarget: 'umd', |
||||
globalObject: "typeof self !== 'undefined' ? self : this", |
||||
}, |
||||
// node: {
|
||||
// fs: 'empty'
|
||||
// },
|
||||
plugins: [ |
||||
new webpack.BannerPlugin({banner: "#! /usr/bin/env node", raw: true}), |
||||
], |
||||
|
||||
target: 'node', |
||||
}; |
@ -0,0 +1,61 @@
---
title: 'Updating Secrets'
description: 'Learn how to update secrets in NocoDB using the nc-secret-mgr package.'
tags: ['Secrets', 'nc-secret-mgr', 'Update', 'Security']
keywords: ['NocoDB secrets', 'nc-secret-mgr', 'Update', 'Security']
---

## Updating Secrets

To update a secret in NocoDB, you can use the `nc-secret-mgr` package. Follow the steps below to update a secret.

### Using the Command Line Interface (CLI)

1. Install the `nc-secret-mgr` package if you haven't already. You can do this by running the following command in your terminal:

```bash
npm install -g nc-secret-mgr
```

2. Once the package is installed, you can update a secret by running the following command:

```bash
NC_DB="pg://host:port?u=user&p=password&d=database" nc-secret-mgr update --prev <previous-secret> --new <new-secret>
```

OR

```bash
NC_DB="pg://host:port?u=user&p=password&d=database" nc-secret-mgr <previous-secret> <new-secret>
```

Replace `<previous-secret>` with the secret currently in use and `<new-secret>` with the new secret value.

3. After running the command, the secret will be updated in NocoDB.

### Using Executables

Alternatively, you can use the `nc-secret-mgr` executable to update secrets.

1. Download the `nc-secret-mgr` executable from the [latest release](https://github.com/nocodb/nc-secret-mgr/releases/latest).
2. Run the executable using the following command:

```bash
NC_DB="pg://host:port?u=user&p=password&d=database" ./nc-secret-macos-arm64 update --prev <previous-secret> --new <new-secret>
```

Replace `<previous-secret>` with the secret currently in use and `<new-secret>` with the new secret value.

3. After running the command, the secret will be updated in NocoDB.

Note: All environment variables are supported, including `NC_DB`, `NC_DB_JSON`, `NC_DB_JSON_FILE`, `DATABASE_URL`, and `DATABASE_URL_FILE`. You can use any of these variables to specify your database connection. Alternatively, you can use the equivalent CLI parameters listed below.

| Environment Variable | CLI Parameter |
| --------------------- | --------------------- |
| `NC_DB` | `--nc-db` |
| `NC_DB_JSON` | `--nc-db-json` |
| `NC_DB_JSON_FILE` | `--nc-db-json-file` |
| `DATABASE_URL` | `--database-url` |
| `DATABASE_URL_FILE` | `--database-url-file` |
@ -0,0 +1,41 @@
|
||||
// jest.config.js
|
||||
// In the following statement, replace `./tsconfig` with the path to your `tsconfig` file
|
||||
// which contains the path mapping (ie the `compilerOptions.paths` option):
|
||||
|
||||
module.exports = { |
||||
moduleFileExtensions: ['js', 'json', 'ts', 'node'], |
||||
rootDir: 'src', |
||||
testRegex: '(Integration|Source)\\.spec\\.ts$', |
||||
collectCoverageFrom: ['**/*.(t|j)s'], |
||||
coverageDirectory: '../coverage', |
||||
testEnvironment: 'node', |
||||
moduleNameMapper: { |
||||
'^src/(.*)$': [ |
||||
'<rootDir>/$1', |
||||
// '<rootDir>/$1/index'
|
||||
], |
||||
'^~/(.*)$': [ |
||||
'<rootDir>/ee/$1', |
||||
'<rootDir>/$1', |
||||
// '<rootDir>/ee/$1/index',
|
||||
// '<rootDir>/$1/index',
|
||||
], |
||||
'^@/(.*)$': ['<rootDir>/ee/$1', '<rootDir>/$1'], |
||||
}, |
||||
// [...]
|
||||
// moduleNameMapper: pathsToModuleNameMapper(
|
||||
// compilerOptions.paths /*, { prefix: '<rootDir>/' } */,
|
||||
// ),
|
||||
// modulePaths: [compilerOptions.baseUrl],
|
||||
// moduleNameMapper: pathsToModuleNameMapper(compilerOptions.paths, {
|
||||
// prefix: '<rootDir>/../',
|
||||
// }),
|
||||
transform: { |
||||
'^.+\\.ts$': [ |
||||
'ts-jest', |
||||
{ |
||||
tsconfig: 'tsconfig.json', |
||||
}, |
||||
], |
||||
}, |
||||
}; |
@ -0,0 +1,9 @@
export { SqlClientFactory } from '~/db/sql-client/lib/SqlClientFactory';
export { MetaTable } from '~/utils/globals';
export * from '~/utils/encryptDecrypt';
export {
  getToolDir,
  metaUrlToDbConfig,
  prepareEnv,
} from '~/utils/nc-config/helpers';
export { DriverClient } from '~/utils/nc-config/constants';
@ -0,0 +1,114 @@
|
||||
import { Logger } from '@nestjs/common'; |
||||
import Noco from '~/Noco'; |
||||
import { MetaTable, RootScopes } from '~/utils/globals'; |
||||
import { encryptPropIfRequired } from '~/utils'; |
||||
|
||||
const logger = new Logger('initDataSourceEncryption'); |
||||
|
||||
export default async function initDataSourceEncryption(_ncMeta = Noco.ncMeta) { |
||||
// return if env is not set
|
||||
if (!process.env.NC_CONNECTION_ENCRYPT_KEY) { |
||||
return; |
||||
} |
||||
|
||||
const secret = process.env.NC_CONNECTION_ENCRYPT_KEY; |
||||
|
||||
const ncMeta = await _ncMeta.startTransaction(); |
||||
|
||||
const successStatus: boolean[] = []; |
||||
|
||||
try { |
||||
// if configured, check for any non-encrypted data source by checking is_encrypted flag
|
||||
const sources = await ncMeta |
||||
.knex(MetaTable.SOURCES) |
||||
.where((qb) => { |
||||
qb.where('is_encrypted', false).orWhereNull('is_encrypted'); |
||||
}) |
||||
.whereNotNull('config'); |
||||
|
||||
for (const source of sources) { |
||||
// skip if no config
|
||||
if (!source.config) { |
||||
continue; |
||||
} |
||||
|
||||
// check if valid json, if not warn and skip
|
||||
try { |
||||
JSON.parse(source.config); |
||||
} catch (e) { |
||||
logger.warn('Invalid JSON in source config', source.alias);
||||
successStatus.push(false); |
||||
continue; |
||||
} |
||||
|
||||
// encrypt the data source
|
||||
await ncMeta.metaUpdate( |
||||
source.fk_workspace_id, |
||||
source.base_id, |
||||
MetaTable.SOURCES, |
||||
{ |
||||
config: encryptPropIfRequired({ |
||||
data: source, |
||||
secret, |
||||
}), |
||||
is_encrypted: true, |
||||
}, |
||||
source.id, |
||||
); |
||||
successStatus.push(true); |
||||
} |
||||
|
||||
const integrations = await ncMeta |
||||
.knex(MetaTable.INTEGRATIONS) |
||||
.where((qb) => { |
||||
qb.where('is_encrypted', false).orWhereNull('is_encrypted'); |
||||
}) |
||||
.whereNotNull('config'); |
||||
|
||||
for (const integration of integrations) { |
||||
// skip if no config
|
||||
if (!integration.config) {
||||
continue; |
||||
} |
||||
|
||||
// check if valid json, if not warn and skip
|
||||
try { |
||||
JSON.parse(integration.config);
||||
} catch (e) { |
||||
logger.warn('Invalid JSON in integration config', integration.title); |
||||
successStatus.push(false); |
||||
continue; |
||||
} |
||||
|
||||
// encrypt the data source
|
||||
await ncMeta.metaUpdate( |
||||
RootScopes.WORKSPACE, |
||||
RootScopes.WORKSPACE, |
||||
MetaTable.INTEGRATIONS, |
||||
{ |
||||
config: encryptPropIfRequired({ |
||||
data: integration, |
||||
secret, |
||||
}), |
||||
is_encrypted: true, |
||||
}, |
||||
integration.id, |
||||
); |
||||
successStatus.push(true); |
||||
} |
||||
|
||||
// if all failed, throw error
|
||||
if (successStatus.length && successStatus.every((status) => !status)) { |
||||
// if all fails then rollback and exit
|
||||
throw new Error( |
||||
'Failed to encrypt all data sources, please remove invalid data sources and try again.', |
||||
); |
||||
} |
||||
|
||||
await ncMeta.commit(); |
||||
} catch (e) { |
||||
await ncMeta.rollback(); |
||||
console.error('Failed to encrypt data sources'); |
||||
throw e; |
||||
} |
||||
} |
@ -0,0 +1,22 @@
import type { Knex } from 'knex';
import { MetaTable } from '~/utils/globals';

const up = async (knex: Knex) => {
  await knex.schema.alterTable(MetaTable.SOURCES, (table) => {
    table.boolean('is_encrypted').defaultTo(false);
  });
  await knex.schema.alterTable(MetaTable.INTEGRATIONS, (table) => {
    table.boolean('is_encrypted').defaultTo(false);
  });
};

const down = async (knex: Knex) => {
  await knex.schema.alterTable(MetaTable.SOURCES, (table) => {
    table.dropColumn('is_encrypted');
  });
  await knex.schema.alterTable(MetaTable.INTEGRATIONS, (table) => {
    table.dropColumn('is_encrypted');
  });
};

export { up, down };
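The new `is_encrypted` flag is what `initDataSourceEncryption` (earlier in this diff) filters on before re-encrypting configs. A minimal sketch of that filter, assuming a knex instance already bound to the meta database:

```ts
import type { Knex } from 'knex';
import { MetaTable } from '~/utils/globals';

// Select sources whose config has not been encrypted yet (is_encrypted false or null),
// skipping rows without a config, mirroring the query used during startup encryption.
export async function listUnencryptedSources(knex: Knex) {
  return knex(MetaTable.SOURCES)
    .where((qb) => {
      qb.where('is_encrypted', false).orWhereNull('is_encrypted');
    })
    .whereNotNull('config');
}
```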
@ -0,0 +1,257 @@
|
||||
import { IntegrationsType } from 'nocodb-sdk'; |
||||
import { Integration } from '~/models'; |
||||
import { MetaTable } from '~/utils/globals'; |
||||
import { decryptPropIfRequired, isEE } from '~/utils'; |
||||
|
||||
jest.mock('~/Noco'); |
||||
|
||||
describe('Integration Model', () => { |
||||
let integration: Integration; |
||||
let mockNcMeta: jest.Mocked<any>; |
||||
|
||||
beforeEach(() => { |
||||
mockNcMeta = { |
||||
metaList: jest.fn(), |
||||
metaGet2: jest.fn(), |
||||
metaInsert2: jest.fn(), |
||||
metaUpdate: jest.fn(), |
||||
metaDelete: jest.fn(), |
||||
metaGetNextOrder: jest.fn(), |
||||
}; |
||||
integration = new Integration({ |
||||
id: 'test-id', |
||||
title: 'Test Integration', |
||||
base_id: 'project-1', |
||||
}); |
||||
}); |
||||
|
||||
afterEach(() => { |
||||
jest.clearAllMocks(); |
||||
}); |
||||
|
||||
describe('list', () => { |
||||
it('should list integrations', async () => { |
||||
const mockIntegrations = [ |
||||
{ id: '1', title: 'Integration 1' }, |
||||
{ id: '2', title: 'Integration 2' }, |
||||
]; |
||||
// Mock the knex function
|
||||
mockNcMeta.knex = jest.fn().mockReturnValue({ |
||||
select: jest.fn().mockReturnThis(), |
||||
from: jest.fn().mockReturnThis(), |
||||
where: jest.fn().mockReturnThis(), |
||||
whereNull: jest.fn().mockReturnThis(), |
||||
orWhereNull: jest.fn().mockReturnThis(), |
||||
leftJoin: jest.fn().mockReturnThis(), |
||||
andWhere: jest.fn().mockReturnThis(), |
||||
clone: jest.fn().mockReturnThis(), |
||||
limit: jest.fn().mockReturnThis(), |
||||
offset: jest.fn().mockReturnThis(), |
||||
orderBy: jest.fn().mockReturnThis(), |
||||
then: jest |
||||
.fn() |
||||
.mockImplementation((callback) => |
||||
Promise.resolve(callback(mockIntegrations)), |
||||
), |
||||
}); |
||||
|
||||
const result = await Integration.list( |
||||
{ |
||||
userId: 'user-id', |
||||
workspaceId: 'workspace-id', |
||||
}, |
||||
mockNcMeta, |
||||
); |
||||
|
||||
expect(result.list).toEqual( |
||||
mockIntegrations.map((i) => expect.objectContaining(i)), |
||||
); |
||||
|
||||
// Verify that knex was called with the correct table
|
||||
expect(mockNcMeta.knex).toHaveBeenCalledWith(MetaTable.INTEGRATIONS); |
||||
|
||||
// Verify the chain of method calls
|
||||
const knexMock = mockNcMeta.knex.mock.results[0].value; |
||||
expect(knexMock.where).toHaveBeenCalled(); |
||||
expect(knexMock.orderBy).toHaveBeenCalledWith( |
||||
'nc_integrations_v2.order', |
||||
'asc', |
||||
); |
||||
}); |
||||
}); |
||||
|
||||
describe('get', () => { |
||||
it('should get an integration by id', async () => { |
||||
const mockIntegration = { id: 'test-id', title: 'Test Integration' }; |
||||
mockNcMeta.metaGet2.mockResolvedValue(mockIntegration); |
||||
|
||||
const result = await Integration.get( |
||||
{ |
||||
workspace_id: null, |
||||
}, |
||||
'test-id', |
||||
false, |
||||
mockNcMeta, |
||||
); |
||||
|
||||
expect(result).toBeInstanceOf(Integration); |
||||
expect(result).toEqual(expect.objectContaining(mockIntegration)); |
||||
expect(mockNcMeta.metaGet2).toBeCalledWith( |
||||
null, |
||||
'workspace', |
||||
MetaTable.INTEGRATIONS, |
||||
isEE ? { fk_workspace_id: null, id: 'test-id' } : 'test-id', |
||||
null, |
||||
{ _or: [{ deleted: { neq: true } }, { deleted: { eq: null } }] }, |
||||
); |
||||
}); |
||||
}); |
||||
|
||||
describe('create', () => { |
||||
it('should create a new integration', async () => { |
||||
const newIntegration = { |
||||
id: 'new-id', |
||||
title: 'New Integration', |
||||
workspaceId: 'workspace-1', |
||||
config: { |
||||
client: 'pg', |
||||
}, |
||||
}; |
||||
mockNcMeta.metaInsert2.mockResolvedValue({ |
||||
...newIntegration, |
||||
}); |
||||
mockNcMeta.metaGet2.mockResolvedValue({ |
||||
...newIntegration, |
||||
}); |
||||
mockNcMeta.metaGetNextOrder.mockResolvedValue(2); |
||||
|
||||
const result = await Integration.createIntegration( |
||||
newIntegration, |
||||
mockNcMeta, |
||||
); |
||||
|
||||
expect(result).toBeInstanceOf(Integration); |
||||
expect(result).toEqual( |
||||
expect.objectContaining({ id: 'new-id', ...newIntegration }), |
||||
); |
||||
expect(mockNcMeta.metaInsert2).toHaveBeenCalledWith( |
||||
'workspace-1', |
||||
'workspace', |
||||
MetaTable.INTEGRATIONS, |
||||
{ |
||||
...newIntegration, |
||||
order: 2, |
||||
fk_workspace_id: 'workspace-1', |
||||
workspaceId: undefined, |
||||
id: undefined, |
||||
config: JSON.stringify(newIntegration.config), |
||||
is_encrypted: false, |
||||
}, |
||||
); |
||||
}); |
||||
}); |
||||
|
||||
describe('create with encryption', () => { |
||||
beforeAll(() => { |
||||
process.env.NC_CONNECTION_ENCRYPT_KEY = 'test-secret'; |
||||
}); |
||||
|
||||
afterAll(() => { |
||||
process.env.NC_CONNECTION_ENCRYPT_KEY = undefined; |
||||
}); |
||||
|
||||
it('should create a new integration with encrypted config', async () => { |
||||
const newIntegration = { |
||||
id: 'new-id', |
||||
title: 'New Integration', |
||||
workspaceId: 'workspace-1', |
||||
config: { |
||||
client: 'pg', |
||||
}, |
||||
}; |
||||
mockNcMeta.metaInsert2.mockResolvedValue({ |
||||
...newIntegration, |
||||
}); |
||||
mockNcMeta.metaInsert2.mockResolvedValue({ |
||||
...newIntegration, |
||||
}); |
||||
mockNcMeta.metaGet2.mockResolvedValue({ |
||||
...newIntegration, |
||||
}); |
||||
mockNcMeta.metaGetNextOrder.mockResolvedValue(2); |
||||
|
||||
const result = await Integration.createIntegration( |
||||
newIntegration, |
||||
mockNcMeta, |
||||
); |
||||
|
||||
expect(result).toBeInstanceOf(Integration); |
||||
expect(result).toEqual( |
||||
expect.objectContaining({ id: 'new-id', ...newIntegration }), |
||||
); |
||||
|
||||
// Extract the arguments used in the call
|
||||
const calledWithArgs = mockNcMeta.metaInsert2.mock.calls[0][3]; |
||||
|
||||
// verify the 'config' field is encrypted
|
||||
expect(calledWithArgs.config).not.toEqual( |
||||
JSON.stringify(newIntegration.config), |
||||
); |
||||
|
||||
// Decrypt the 'config' field
|
||||
const decryptedConfig = decryptPropIfRequired({ data: calledWithArgs }); |
||||
|
||||
// Verify the decrypted config matches the original integration
|
||||
expect(decryptedConfig).toEqual(newIntegration.config); |
||||
}); |
||||
}); |
||||
|
||||
describe('update', () => { |
||||
it('should update an existing integration', async () => { |
||||
const updateData = { |
||||
title: 'Updated Integration', |
||||
type: IntegrationsType.Database, |
||||
}; |
||||
mockNcMeta.metaUpdate.mockResolvedValue({ |
||||
id: 'test-id', |
||||
type: IntegrationsType.Database, |
||||
...updateData, |
||||
}); |
||||
mockNcMeta.metaGet2.mockResolvedValue({ |
||||
id: 'test-id', |
||||
type: IntegrationsType.Database, |
||||
...updateData, |
||||
}); |
||||
|
||||
await Integration.updateIntegration( |
||||
{ |
||||
workspace_id: null, |
||||
}, |
||||
'test-id', |
||||
updateData, |
||||
mockNcMeta, |
||||
); |
||||
|
||||
expect(mockNcMeta.metaUpdate).toHaveBeenCalledWith( |
||||
null, |
||||
'workspace', |
||||
MetaTable.INTEGRATIONS, |
||||
updateData, |
||||
integration.id, |
||||
); |
||||
}); |
||||
}); |
||||
|
||||
describe('delete', () => { |
||||
it('should delete an integration', async () => { |
||||
await integration.delete(mockNcMeta); |
||||
|
||||
expect(mockNcMeta.metaDelete).toHaveBeenCalledWith( |
||||
undefined, |
||||
'workspace', |
||||
MetaTable.INTEGRATIONS, |
||||
integration.id, |
||||
); |
||||
}); |
||||
}); |
||||
}); |
@ -0,0 +1,54 @@
import CryptoJS from 'crypto-js';

export const getCredentialEncryptSecret = () =>
  process.env.NC_CONNECTION_ENCRYPT_KEY;

export const isEncryptionRequired = (secret = getCredentialEncryptSecret()) => {
  return !!secret;
};

export const encryptPropIfRequired = ({
  data,
  prop = 'config',
  secret = getCredentialEncryptSecret(),
}: {
  data: Record<string, any>;
  prop?: string;
  secret?: string;
}) => {
  if (!data || data[prop] === null || data[prop] === undefined) {
    return;
  }

  if (!secret) {
    return JSON.stringify(data[prop]);
  }

  return CryptoJS.AES.encrypt(JSON.stringify(data[prop]), secret).toString();
};

export const decryptPropIfRequired = ({
  data,
  prop = 'config',
  secret = getCredentialEncryptSecret(),
}: {
  data: Record<string, any>;
  prop?: string;
  secret?: string;
}) => {
  if (!data || data[prop] === null || data[prop] === undefined) {
    return;
  }

  let jsonString = data[prop];

  if (secret) {
    try {
      jsonString = CryptoJS.AES.decrypt(data[prop], secret).toString(
        CryptoJS.enc.Utf8,
      );
    } catch {
      throw new Error('Config decryption failed');
    }
  }

  return typeof jsonString === 'string' ? JSON.parse(jsonString) : jsonString;
};
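A round-trip sketch for the helpers above (not part of the commit). It passes the secret explicitly instead of relying on `NC_CONNECTION_ENCRYPT_KEY`, and the data shape is illustrative:

```ts
import {
  encryptPropIfRequired,
  decryptPropIfRequired,
} from '~/utils/encryptDecrypt';

const secret = 'my-encryption-key'; // illustrative secret
const source = { config: { client: 'pg', connection: { host: 'localhost' } } };

// With a secret, the prop is AES-encrypted; without one it is plain JSON.stringify output.
const encrypted = encryptPropIfRequired({ data: source, prop: 'config', secret });

// Decrypting with the same secret yields the original object back.
const roundTripped = decryptPropIfRequired({
  data: { config: encrypted },
  prop: 'config',
  secret,
});
// roundTripped deep-equals source.config
```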
@ -1,4 +1,4 @@
-import type { NcUpgraderCtx } from './NcUpgrader';
+import type { NcUpgraderCtx } from '~/version-upgrader/NcUpgrader';
 import { MetaTable } from '~/utils/globals';
 import View from '~/models/View';
 import Hook from '~/models/Hook';
@ -1,6 +1,6 @@
 import { UITypes } from 'nocodb-sdk';
 import type { MetaService } from '~/meta/meta.service';
-import type { NcUpgraderCtx } from './NcUpgrader';
+import type { NcUpgraderCtx } from '~/version-upgrader/NcUpgrader';
 import type { SelectOptionsType } from 'nocodb-sdk';
 import type { NcContext } from '~/interface/config';
 import { MetaTable } from '~/utils/globals';
@ -1,4 +1,4 @@
-import type { NcUpgraderCtx } from './NcUpgrader';
+import type { NcUpgraderCtx } from '~/version-upgrader/NcUpgrader';
 import { MetaTable } from '~/utils/globals';

 // before 0.104.3, display value column can be in any position in table
@ -1,6 +1,6 @@
 import { UITypes } from 'nocodb-sdk';
 import type { MetaService } from '~/meta/meta.service';
-import type { NcUpgraderCtx } from './NcUpgrader';
+import type { NcUpgraderCtx } from '~/version-upgrader/NcUpgrader';
 import type { NcContext } from '~/interface/config';
 import { MetaTable } from '~/utils/globals';
 import Column from '~/models/Column';
@ -1,4 +1,4 @@
-import type { NcUpgraderCtx } from './NcUpgrader';
+import type { NcUpgraderCtx } from '~/version-upgrader/NcUpgrader';
 import { MetaTable } from '~/utils/globals';

 export default async function ({ ncMeta }: NcUpgraderCtx) {
@ -0,0 +1,119 @@
|
||||
import CryptoJS from 'crypto-js'; |
||||
import type { NcUpgraderCtx } from '~/version-upgrader/NcUpgrader'; |
||||
import { MetaTable, RootScopes } from '~/utils/globals'; |
||||
|
||||
const logger = { |
||||
log: (message: string) => { |
||||
console.log(`[0225002_ncDatasourceDecrypt ${Date.now()}] ` + message); |
||||
}, |
||||
error: (message: string) => { |
||||
console.error(`[0225002_ncDatasourceDecrypt ${Date.now()}] ` + message); |
||||
}, |
||||
}; |
||||
|
||||
const decryptConfig = async (encryptedConfig: string, secret: string) => { |
||||
if (!encryptedConfig) return encryptedConfig; |
||||
|
||||
const decryptedVal = CryptoJS.AES.decrypt(encryptedConfig, secret).toString( |
||||
CryptoJS.enc.Utf8, |
||||
); |
||||
|
||||
// validate by parsing JSON
|
||||
try { |
||||
JSON.parse(decryptedVal); |
||||
} catch { |
||||
throw new Error('Config decryption failed'); |
||||
} |
||||
return decryptedVal; |
||||
}; |
||||
|
||||
// decrypt datasource details in source table and integration table
|
||||
export default async function ({ ncMeta }: NcUpgraderCtx) { |
||||
let encryptionKey = process.env.NC_AUTH_JWT_SECRET; |
||||
|
||||
if (!encryptionKey) { |
||||
encryptionKey = ( |
||||
await ncMeta.metaGet(RootScopes.ROOT, RootScopes.ROOT, MetaTable.STORE, { |
||||
key: 'nc_auth_jwt_secret', |
||||
}) |
||||
)?.value; |
||||
} |
||||
|
||||
// if encryption key is same as previous, just update is_encrypted flag and return
|
||||
if ( |
||||
process.env.NC_CONNECTION_ENCRYPT_KEY && |
||||
process.env.NC_CONNECTION_ENCRYPT_KEY === encryptionKey |
||||
) { |
||||
logger.log('Encryption key is same as previous. Skipping decryption'); |
||||
await ncMeta.knexConnection(MetaTable.SOURCES).update({ |
||||
is_encrypted: true, |
||||
}); |
||||
await ncMeta.knexConnection(MetaTable.INTEGRATIONS).update({ |
||||
is_encrypted: true, |
||||
}); |
||||
return; |
||||
} |
||||
|
||||
// if encryption key is not present, return
|
||||
if (!encryptionKey) { |
||||
throw Error('Encryption key not found'); |
||||
} |
||||
|
||||
// get all external sources
|
||||
const sources = await ncMeta.knexConnection(MetaTable.SOURCES); |
||||
|
||||
const passed = []; |
||||
|
||||
// iterate, decrypt and update
|
||||
for (const source of sources) { |
||||
if (source?.config) { |
||||
try { |
||||
const decrypted = await decryptConfig(source.config, encryptionKey); |
||||
await ncMeta |
||||
.knexConnection(MetaTable.SOURCES) |
||||
.update({ |
||||
config: decrypted, |
||||
}) |
||||
.where('id', source.id); |
||||
passed.push(true); |
||||
} catch (e) { |
||||
logger.error(`Failed to decrypt source ${source.id}`); |
||||
passed.push(false); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// get all integrations
|
||||
const integrations = await ncMeta.knexConnection(MetaTable.INTEGRATIONS); |
||||
|
||||
// iterate, decrypt and update
|
||||
for (const integration of integrations) { |
||||
if (integration?.config) { |
||||
try { |
||||
const decrypted = await decryptConfig( |
||||
integration.config, |
||||
encryptionKey, |
||||
); |
||||
await ncMeta |
||||
.knexConnection(MetaTable.INTEGRATIONS) |
||||
.update({ |
||||
config: decrypted, |
||||
}) |
||||
.where('id', integration.id); |
||||
passed.push(true); |
||||
} catch (e) { |
||||
logger.error(`Failed to decrypt integration ${integration.id}`); |
||||
passed.push(false); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// if all failed, log and exit
|
||||
if (passed.length > 0 && passed.every((v) => !v)) { |
||||
throw new Error( |
||||
`Failed to decrypt any source or integration. Please configure correct encryption key.`, |
||||
); |
||||
} |
||||
|
||||
logger.log(`Decrypted ${passed.length} sources and integrations`); |
||||
} |
@ -0,0 +1,61 @@
|
||||
const path = require('path'); |
||||
const nodeExternals = require('webpack-node-externals'); |
||||
const webpack = require('webpack'); |
||||
const TerserPlugin = require('terser-webpack-plugin'); |
||||
const { resolveTsAliases } = require('./build-utils/resolveTsAliases'); |
||||
|
||||
module.exports = { |
||||
entry: './src/cli.ts', |
||||
module: { |
||||
rules: [ |
||||
{ |
||||
test: /\.tsx?$/, |
||||
exclude: /node_modules/, |
||||
use: { |
||||
loader: 'ts-loader', |
||||
options: { |
||||
transpileOnly: true, |
||||
}, |
||||
}, |
||||
}, |
||||
], |
||||
}, |
||||
|
||||
optimization: { |
||||
minimize: true, |
||||
minimizer: [ |
||||
new TerserPlugin({ |
||||
extractComments: false, |
||||
}), |
||||
], |
||||
nodeEnv: false, |
||||
}, |
||||
externals: [ |
||||
nodeExternals({ |
||||
allowlist: ['nocodb-sdk'], |
||||
}), |
||||
], |
||||
resolve: { |
||||
extensions: ['.tsx', '.ts', '.js', '.json'], |
||||
alias: resolveTsAliases(path.resolve('tsconfig.json')), |
||||
}, |
||||
mode: 'production', |
||||
output: { |
||||
filename: 'cli.js', |
||||
path: path.resolve(__dirname, '..', 'nc-secret-mgr', 'src/nocodb'), |
||||
library: 'libs', |
||||
libraryTarget: 'umd', |
||||
globalObject: "typeof self !== 'undefined' ? self : this", |
||||
}, |
||||
node: { |
||||
__dirname: false, |
||||
}, |
||||
plugins: [ |
||||
new webpack.EnvironmentPlugin(['EE']), |
||||
new webpack.BannerPlugin({ |
||||
banner: 'This is a generated file. Do not edit', |
||||
entryOnly: true
||||
}), |
||||
], |
||||
target: 'node', |
||||
}; |
@ -0,0 +1,14 @@
const fs = require('fs');
const path = require('path');

const packageJsonPath = path.join(__dirname, '..', 'packages', 'nc-secret-mgr', 'package.json');

const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));

if (!process.env.targetVersion) {
  console.error('Error: targetVersion environment variable is not defined.');
  process.exit(1);
}

packageJson.version = process.env.targetVersion;
fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));