mirror of https://github.com/nocodb/nocodb
աɨռɢӄաօռɢ · 3 years ago · committed by GitHub
124 changed files with 9522 additions and 651 deletions
@@ -0,0 +1,266 @@
|
||||
name: "Release : Executables" |
||||
|
||||
on: |
||||
# Triggered manually |
||||
workflow_dispatch: |
||||
inputs: |
||||
tag: |
||||
description: "Tag name" |
||||
required: true |
||||
# Triggered by release-nocodb.yml |
||||
workflow_call: |
||||
inputs: |
||||
tag: |
||||
description: "Tag name" |
||||
required: true |
||||
type: string |
||||
secrets: |
||||
NC_GITHUB_TOKEN: |
||||
required: true |
||||
jobs: |
||||
build-executables: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
# Get the latest draft release for asset upload url |
||||
- uses: cardinalby/git-get-release-action@v1 |
||||
id: get_release |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
with: |
||||
latest: 1 |
||||
draft: true |
||||
- uses: actions/checkout@v3 |
||||
- name: Cache node modules |
||||
id: cache-npm |
||||
uses: actions/cache@v3 |
||||
env: |
||||
cache-name: cache-node-modules |
||||
with: |
||||
# npm cache files are stored in `~/.npm` on Linux/macOS |
||||
path: ~/.npm |
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} |
||||
restore-keys: | |
||||
${{ runner.os }}-build-${{ env.cache-name }}- |
||||
${{ runner.os }}-build- |
||||
${{ runner.os }}- |
||||
|
||||
- name: Cache pkg modules |
||||
id: cache-pkg |
||||
uses: actions/cache@v3 |
||||
env: |
||||
cache-name: cache-pkg |
||||
with: |
||||
# pkg cache files are stored in `~/.pkg-cache` |
||||
path: ~/.pkg-cache |
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} |
||||
restore-keys: | |
||||
${{ runner.os }}-build-${{ env.cache-name }}- |
||||
${{ runner.os }}-build- |
||||
${{ runner.os }}- |
||||
|
||||
# for building images for all platforms these libraries are required in Linux |
||||
- name: Install QEMU and ldid |
||||
run: | |
||||
sudo apt update |
||||
# Install qemu |
||||
sudo apt install qemu binfmt-support qemu-user-static |
||||
# install ldid |
||||
git clone https://github.com/daeken/ldid.git |
||||
cd ./ldid |
||||
./make.sh |
||||
sudo cp ./ldid /usr/local/bin |
||||
|
||||
- uses: actions/setup-node@v3 |
||||
with: |
||||
node-version: 16 |
||||
|
||||
- name: Install nocodb, other dependencies and build executables
env:
# TAG is referenced below when installing nocodb; define it from the workflow inputs
TAG: ${{ github.event.inputs.tag || inputs.tag }}
run: |
||||
cd ./scripts/pkg-executable |
||||
|
||||
# Install nocodb version based on provided tag name |
||||
npm i -E nocodb@$TAG |
||||
|
||||
# install npm dependencies
||||
npm i |
||||
|
||||
# Copy sqlite binaries |
||||
rsync -rvzhP ./binaries/binding/ ./node_modules/sqlite3/lib/binding/ |
||||
|
||||
# clean up code to optimize size |
||||
npx modclean --patterns="default:*" --ignore="nc-lib-gui/**,dayjs/**,express-status-monitor/**,sqlite3/**" --run |
||||
|
||||
# build executables |
||||
npm run build |
||||
|
||||
ls ./dist |
||||
|
||||
# Move macOS executables for signing |
||||
mkdir ./mac-dist |
||||
mv ./dist/Noco-macos-arm64 ./mac-dist/ |
||||
mv ./dist/Noco-macos-x64 ./mac-dist/ |
||||
|
||||
- name: Upload win-arm64 build to asset |
||||
id: upload-release-asset |
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ steps.get_release.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/dist/Noco-win-arm64.exe |
||||
asset_name: Noco-win-arm64 |
||||
asset_content_type: application/octet-stream |
||||
|
||||
- name: Upload win-x64 build to asset |
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ steps.get_release.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/dist/Noco-win-x64.exe |
||||
asset_name: Noco-win-x64 |
||||
asset_content_type: application/octet-stream |
||||
|
||||
- name: Upload linux-arm64 build to asset |
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ steps.get_release.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/dist/Noco-linux-arm64 |
||||
asset_name: Noco-linux-arm64 |
||||
asset_content_type: application/octet-stream |
||||
|
||||
- name: Upload linux-x64 build to asset |
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ steps.get_release.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/dist/Noco-linux-x64 |
||||
asset_name: Noco-linux-x64 |
||||
asset_content_type: application/octet-stream |
||||
|
||||
- uses: actions/upload-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
retention-days: 1 |
||||
outputs: |
||||
upload_url: ${{ steps.get_release.outputs.upload_url }} |
||||
sign-mac-executables: |
||||
runs-on: macos-latest |
||||
needs: build-executables |
||||
steps: |
||||
|
||||
- uses: actions/download-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
|
||||
- name: Sign macOS executables |
||||
run: | |
||||
/usr/bin/codesign --force -s - ./scripts/pkg-executable/mac-dist/Noco-macos-arm64 -v |
||||
/usr/bin/codesign --force -s - ./scripts/pkg-executable/mac-dist/Noco-macos-x64 -v |
||||
|
||||
- uses: actions/upload-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
retention-days: 1 |
||||
|
||||
|
||||
publish-mac-executables-and-homebrew: |
||||
needs: [sign-mac-executables,build-executables] |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/download-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: scripts/pkg-executable/mac-dist |
||||
|
||||
|
||||
|
||||
- uses: actions/checkout@v3 |
||||
with: |
||||
path: 'homebrew-nocodb' |
||||
token: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
repository: 'nocodb/homebrew-nocodb' |
||||
fetch-depth: 0 |
||||
|
||||
- name: Compress files and calculate checksum |
||||
run: | |
||||
cd ./scripts/pkg-executable |
||||
cp ./mac-dist/Noco-macos-x64 ./mac-dist/nocodb |
||||
tar -czf ./mac-dist/nocodb.tar.gz ./mac-dist/nocodb |
||||
rm ./mac-dist/nocodb |
||||
echo "::set-output name=CHECKSUM::$(shasum -a 256 ./mac-dist/nocodb.tar.gz | awk '{print $1}')" |
||||
id: compress |
||||
|
||||
|
||||
- name: Upload macos-x64 build to asset |
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ needs.build-executables.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/mac-dist/Noco-macos-x64 |
||||
asset_name: Noco-macos-x64 |
||||
asset_content_type: application/octet-stream |
||||
|
||||
|
||||
|
||||
- name: Upload macos-arm64 build to asset |
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ needs.build-executables.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/mac-dist/Noco-macos-arm64 |
||||
asset_name: Noco-macos-arm64 |
||||
asset_content_type: application/octet-stream |
||||
|
||||
|
||||
|
||||
- name: Upload macos compressed build (for Homebrew) to asset
||||
uses: actions/upload-release-asset@v1 |
||||
env: |
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
||||
with: |
||||
upload_url: ${{ needs.build-executables.outputs.upload_url }} |
||||
asset_path: ./scripts/pkg-executable/mac-dist/nocodb.tar.gz |
||||
asset_name: nocodb.tar.gz |
||||
asset_content_type: application/octet-stream |
||||
|
||||
|
||||
- name: Generate Homebrew Formula class and push |
||||
run: | |
||||
FORMULA_CLASS_STR=$(cat << EOF |
||||
class Nocodb < Formula |
||||
desc "Get Human Readable file size information. - CLI" |
||||
homepage "https://github.com/nocodb/nocodb" |
||||
url "https://github.com/nocodb/nocodb/releases/download/${{ github.event.inputs.tag || inputs.tag }}/nocodb.tar.gz" |
||||
sha256 "${{ steps.compress.outputs.CHECKSUM }}" |
||||
license "MIT" |
||||
version "${{ github.event.inputs.tag || inputs.tag }}" |
||||
|
||||
def install |
||||
bin.install "nocodb" |
||||
end |
||||
end |
||||
EOF |
||||
) |
||||
|
||||
cd ./homebrew-nocodb |
||||
|
||||
printf "$FORMULA_CLASS_STR" > ./Formula/nocodb.rb |
||||
|
||||
git config user.name 'github-actions[bot]' |
||||
git config user.email 'github-actions[bot]@users.noreply.github.com' |
||||
|
||||
git commit ./Formula/nocodb.rb -m "Automatic publish" |
||||
git push |
||||
|
||||
|
||||
|
||||
|
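For context, once this generated formula lands in `nocodb/homebrew-nocodb`, end users would typically install the packaged binary through Homebrew roughly as follows. This is a sketch inferred from the repository name used in the checkout step above; the exact tap/formula invocation is an assumption, not something stated in the workflow.

```bash
# Tap and formula names inferred from the nocodb/homebrew-nocodb repository (assumption)
brew tap nocodb/nocodb
brew install nocodb

# Then start NocoDB from the installed binary
nocodb
```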
@@ -0,0 +1,158 @@
|
||||
name: "Release : Timely Executables" |
||||
|
||||
on: |
||||
# Triggered manually |
||||
workflow_dispatch: |
||||
inputs: |
||||
tag: |
||||
description: "Timely version" |
||||
required: true |
||||
# Triggered by release-nightly-dev.yml / release-pr.yml |
||||
workflow_call: |
||||
inputs: |
||||
tag: |
||||
description: "Timely version" |
||||
required: true |
||||
type: string |
||||
secrets: |
||||
NC_GITHUB_TOKEN: |
||||
required: true |
||||
jobs: |
||||
build-executables: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v3 |
||||
with: |
||||
token: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
repository: 'nocodb/nocodb-timely' |
||||
- name: Cache node modules |
||||
id: cache-npm |
||||
uses: actions/cache@v3 |
||||
env: |
||||
cache-name: cache-node-modules |
||||
with: |
||||
# npm cache files are stored in `~/.npm` on Linux/macOS |
||||
path: ~/.npm |
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} |
||||
restore-keys: | |
||||
${{ runner.os }}-build-${{ env.cache-name }}- |
||||
${{ runner.os }}-build- |
||||
${{ runner.os }}- |
||||
- name: Cache pkg modules |
||||
id: cache-pkg |
||||
uses: actions/cache@v3 |
||||
env: |
||||
cache-name: cache-pkg |
||||
with: |
||||
# pkg cache files are stored in `~/.pkg-cache` |
||||
path: ~/.pkg-cache |
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} |
||||
restore-keys: | |
||||
${{ runner.os }}-build-${{ env.cache-name }}- |
||||
${{ runner.os }}-build- |
||||
${{ runner.os }}- |
||||
- name: Install QEMU and ldid |
||||
run: | |
||||
sudo apt update |
||||
# Install qemu |
||||
sudo apt install qemu binfmt-support qemu-user-static |
||||
# install ldid |
||||
git clone https://github.com/daeken/ldid.git |
||||
cd ./ldid |
||||
./make.sh |
||||
sudo cp ./ldid /usr/local/bin |
||||
|
||||
- name: Update nocodb-timely |
||||
env: |
||||
TAG: ${{ github.event.inputs.tag || inputs.tag }} |
||||
run: | |
||||
npm i -E nocodb-daily@$TAG |
||||
|
||||
git config user.name 'github-actions[bot]' |
||||
git config user.email 'github-actions[bot]@users.noreply.github.com' |
||||
|
||||
git commit package.json -m "Update to $TAG" |
||||
git tag $TAG |
||||
git push --tags |
||||
|
||||
|
||||
- uses: actions/setup-node@v3 |
||||
with: |
||||
node-version: 16 |
||||
|
||||
- name: Install dependencies and build executables
||||
run: | |
||||
# install npm dependencies
||||
npm i |
||||
|
||||
# Copy sqlite binaries |
||||
rsync -rvzhP ./binaries/binding/ ./node_modules/sqlite3/lib/binding/ |
||||
|
||||
# clean up code to optimize size |
||||
npx modclean --patterns="default:*" --ignore="nc-lib-gui-daily/**,dayjs/**,express-status-monitor/**,sqlite3/**" --run |
||||
|
||||
# build executables |
||||
npm run build |
||||
|
||||
mkdir ./mac-dist |
||||
mv ./dist/Noco-macos-arm64 ./mac-dist/ |
||||
mv ./dist/Noco-macos-x64 ./mac-dist/ |
||||
|
||||
- name: Upload executables(except mac executables) to release |
||||
uses: svenstaro/upload-release-action@v2 |
||||
with: |
||||
repo_token: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
file: dist/** |
||||
tag: ${{ github.event.inputs.tag || inputs.tag }} |
||||
overwrite: true |
||||
file_glob: true |
||||
repo_name: nocodb/nocodb-timely |
||||
|
||||
- uses: actions/upload-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: mac-dist |
||||
retention-days: 1 |
||||
|
||||
sign-mac-executables: |
||||
runs-on: macos-latest |
||||
needs: build-executables |
||||
steps: |
||||
|
||||
- uses: actions/download-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: mac-dist |
||||
|
||||
- name: Sign macOS executables |
||||
run: | |
||||
/usr/bin/codesign --force -s - ./mac-dist/Noco-macos-arm64 -v |
||||
/usr/bin/codesign --force -s - ./mac-dist/Noco-macos-x64 -v |
||||
|
||||
- uses: actions/upload-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: mac-dist |
||||
retention-days: 1 |
||||
|
||||
|
||||
publish-mac-executables: |
||||
needs: sign-mac-executables |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/download-artifact@master |
||||
with: |
||||
name: ${{ github.event.inputs.tag || inputs.tag }} |
||||
path: mac-dist |
||||
|
||||
- name: Upload mac executables to release |
||||
uses: svenstaro/upload-release-action@v2 |
||||
with: |
||||
repo_token: ${{ secrets.NC_GITHUB_TOKEN }} |
||||
file: mac-dist/** |
||||
tag: ${{ github.event.inputs.tag || inputs.tag }} |
||||
overwrite: true |
||||
file_glob: true |
||||
repo_name: nocodb/nocodb-timely |
||||
|
||||
|
@@ -0,0 +1,484 @@
|
||||
<template> |
||||
<div :class="{'pt-10':!hideLabel}"> |
||||
<v-dialog v-model="dropOrUpload" max-width="600"> |
||||
<v-card max-width="600"> |
||||
<v-tabs height="30"> |
||||
<v-tab> |
||||
<v-icon small class="mr-1"> |
||||
mdi-file-upload-outline |
||||
</v-icon> |
||||
<span class="caption text-capitalize">Upload</span> |
||||
</v-tab> |
||||
<!-- <v-tab>--> |
||||
<!-- <v-icon small class="mr-1"> |
||||
mdi-link-variant |
||||
</v-icon> |
||||
<span class="caption text-capitalize">URL</span> |
||||
</v-tab>--> |
||||
<v-tab> |
||||
<v-icon small class="mr-1"> |
||||
mdi-link-variant |
||||
</v-icon> |
||||
<span class="caption text-capitalize">String</span> |
||||
</v-tab> |
||||
|
||||
<v-tab-item> |
||||
<div class="nc-json-import-tab-item "> |
||||
<div |
||||
class="nc-droppable d-flex align-center justify-center flex-column" |
||||
:style="{ |
||||
background : dragOver ? '#7772' : '' |
||||
}" |
||||
@click="$refs.file.click()" |
||||
@drop.prevent="dropHandler" |
||||
@dragover.prevent="dragOver = true" |
||||
@dragenter.prevent="dragOver = true" |
||||
@dragexit="dragOver = false" |
||||
@dragleave="dragOver = false" |
||||
@dragend="dragOver = false" |
||||
> |
||||
<x-icon :color="['primary','grey']" size="50"> |
||||
mdi-file-plus-outline |
||||
</x-icon> |
||||
<p class="title mb-1 mt-2"> |
||||
<!-- Select File to Upload--> |
||||
{{ $t('msg.info.upload') }} |
||||
</p> |
||||
<p class="grey--text mb-1"> |
||||
<!-- or drag and drop file--> |
||||
{{ $t('msg.info.upload_sub') }} |
||||
</p> |
||||
|
||||
<p v-if="quickImportType == 'excel'" class="caption grey--text"> |
||||
<!-- Supported: .xls, .xlsx, .xlsm, .ods, .ots --> |
||||
{{ $t('msg.info.excelSupport') }} |
||||
</p> |
||||
</div> |
||||
</div> |
||||
</v-tab-item> |
||||
<!-- <v-tab-item> |
||||
<div class="nc-json-import-tab-item align-center"> |
||||
<div class="pa-4 d-100 h-100"> |
||||
<v-form ref="form" v-model="valid"> |
||||
<div class="d-flex"> |
||||
<!– todo: i18n label–> |
||||
<v-text-field |
||||
v-model="url" |
||||
hide-details="auto" |
||||
type="url" |
||||
label="Enter JSON file url" |
||||
class="caption" |
||||
outlined |
||||
dense |
||||
:rules=" |
||||
[ |
||||
v => !!v || $t('general.required'), |
||||
v => !(/(10)(\.([2]([0-5][0-5]|[01234][6-9])|[1][0-9][0-9]|[1-9][0-9]|[0-9])){3}|(172)\.(1[6-9]|2[0-9]|3[0-1])(\.(2[0-4][0-9]|25[0-5]|[1][0-9][0-9]|[1-9][0-9]|[0-9])){2}|(192)\.(168)(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])){2}|(0.0.0.0)|localhost?/g).test(v) || errorMessages.ipBlockList |
||||
]" |
||||
/> |
||||
<v-btn v-t="['c:project:create:json:load-url']" class="ml-3" color="primary" @click="loadUrl"> |
||||
<!–Load–> |
||||
{{ $t('general.load') }} |
||||
</v-btn> |
||||
</div> |
||||
</v-form> |
||||
</div> |
||||
</div> |
||||
</v-tab-item>--> |
||||
<v-tab-item> |
||||
<div class="nc-json-import-tab-item align-center"> |
||||
<div class="pa-4 d-100 h-100"> |
||||
<v-form ref="form" v-model="valid"> |
||||
<div class="nc-json-editor-wrapper"> |
||||
<v-btn small class="nc-json-format-btn" @click="formatJson"> |
||||
Format |
||||
</v-btn> |
||||
|
||||
<!--label="Enter excel file url"--> |
||||
<monaco-json-editor |
||||
ref="editor" |
||||
v-model="jsonString" |
||||
style="height:320px" |
||||
/> |
||||
<div class="text-center mt-4"> |
||||
<v-btn v-t="['c:project:create:excel:load-url']" class="ml-3" color="primary" @click="loadJsonString"> |
||||
<!--Load--> |
||||
{{ $t('general.load') }} |
||||
</v-btn> |
||||
</div> |
||||
</div> |
||||
</v-form> |
||||
</div> |
||||
</div> |
||||
</v-tab-item> |
||||
</v-tabs> |
||||
|
||||
<div class="px-4 pb-2"> |
||||
<div class="d-flex"> |
||||
<v-spacer /> |
||||
<span class="caption pointer grey--text" @click="showMore = !showMore"> |
||||
{{ showMore ? $t('general.hideAll') : $t('general.showMore') }} |
||||
<v-icon small color="grey lighten-1">mdi-menu-{{ showMore ? 'up' : 'down' }}</v-icon> |
||||
</span> |
||||
</div> |
||||
<div class="mb-2 pt-2 nc-json-import-options" :style="{ maxHeight: showMore ? '200px' : '0'}"> |
||||
<p /> |
||||
<!--hint="# of rows to parse to infer data type"--> |
||||
<v-text-field |
||||
v-model="parserConfig.maxRowsToParse" |
||||
style="max-width: 250px" |
||||
class="caption mx-auto" |
||||
dense |
||||
persistent-hint |
||||
:hint="$t('msg.info.footMsg')" |
||||
outlined |
||||
type="number" |
||||
/> |
||||
|
||||
<v-checkbox |
||||
v-model="parserConfig.normalizeNested" |
||||
style="width: 250px" |
||||
class="mx-auto mb-2" |
||||
dense |
||||
hide-details |
||||
> |
||||
<template #label> |
||||
<span class="caption">Flatten nested</span> |
||||
<v-tooltip bottom position-y=""> |
||||
<template #activator="{ on }"> |
||||
<v-icon small class="ml-1" v-on="on"> |
||||
mdi-information-outline |
||||
</v-icon> |
||||
</template> |
||||
<div class="caption" style="width: 260px"> |
||||
If the flatten-nested option is enabled, nested objects are flattened into root-level properties; otherwise a nested object is treated as a JSON column.
||||
<br> |
||||
<br> |
||||
For example, the following input: <code class="caption font-weight-bold">{
||||
"prop1": { |
||||
"prop2": "value" |
||||
}, |
||||
"prop3": "value", |
||||
"prop4": 1 |
||||
}</code> will be treated as:
||||
<code class="caption font-weight-bold">{ |
||||
"prop1_prop2": "value", |
||||
"prop3": "value", |
||||
"prop4": 1 |
||||
}</code> |
||||
</div> |
||||
</v-tooltip> |
||||
</template> |
||||
</v-checkbox> |
||||
<v-checkbox |
||||
v-model="parserConfig.importData" |
||||
style="width: 250px" |
||||
class="mx-auto mb-2" |
||||
dense |
||||
hide-details |
||||
> |
||||
<template #label> |
||||
<span class="caption">Import data</span> |
||||
</template> |
||||
</v-checkbox> |
||||
</div> |
||||
</div> |
||||
</v-card> |
||||
</v-dialog> |
||||
|
||||
<v-tooltip bottom> |
||||
<template #activator="{on}"> |
||||
<input |
||||
ref="file" |
||||
class="nc-json-import-input" |
||||
type="file" |
||||
style="display: none" |
||||
accept=".json" |
||||
@change="_change($event)" |
||||
> |
||||
<v-btn |
||||
|
||||
v-if="!hideLabel" |
||||
small |
||||
outlined |
||||
v-on="on" |
||||
@click="$refs.file.click()" |
||||
> |
||||
<v-icon small class="mr-1"> |
||||
mdi-file-excel-outline |
||||
</v-icon> |
||||
<!--Import--> |
||||
{{ $t('activity.import') }} |
||||
</v-btn> |
||||
</template> |
||||
<span class="caption">Create template from JSON</span> |
||||
</v-tooltip> |
||||
|
||||
<v-dialog v-if="templateData" v-model="templateEditorModal" max-width="1000"> |
||||
<v-card class="pa-6" min-width="500"> |
||||
<template-editor :project-template.sync="templateData" json-import :quick-import-type="quickImportType"> |
||||
<template #toolbar="{valid}"> |
||||
<h3 class="mt-2 grey--text"> |
||||
<span> |
||||
JSON Import |
||||
</span> |
||||
</h3> |
||||
<v-spacer /> |
||||
<v-spacer /> |
||||
<create-project-from-template-btn |
||||
:template-data="templateData" |
||||
:import-data="importData" |
||||
:import-to-project="importToProject" |
||||
json-import |
||||
:valid="valid" |
||||
create-gql-text="Import as GQL Project" |
||||
create-rest-text="Import as REST Project" |
||||
@closeModal="$emit('closeModal'),templateEditorModal = false" |
||||
> |
||||
<!--Import Excel--> |
||||
<span v-if="quickImportType === 'excel'"> |
||||
{{ $t('activity.importExcel') }} |
||||
</span> |
||||
<!--Import CSV--> |
||||
<span v-if="quickImportType === 'csv'"> |
||||
{{ $t('activity.importCSV') }} |
||||
</span> |
||||
</create-project-from-template-btn> |
||||
</template> |
||||
</template-editor> |
||||
</v-card> |
||||
</v-dialog> |
||||
</div> |
||||
</template> |
||||
|
||||
<script> |
||||
|
||||
import TemplateEditor from '~/components/templates/Editor' |
||||
import CreateProjectFromTemplateBtn from '~/components/templates/CreateProjectFromTemplateBtn' |
||||
import MonacoJsonEditor from '~/components/monaco/MonacoJsonEditor' |
||||
import JSONTemplateAdapter from '~/components/import/templateParsers/JSONTemplateAdapter' |
||||
import JSONUrlTemplateAdapter from '~/components/import/templateParsers/JSONUrlTemplateAdapter' |
||||
|
||||
export default { |
||||
name: 'JsonImport', |
||||
components: { MonacoJsonEditor, CreateProjectFromTemplateBtn, TemplateEditor }, |
||||
props: { |
||||
hideLabel: Boolean, |
||||
value: Boolean, |
||||
importToProject: Boolean, |
||||
quickImportType: String |
||||
}, |
||||
data() { |
||||
return { |
||||
templateEditorModal: false, |
||||
valid: null, |
||||
templateData: null, |
||||
importData: null, |
||||
dragOver: false, |
||||
url: '', |
||||
showMore: false, |
||||
parserConfig: { |
||||
maxRowsToParse: 500, |
||||
normalizeNested: true, |
||||
importData: true |
||||
}, |
||||
filename: '', |
||||
jsonString: '', |
||||
errorMessages: { |
||||
ipBlockList: 'IP Not allowed!', |
||||
importJSON: 'Target file is not an accepted file type. The accepted file type is .json!' |
||||
} |
||||
} |
||||
}, |
||||
computed: { |
||||
dropOrUpload: { |
||||
set(v) { |
||||
this.$emit('input', v) |
||||
}, |
||||
get() { |
||||
return this.value |
||||
} |
||||
}, |
||||
tables() { |
||||
return this.$store.state.project.tables || [] |
||||
} |
||||
}, |
||||
mounted() { |
||||
if (this.$route && this.$route.query && this.$route.query.excelUrl) { |
||||
this.url = this.$route.query.excelUrl |
||||
this.loadUrl() |
||||
} |
||||
}, |
||||
methods: { |
||||
formatJson() { |
||||
console.log(this.$refs.editor) |
||||
this.$refs.editor.format() |
||||
}, |
||||
|
||||
selectFile() { |
||||
this.$refs.file.files = null |
||||
this.$refs.file.click() |
||||
}, |
||||
|
||||
_change(event) { |
||||
const files = event.target.files |
||||
if (files && files[0]) { |
||||
this._file(files[0]) |
||||
event.target.value = '' |
||||
} |
||||
}, |
||||
async _file(file) { |
||||
this.templateData = null |
||||
this.importData = null |
||||
this.$store.commit('loader/MutMessage', 'Loading json file')
||||
let i = 0 |
||||
const int = setInterval(() => { |
||||
this.$store.commit('loader/MutMessage', `Loading json file${'.'.repeat(++i % 4)}`)
||||
}, 1000) |
||||
this.dropOrUpload = false |
||||
const reader = new FileReader() |
||||
this.filename = file.name |
||||
|
||||
reader.onload = async(e) => { |
||||
const ab = e.target.result |
||||
await this.parseAndExtractData('file', ab, file.name) |
||||
this.$store.commit('loader/MutMessage', null) |
||||
|
||||
clearInterval(int) |
||||
} |
||||
|
||||
const handleEvent = (event) => { |
||||
this.$store.commit('loader/MutMessage', `${event.type}: ${event.loaded} bytes transferred`) |
||||
} |
||||
|
||||
reader.addEventListener('progress', handleEvent) |
||||
reader.onerror = (e) => { |
||||
console.log('error', e) |
||||
this.$store.commit('loader/MutClear') |
||||
} |
||||
reader.readAsText(file) |
||||
}, |
||||
|
||||
async parseAndExtractData(type, val, name) { |
||||
try { |
||||
let templateGenerator |
||||
this.templateData = null |
||||
this.importData = null |
||||
switch (type) { |
||||
case 'file': |
||||
templateGenerator = new JSONTemplateAdapter(name, val, this.parserConfig) |
||||
break |
||||
case 'url': |
||||
templateGenerator = new JSONUrlTemplateAdapter(val, this.$store, this.parserConfig, this.$api) |
||||
break |
||||
case 'string': |
||||
templateGenerator = new JSONTemplateAdapter(name, val, this.parserConfig) |
||||
break |
||||
} |
||||
await templateGenerator.init() |
||||
templateGenerator.parse() |
||||
this.templateData = templateGenerator.getTemplate() |
||||
|
||||
this.templateData.tables[0].table_name = this.populateUniqueTableName() |
||||
|
||||
this.importData = templateGenerator.getData() |
||||
this.templateEditorModal = true |
||||
} catch (e) { |
||||
console.log(e) |
||||
this.$toast |
||||
.error(await this._extractSdkResponseErrorMsg(e)) |
||||
.goAway(3000) |
||||
} |
||||
}, |
||||
|
||||
dropHandler(ev) { |
||||
this.dragOver = false |
||||
let file |
||||
if (ev.dataTransfer.items) { |
||||
// Use DataTransferItemList interface to access the file(s) |
||||
if (ev.dataTransfer.items.length && ev.dataTransfer.items[0].kind === 'file') { |
||||
file = ev.dataTransfer.items[0].getAsFile() |
||||
} |
||||
} else if (ev.dataTransfer.files.length) { |
||||
file = ev.dataTransfer.files[0] |
||||
} |
||||
|
||||
if (!file) { |
||||
return |
||||
} |
||||
|
||||
if (!/.*\.json/.test(file.name)) { |
||||
return this.$toast.error(this.errorMessages.importJSON).goAway(3000) |
||||
} |
||||
|
||||
this._file(file) |
||||
}, |
||||
dragOverHandler(ev) { |
||||
// Prevent default behavior (Prevent file from being opened) |
||||
ev.preventDefault() |
||||
}, |
||||
populateUniqueTableName() { |
||||
let c = 1 |
||||
while (this.tables.some(t => t.title === `Sheet${c}`)) { c++ } |
||||
return `Sheet${c}` |
||||
}, |
||||
async loadUrl() { |
||||
if ((this.$refs.form && !this.$refs.form.validate()) || !this.url) { |
||||
return |
||||
} |
||||
|
||||
this.$store.commit('loader/MutMessage', 'Loading json file from url') |
||||
|
||||
let i = 0 |
||||
const int = setInterval(() => { |
||||
this.$store.commit('loader/MutMessage', `Loading json file${'.'.repeat(++i % 4)}`) |
||||
}, 1000) |
||||
|
||||
this.dropOrUpload = false |
||||
|
||||
await this.parseAndExtractData('url', this.url, '') |
||||
clearInterval(int) |
||||
this.$store.commit('loader/MutClear') |
||||
}, |
||||
|
||||
async loadJsonString() { |
||||
await this.parseAndExtractData('string', this.jsonString) |
||||
this.$store.commit('loader/MutClear') |
||||
} |
||||
|
||||
} |
||||
} |
||||
</script> |
||||
|
||||
<style scoped> |
||||
.nc-droppable { |
||||
width: 100%; |
||||
min-height: 200px; |
||||
border-radius: 4px; |
||||
border: 2px dashed #ddd; |
||||
} |
||||
|
||||
.nc-json-import-tab-item { |
||||
min-height: 400px; |
||||
padding: 20px; |
||||
display: flex; |
||||
align-items: stretch; |
||||
width: 100%; |
||||
} |
||||
|
||||
.nc-json-import-options { |
||||
transition: .4s max-height; |
||||
overflow: hidden; |
||||
} |
||||
|
||||
.nc-json-editor-wrapper{ |
||||
position: relative; |
||||
} |
||||
|
||||
.nc-json-format-btn{ |
||||
position:absolute; |
||||
right:4px; |
||||
top:4px; |
||||
z-index:9; |
||||
} |
||||
</style> |
@@ -0,0 +1,150 @@
|
||||
import { TemplateGenerator, UITypes } from 'nocodb-sdk' |
||||
import { |
||||
extractMultiOrSingleSelectProps, |
||||
getCheckboxValue, |
||||
isCheckboxType, isDecimalType, isEmailType, |
||||
isMultiLineTextType, isUrlType |
||||
} from '~/components/import/templateParsers/parserHelpers' |
||||
|
||||
const jsonTypeToUidt = { |
||||
number: UITypes.Number, |
||||
string: UITypes.SingleLineText, |
||||
date: UITypes.DateTime, |
||||
boolean: UITypes.Checkbox, |
||||
object: UITypes.JSON |
||||
} |
||||
|
||||
const extractNestedData = (obj, path) => path.reduce((val, key) => val && val[key], obj) |
||||
|
||||
export default class JSONTemplateAdapter extends TemplateGenerator { |
||||
constructor(name = 'test', data, parserConfig = {}) { |
||||
super() |
||||
this.config = { |
||||
maxRowsToParse: 500, |
||||
...parserConfig |
||||
} |
||||
this.name = name |
||||
this._jsonData = typeof data === 'string' ? JSON.parse(data) : data |
||||
this.project = { |
||||
title: this.name, |
||||
tables: [] |
||||
} |
||||
this.data = {} |
||||
} |
||||
|
||||
async init() { |
||||
} |
||||
|
||||
parseData() {
// note: `this.csv` was a leftover from the CSV adapter and does not exist on this class;
// parse() builds this.project and this.data, so expose the parsed columns here instead
this.columns = this.project.tables.length ? this.project.tables[0].columns : []
||||
} |
||||
|
||||
getColumns() { |
||||
return this.columns |
||||
} |
||||
|
||||
getData() { |
||||
return this.data |
||||
} |
||||
|
||||
get jsonData() { |
||||
return Array.isArray(this._jsonData) ? this._jsonData : [this._jsonData] |
||||
} |
||||
|
||||
parse() { |
||||
const jsonData = this.jsonData |
||||
const tn = 'table' |
||||
const table = { table_name: tn, ref_table_name: tn, columns: [] } |
||||
|
||||
this.data[tn] = [] |
||||
|
||||
for (const col of Object.keys(jsonData[0])) { |
||||
const columns = this._parseColumn([col], jsonData) |
||||
table.columns.push(...columns) |
||||
} |
||||
|
||||
if (this.config.importData) { this._parseTableData(table) } |
||||
|
||||
this.project.tables.push(table) |
||||
} |
||||
|
||||
getTemplate() { |
||||
return this.project |
||||
} |
||||
|
||||
_parseColumn(path = [], jsonData = this.jsonData, firstRowVal = path.reduce((val, k) => val && val[k], this.jsonData[0])) { |
||||
const columns = [] |
||||
// parse nested
|
||||
if (firstRowVal && typeof firstRowVal === 'object' && !Array.isArray(firstRowVal) && this.config.normalizeNested) { |
||||
for (const key of Object.keys(firstRowVal)) { |
||||
const normalizedNestedColumns = this._parseColumn([...path, key], this.jsonData, firstRowVal[key]) |
||||
columns.push(...normalizedNestedColumns) |
||||
} |
||||
} else { |
||||
const cn = path.join('_').replace(/\W/g, '_').trim() |
||||
|
||||
const column = { |
||||
column_name: cn, |
||||
ref_column_name: cn, |
||||
path |
||||
} |
||||
|
||||
column.uidt = jsonTypeToUidt[typeof firstRowVal] || UITypes.SingleLineText |
||||
|
||||
const colData = jsonData.map(r => extractNestedData(r, path)) |
||||
Object.assign(column, this._getColumnUIDTAndMetas(colData, column.uidt)) |
||||
columns.push(column) |
||||
} |
||||
|
||||
return columns |
||||
} |
||||
|
||||
_getColumnUIDTAndMetas(colData, defaultType) { |
||||
const colProps = { uidt: defaultType } |
||||
// todo: optimize
|
||||
if (colProps.uidt === UITypes.SingleLineText) { |
||||
// check for long text
|
||||
if (isMultiLineTextType(colData)) { |
||||
colProps.uidt = UITypes.LongText |
||||
} else if (isEmailType(colData)) {
colProps.uidt = UITypes.Email
} else if (isUrlType(colData)) {
||||
colProps.uidt = UITypes.URL |
||||
} else { |
||||
const checkboxType = isCheckboxType(colData) |
||||
if (checkboxType.length === 1) { |
||||
colProps.uidt = UITypes.Checkbox |
||||
} else { |
||||
Object.assign(colProps, extractMultiOrSingleSelectProps(colData)) |
||||
} |
||||
} |
||||
} else if (colProps.uidt === UITypes.Number) { |
||||
if (isDecimalType(colData)) { |
||||
colProps.uidt = UITypes.Decimal |
||||
} |
||||
} |
||||
return colProps |
||||
} |
||||
|
||||
_parseTableData(tableMeta) { |
||||
for (const row of this.jsonData) { |
||||
const rowData = {} |
||||
for (let i = 0; i < tableMeta.columns.length; i++) { |
||||
const value = extractNestedData(row, tableMeta.columns[i].path || []) |
||||
if (tableMeta.columns[i].uidt === UITypes.Checkbox) { |
||||
rowData[tableMeta.columns[i].ref_column_name] = getCheckboxValue(value) |
||||
} else if (tableMeta.columns[i].uidt === UITypes.SingleSelect || tableMeta.columns[i].uidt === UITypes.MultiSelect) { |
||||
rowData[tableMeta.columns[i].ref_column_name] = (value || '').toString().trim() || null |
||||
} else if (tableMeta.columns[i].uidt === UITypes.JSON) { |
||||
rowData[tableMeta.columns[i].ref_column_name] = JSON.stringify(value) |
||||
} else { |
||||
// todo: do parsing if necessary based on type
|
||||
rowData[tableMeta.columns[i].column_name] = value |
||||
} |
||||
} |
||||
this.data[tableMeta.ref_table_name].push(rowData) |
||||
// rowIndex++
|
||||
} |
||||
} |
||||
} |
@@ -0,0 +1,21 @@
|
||||
import JSONTemplateAdapter from '~/components/import/templateParsers/JSONTemplateAdapter' |
||||
|
||||
export default class JSONUrlTemplateAdapter extends JSONTemplateAdapter { |
||||
constructor(url, $store, parserConfig, $api) { |
||||
const name = url.split('/').pop() |
||||
super(name, null, parserConfig) |
||||
this.url = url |
||||
this.$api = $api |
||||
this.$store = $store |
||||
} |
||||
|
||||
async init() { |
||||
const data = await this.$api.utils.axiosRequestMake({ |
||||
apiMeta: { |
||||
url: this.url |
||||
} |
||||
}) |
||||
this._jsonData = data |
||||
await super.init() |
||||
} |
||||
} |
@@ -0,0 +1,69 @@
|
||||
<template> |
||||
<input |
||||
v-model="localValue" |
||||
:placeholder="durationPlaceholder" |
||||
readonly |
||||
> |
||||
</template> |
||||
|
||||
<script> |
||||
import { durationOptions, convertMS2Duration } from '~/helpers/durationHelper' |
||||
|
||||
export default { |
||||
name: 'DurationCell', |
||||
props: { |
||||
column: Object, |
||||
value: [String, Number] |
||||
}, |
||||
data: () => ({ |
||||
showWarningMessage: false, |
||||
localValue: null |
||||
}), |
||||
computed: { |
||||
durationPlaceholder() { |
||||
return durationOptions[this.column?.meta?.duration || 0].title |
||||
} |
||||
}, |
||||
watch: { |
||||
'column.meta.duration'(newValue, oldValue) { |
||||
if (oldValue !== newValue) { |
||||
this.localValue = convertMS2Duration(this.value, newValue) |
||||
} |
||||
}, |
||||
value(val, oldVal) { |
||||
this.localValue = convertMS2Duration(val !== oldVal && (!val && val !== 0) ? oldVal : val, this.column?.meta?.duration || 0) |
||||
} |
||||
}, |
||||
created() { |
||||
this.localValue = convertMS2Duration(this.value, this.column?.meta?.duration || 0) |
||||
} |
||||
} |
||||
</script> |
||||
|
||||
<style scoped> |
||||
|
||||
</style> |
||||
|
||||
<!-- |
||||
/** |
||||
* @copyright Copyright (c) 2021, Xgene Cloud Ltd |
||||
* |
||||
* @author Wing-Kam Wong <wingkwong.code@gmail.com> |
||||
* |
||||
* @license GNU AGPL version 3 or any later version |
||||
* |
||||
* This program is free software: you can redistribute it and/or modify |
||||
* it under the terms of the GNU Affero General Public License as |
||||
* published by the Free Software Foundation, either version 3 of the |
||||
* License, or (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU Affero General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Affero General Public License |
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>. |
||||
* |
||||
*/ |
||||
--> |
@@ -0,0 +1,72 @@
|
||||
<template> |
||||
<v-row class="duration-wrapper"> |
||||
<div class="caption"> |
||||
A duration of time in minutes or seconds (e.g. 1:23). |
||||
</div> |
||||
<!-- TODO: i18n --> |
||||
<v-autocomplete |
||||
v-model="colMeta.duration" |
||||
hide-details |
||||
class="caption ui-type nc-ui-dt-dropdown" |
||||
label="Duration Format" |
||||
dense |
||||
outlined |
||||
item-value="id" |
||||
item-text="title" |
||||
:items="durationOptionList" |
||||
> |
||||
<template #selection="{ item }"> |
||||
<div> |
||||
<span class="caption grey--text text--darken-4"> |
||||
{{ item.title }} |
||||
</span> |
||||
</div> |
||||
</template> |
||||
<template #item="{ item }"> |
||||
<div class="caption"> |
||||
{{ item.title }} |
||||
</div> |
||||
</template> |
||||
</v-autocomplete> |
||||
</v-row> |
||||
</template> |
||||
|
||||
<script> |
||||
import { durationOptions } from '~/helpers/durationHelper' |
||||
|
||||
export default { |
||||
name: 'DurationOptions',
||||
props: ['column', 'meta', 'value'], |
||||
data: () => ({ |
||||
durationOptionList: durationOptions.map(o => ({ |
||||
...o, |
||||
// h:mm:ss (e.g. 3:45, 1:23:40) |
||||
title: `${o.title} ${o.example}` |
||||
})), |
||||
colMeta: { |
||||
duration: 0 |
||||
} |
||||
}), |
||||
watch: { |
||||
value() { |
||||
this.colMeta = this.value || {} |
||||
}, |
||||
colMeta(v) { |
||||
this.$emit('input', v) |
||||
} |
||||
}, |
||||
created() { |
||||
this.colMeta = this.value ? { ...this.value } : { ...this.colMeta } |
||||
} |
||||
} |
||||
</script> |
||||
|
||||
<style scoped> |
||||
.duration-wrapper { |
||||
margin: 0; |
||||
} |
||||
|
||||
.duration-wrapper .caption:first-child { |
||||
margin: -10px 0px 10px 5px; |
||||
} |
||||
</style> |
@@ -0,0 +1,139 @@
|
||||
<template> |
||||
<div class="duration-cell-wrapper"> |
||||
<input |
||||
ref="durationInput" |
||||
v-model="localState" |
||||
:placeholder="durationPlaceholder" |
||||
@blur="onBlur" |
||||
@keypress="checkDurationFormat($event)" |
||||
@keydown.enter="isEdited && $emit('input', durationInMS)" |
||||
v-on="parentListeners" |
||||
> |
||||
<div v-if="showWarningMessage == true" class="duration-warning"> |
||||
<!-- TODO: i18n --> |
||||
Please enter a number |
||||
</div> |
||||
</div> |
||||
</template> |
||||
|
||||
<script> |
||||
import { durationOptions, convertMS2Duration, convertDurationToSeconds } from '~/helpers/durationHelper' |
||||
|
||||
export default { |
||||
name: 'DurationCell', |
||||
props: { |
||||
column: Object, |
||||
value: [Number, String], |
||||
readOnly: Boolean |
||||
}, |
||||
data: () => ({ |
||||
// flag to determine to show warning message or not |
||||
showWarningMessage: false, |
||||
// parsed duration (note: despite the name, this holds seconds as returned by convertDurationToSeconds)
||||
durationInMS: null, |
||||
// check if the cell is edited or not |
||||
isEdited: false |
||||
}), |
||||
computed: { |
||||
localState: { |
||||
get() { |
||||
return convertMS2Duration(this.value, this.durationType) |
||||
}, |
||||
set(val) { |
||||
this.isEdited = true |
||||
const res = convertDurationToSeconds(val, this.durationType) |
||||
if (res._isValid) { |
||||
this.durationInMS = res._sec |
||||
} |
||||
} |
||||
}, |
||||
durationPlaceholder() { |
||||
return durationOptions[this.durationType].title |
||||
}, |
||||
durationType() { |
||||
return this.column?.meta?.duration || 0 |
||||
}, |
||||
parentListeners() { |
||||
const $listeners = {} |
||||
|
||||
if (this.$listeners.blur) { |
||||
$listeners.blur = this.$listeners.blur |
||||
} |
||||
if (this.$listeners.focus) { |
||||
$listeners.focus = this.$listeners.focus |
||||
} |
||||
|
||||
return $listeners |
||||
} |
||||
}, |
||||
mounted() { |
||||
window.addEventListener('keypress', (_) => { |
||||
if (this.$refs.durationInput) { |
||||
this.$refs.durationInput.focus() |
||||
} |
||||
}) |
||||
}, |
||||
methods: { |
||||
checkDurationFormat(evt) { |
||||
evt = evt || window.event |
||||
const charCode = (evt.which) ? evt.which : evt.keyCode |
||||
// ref: http://www.columbia.edu/kermit/ascii.html |
||||
const PRINTABLE_CTL_RANGE = charCode > 31 |
||||
const NON_DIGIT = charCode < 48 || charCode > 57 |
||||
const NON_COLON = charCode !== 58 |
||||
const NON_PERIOD = charCode !== 46 |
||||
if (PRINTABLE_CTL_RANGE && NON_DIGIT && NON_COLON && NON_PERIOD) { |
||||
this.showWarningMessage = true |
||||
evt.preventDefault() |
||||
} else { |
||||
this.showWarningMessage = false |
||||
// only allow digits, '.' and ':' (without quotes) |
||||
return true |
||||
} |
||||
}, |
||||
onBlur() { |
||||
if (this.isEdited) { |
||||
this.$emit('input', this.durationInMS) |
||||
} |
||||
this.isEdited = false |
||||
} |
||||
} |
||||
} |
||||
</script> |
||||
|
||||
<style scoped> |
||||
|
||||
.duration-cell-wrapper { |
||||
padding: 10px; |
||||
} |
||||
|
||||
.duration-warning { |
||||
text-align: left; |
||||
margin-top: 10px; |
||||
color: #E65100; |
||||
} |
||||
</style> |
||||
|
||||
<!-- |
||||
/** |
||||
* @copyright Copyright (c) 2021, Xgene Cloud Ltd |
||||
* |
||||
* @author Wing-Kam Wong <wingkwong.code@gmail.com> |
||||
* |
||||
* @license GNU AGPL version 3 or any later version |
||||
* |
||||
* This program is free software: you can redistribute it and/or modify |
||||
* it under the terms of the GNU Affero General Public License as |
||||
* published by the Free Software Foundation, either version 3 of the |
||||
* License, or (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU Affero General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Affero General Public License |
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>. |
||||
* |
||||
*/ |
||||
--> |
@@ -0,0 +1,192 @@
|
||||
export const durationOptions = [ |
||||
{ |
||||
id: 0, |
||||
title: 'h:mm', |
||||
example: '(e.g. 1:23)', |
||||
regex: /(\d+)(?::(\d+))?/ |
||||
}, { |
||||
id: 1, |
||||
title: 'h:mm:ss', |
||||
example: '(e.g. 3:45, 1:23:40)', |
||||
regex: /(\d+)?(?::(\d+))?(?::(\d+))?/ |
||||
}, { |
||||
id: 2, |
||||
title: 'h:mm:ss.s', |
||||
example: '(e.g. 3:34.6, 1:23:40.0)', |
||||
regex: /(\d+)?(?::(\d+))?(?::(\d+))?(?:\.(\d{0,4})?)?/
||||
}, { |
||||
id: 3, |
||||
title: 'h:mm:ss.ss', |
||||
example: '(e.g. 3:45.67, 1:23:40.00)',
||||
regex: /(\d+)?(?::(\d+))?(?::(\d+))?(?:\.(\d{0,4})?)?/
||||
}, { |
||||
id: 4, |
||||
title: 'h:mm:ss.sss', |
||||
example: '(e.g. 3:45.678, 1:23:40.000)',
||||
regex: /(\d+)?(?::(\d+))?(?::(\d+))?(?:\.(\d{0,4})?)?/
||||
} |
||||
] |
||||
|
||||
// pad zero
|
||||
// mm && ss
|
||||
// e.g. 3 -> 03
|
||||
// e.g. 12 -> 12
|
||||
// sss
|
||||
// e.g. 1 -> 001
|
||||
// e.g. 10 -> 010
|
||||
const padZero = (val, isSSS = false) => { |
||||
return (val + '').padStart(isSSS ? 3 : 2, '0') |
||||
} |
||||
|
||||
export const convertMS2Duration = (val, durationType) => { |
||||
if (val === "" || val === null || val === undefined) { return val } |
||||
// 600.000 s --> 10:00 (10 mins)
|
||||
const milliseconds = Math.round((val % 1) * 1000) |
||||
const centiseconds = Math.round(milliseconds / 10) |
||||
const deciseconds = Math.round(centiseconds / 10) |
||||
const hours = Math.floor(parseInt(val, 10) / (60 * 60)) |
||||
const minutes = Math.floor((parseInt(val, 10) - (hours * 60 * 60)) / 60) |
||||
const seconds = parseInt(val, 10) - (hours * 60 * 60) - (minutes * 60) |
||||
|
||||
if (durationType === 0) { |
||||
// h:mm
|
||||
return `${padZero(hours)}:${padZero(minutes + (seconds >= 30))}` |
||||
} else if (durationType === 1) { |
||||
// h:mm:ss
|
||||
return `${padZero(hours)}:${padZero(minutes)}:${padZero(seconds)}` |
||||
} else if (durationType === 2) { |
||||
// h:mm:ss.s
|
||||
return `${padZero(hours)}:${padZero(minutes)}:${padZero(seconds)}.${deciseconds}` |
||||
} else if (durationType === 3) { |
||||
// h:mm:ss.ss
|
||||
return `${padZero(hours)}:${padZero(minutes)}:${padZero(seconds)}.${padZero(centiseconds)}` |
||||
} else if (durationType === 4) { |
||||
// h:mm:ss.sss
|
||||
return `${padZero(hours)}:${padZero(minutes)}:${padZero(seconds)}.${padZero(milliseconds, true)}` |
||||
} |
||||
return val |
||||
} |
||||
|
||||
export const convertDurationToSeconds = (val, durationType) => { |
||||
// 10:00 (10 mins) -> 600.000 s
|
||||
const res = { |
||||
_sec: null,
||||
_isValid: true |
||||
} |
||||
const durationRegex = durationOptions[durationType].regex |
||||
if (durationRegex.test(val)) { |
||||
let h, mm, ss |
||||
const groups = val.match(durationRegex) |
||||
if (groups[0] && groups[1] && !groups[2] && !groups[3] && !groups[4]) { |
||||
const val = parseInt(groups[1], 10) |
||||
if (groups.input.slice(-1) === ':') { |
||||
// e.g. 30:
|
||||
h = groups[1] |
||||
mm = 0 |
||||
ss = 0 |
||||
} else if (durationType === 0) { |
||||
// consider it as minutes
|
||||
// e.g. 360 -> 06:00
|
||||
h = Math.floor(val / 60) |
||||
mm = Math.floor((val - ((h * 3600)) / 60)) |
||||
ss = 0 |
||||
} else { |
||||
// consider it as seconds
|
||||
// e.g. 3600 -> 01:00:00
|
||||
h = Math.floor(groups[1] / 3600) |
||||
mm = Math.floor(groups[1] / 60) % 60 |
||||
ss = val % 60 |
||||
} |
||||
} else if (durationType !== 0 && groups[1] && groups[2] && !groups[3]) { |
||||
// 10:10 means mm:ss instead of h:mm
|
||||
// 10:10:10 means h:mm:ss
|
||||
h = 0 |
||||
mm = groups[1] |
||||
ss = groups[2] |
||||
} else { |
||||
h = groups[1] || 0 |
||||
mm = groups[2] || 0 |
||||
ss = groups[3] || 0 |
||||
} |
||||
|
||||
if (durationType === 0) { |
||||
// h:mm
|
||||
res._sec = h * 3600 + mm * 60 |
||||
} else if (durationType === 1) { |
||||
// h:mm:ss
|
||||
res._sec = h * 3600 + mm * 60 + ss * 1 |
||||
} else if (durationType === 2) { |
||||
// h:mm:ss.s (deciseconds)
|
||||
const ds = groups[4] || 0 |
||||
const len = Math.log(ds) * Math.LOG10E + 1 | 0 |
||||
const ms = ( |
||||
// e.g. len = 4: 1234 -> 1, 1456 -> 1
|
||||
// e.g. len = 3: 123 -> 1, 191 -> 2
|
||||
// e.g. len = 2: 12 -> 1 , 16 -> 2
|
||||
len === 4 |
||||
? Math.round(ds / 1000) |
||||
: len === 3 |
||||
? Math.round(ds / 100) |
||||
: len === 2 |
||||
? Math.round(ds / 10) |
||||
// take whatever it is
|
||||
: ds |
||||
) * 100 |
||||
res._sec = h * 3600 + mm * 60 + ss * 1 + ms / 1000 |
||||
} else if (durationType === 3) { |
||||
// h:mm:ss.ss (centiseconds)
|
||||
const cs = groups[4] || 0 |
||||
const len = Math.log(cs) * Math.LOG10E + 1 | 0 |
||||
const ms = ( |
||||
// e.g. len = 4: 1234 -> 12, 1285 -> 13
|
||||
// e.g. len = 3: 123 -> 12, 128 -> 13
|
||||
// check the third digit
|
||||
len === 4 |
||||
? Math.round(cs / 100) |
||||
: len === 3 |
||||
? Math.round(cs / 10) |
||||
// take whatever it is
|
||||
: cs |
||||
) * 10 |
||||
res._sec = h * 3600 + mm * 60 + ss * 1 + ms / 1000 |
||||
} else if (durationType === 4) { |
||||
// h:mm:ss.sss (milliseconds)
|
||||
let ms = groups[4] || 0 |
||||
const len = Math.log(ms) * Math.LOG10E + 1 | 0 |
||||
ms = ( |
||||
// e.g. 1235 -> 124
|
||||
// e.g. 1234 -> 123
|
||||
len === 4 |
||||
? Math.round(ms / 10) |
||||
// take whatever it is
|
||||
: ms |
||||
) * 1 |
||||
res._sec = h * 3600 + mm * 60 + ss * 1 + ms / 1000 |
||||
} |
||||
} else { |
||||
res._isValid = false |
||||
} |
||||
return res |
||||
} |
||||
|
||||
/** |
||||
* @copyright Copyright (c) 2021, Xgene Cloud Ltd |
||||
* |
||||
* @author Wing-Kam Wong <wingkwong.code@gmail.com> |
||||
* |
||||
* @license GNU AGPL version 3 or any later version |
||||
* |
||||
* This program is free software: you can redistribute it and/or modify |
||||
* it under the terms of the GNU Affero General Public License as |
||||
* published by the Free Software Foundation, either version 3 of the |
||||
* License, or (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU Affero General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Affero General Public License |
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
* |
||||
*/ |
@@ -0,0 +1,42 @@
---
title: "Timely Build"
description: "Timely Build"
position: 5000
category: "Engineering"
menuTitle: "Timely Build"
---

NocoDB provides timely builds for Docker and executables by compiling the source code and packaging it as a deliverable, so that we can

- reduce pull request cycle time
- allow issue reporters / reviewers to verify the fix without setting up their local machines

## Docker

When a non-draft pull request is created, reopened, or synchronized, a timely Docker build is triggered, but only for changes under the following paths:

- `packages/nocodb-sdk/**`
- `packages/nc-gui/**`
- `packages/nc-plugin/**`
- `packages/nocodb/**`

The Docker images are built and pushed to Docker Hub (see [nocodb/nocodb-timely](https://hub.docker.com/r/nocodb/nocodb-timely/tags) for the full list). Once the image is ready, the GitHub bot adds a comment with the pull command in the pull request. The tag format is `<NOCODB_CURRENT_VERSION>-pr-<PR_NUMBER>-<YYYYMMDD>-<HHMM>`, as in the example below.

![image](https://user-images.githubusercontent.com/35857179/175012097-240dab05-da93-4c4e-87c1-1c36fb1350bd.png)
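As a concrete sketch of how such an image would be pulled and run: the tag below is made up purely to illustrate the format above (version, PR number, and timestamp are placeholders), and port 8080 is NocoDB's default.

```bash
# Pull a timely image for a hypothetical PR (placeholder tag)
docker pull nocodb/nocodb-timely:0.91.7-pr-1234-20220620-1030

# Run it locally on NocoDB's default port
docker run -d --name nocodb-timely -p 8080:8080 nocodb/nocodb-timely:0.91.7-pr-1234-20220620-1030
```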
## Executables

Similarly, we provide timely builds of the executables for non-Docker users. The source code is built, packaged as binary files, and pushed to GitHub (see [nocodb/nocodb-timely](https://github.com/nocodb/nocodb-timely/releases) for the full list).

Currently, we only support the following targets:

- `node16-linux-arm64`
- `node16-macos-arm64`
- `node16-win-arm64`
- `node16-linux-x64`
- `node16-macos-x64`
- `node16-win-x64`

Once the executables are ready, the GitHub bot adds a comment with the commands in the pull request (see the usage sketch below).

![image](https://user-images.githubusercontent.com/35857179/175012070-f5f3e7b8-6dc5-4d1c-9f7e-654bc5039521.png)
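A minimal usage sketch for one of these targets; the release tag is a placeholder, and the file name follows the `Noco-linux-x64` naming used by the build workflow above.

```bash
# Download and run the linux-x64 timely executable (placeholder tag)
curl -L -o Noco-linux-x64 \
  "https://github.com/nocodb/nocodb-timely/releases/download/<tag>/Noco-linux-x64"
chmod +x Noco-linux-x64
./Noco-linux-x64   # serves NocoDB on http://localhost:8080 by default
```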
@@ -0,0 +1,44 @@
---
title: "Primary Key"
description: "Primary Key"
position: 575
category: "Product"
menuTitle: "Primary Key"
---

## What is a Primary Key ?
- A primary key is a special database table column designated to uniquely identify each table record.

## What is the use of Primary Key ?
- As it uniquely identifies an individual record of a table, it is used internally by NocoDB for all operations associated with a record.

## Primary Key in NocoDB
- The primary key that NocoDB defines / uses depends on how the underlying table was created. A summary is captured below.
  1. From UI, Create new table / Import from Excel / Import from CSV
     1. An `ID` [datatype: Integer] system field created by default during table creation is used as the primary key
     2. Additional system fields `created-at`, `updated-at` are inserted by default and can be omitted optionally; these fields can be deleted after table creation
  2. Connect to existing external database
     1. The existing `primary key` field defined for a table is retained as is; NocoDB doesn't insert a new ID field
     2. Additional system fields `created-at`, `updated-at` are not inserted by default
  3. Import from Airtable
     1. The Airtable record ID is marked as the primary key for imported records, and is mapped to the field `ncRecordId` [datatype: varchar]
     2. If a new record is inserted after migration and the `ncRecordId` field is omitted during insertion, NocoDB inserts an auto-generated string
     3. A computed hash value for the entire record is stored in the system field `ncRecordHash`
     4. Additional system fields `created-at`, `updated-at` are not inserted by default
  4. Create new table using SDK / API
     1. No default primary key field is introduced by NocoDB. It has to be explicitly specified during table creation (using the attribute `pk: true`)

## What if Primary Key was missing?
It is possible to have a table without any primary key.
- An external database table can be created without a primary key configuration.
- A new table can be created using the SDK / API without a primary key.

In such scenarios, new records can be created in NocoDB for this table, but records can't be updated or deleted (as there is no way for NocoDB to uniquely identify these records).

#### Example : Primary Key & optional system fields during new table creation
![Screenshot 2022-06-16 at 12 15 43 PM](https://user-images.githubusercontent.com/86527202/174010350-8610b9c1-a761-4bff-a53d-dc728df47e1b.png)

#### Example : Show System Fields
![Screenshot 2022-06-16 at 12 16 07 PM](https://user-images.githubusercontent.com/86527202/174010379-9e300d42-ad89-4653-afa2-f70fca407ca8.png)

## Can I change the Primary Key to another column within tables ?
- You can't change the primary key from the NocoDB UI. You can reconfigure it directly at the database level and then trigger `metasync` explicitly, as in the sketch below.
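A minimal sketch of that flow for a PostgreSQL source, with a hypothetical table and column; after the schema change, trigger `metasync` from the NocoDB UI so the new key is picked up.

```bash
# Hypothetical example: move the primary key of "invoices" to the "invoice_no" column
psql "$DATABASE_URL" -c 'ALTER TABLE invoices DROP CONSTRAINT invoices_pkey;'
psql "$DATABASE_URL" -c 'ALTER TABLE invoices ADD PRIMARY KEY (invoice_no);'
# Then trigger metasync from the NocoDB UI
```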
@@ -0,0 +1,30 @@
|
||||
import UITypes from './UITypes'; |
||||
|
||||
export interface Column { |
||||
column_name: string; |
||||
ref_column_name: string; |
||||
uidt?: UITypes; |
||||
dtxp?: any; |
||||
dt?: any; |
||||
} |
||||
export interface Table { |
||||
table_name: string; |
||||
ref_table_name: string; |
||||
columns: Array<Column>; |
||||
} |
||||
export interface Template { |
||||
title: string; |
||||
tables: Array<Table>; |
||||
} |
||||
|
||||
export default abstract class TemplateGenerator { |
||||
abstract parse(): Promise<any>; |
||||
abstract parseTemplate(): Promise<Template>; |
||||
abstract getColumns(): Promise<any>; |
||||
abstract parseData(): Promise<any>; |
||||
abstract getData(): Promise<{ |
||||
[table_name: string]: Array<{ |
||||
[key: string]: any; |
||||
}>; |
||||
}>; |
||||
} |
@@ -0,0 +1,41 @@
|
||||
export function validatePassword(p) { |
||||
let error = ''; |
||||
let progress = 0; |
||||
let hint = null; |
||||
let valid = true; |
||||
if (!p) { |
||||
error = |
||||
'At least 8 letters with one Uppercase, one number and one special letter'; |
||||
valid = false; |
||||
} else { |
||||
if (!(p.length >= 8)) { |
||||
error += 'At least 8 letters. ';
||||
valid = false; |
||||
} else { |
||||
progress = Math.min(100, progress + 25); |
||||
} |
||||
|
||||
if (!p.match(/.*[A-Z].*/)) { |
||||
error += 'One Uppercase Letter. '; |
||||
valid = false; |
||||
} else { |
||||
progress = Math.min(100, progress + 25); |
||||
} |
||||
|
||||
if (!p.match(/.*[0-9].*/)) { |
||||
error += 'One Number. '; |
||||
valid = false; |
||||
} else { |
||||
progress = Math.min(100, progress + 25); |
||||
} |
||||
|
||||
if (!p.match(/[$&+,:;=?@#|'<>.^*()%!_-]/)) { |
||||
error += 'One special letter. '; |
||||
hint = "Allowed special character list : $&+,:;=?@#|'<>.^*()%!_-"; |
||||
valid = false; |
||||
} else { |
||||
progress = Math.min(100, progress + 25); |
||||
} |
||||
} |
||||
return { error, valid, progress, hint }; |
||||
} |
@@ -0,0 +1,9 @@
|
||||
export function sanitize(v) { |
||||
return v?.replace(/([^\\]|^)(\?+)/g, (_, m1, m2) => { |
||||
return `${m1}${m2.split('?').join('\\?')}`; |
||||
}); |
||||
} |
||||
|
||||
export function unsanitize(v) { |
||||
return v?.replace(/\\[?]/g, '?'); |
||||
} |
@@ -0,0 +1,236 @@
|
||||
import User from '../../../models/User'; |
||||
import { v4 as uuidv4 } from 'uuid'; |
||||
import { promisify } from 'util'; |
||||
import { Tele } from 'nc-help'; |
||||
|
||||
import bcrypt from 'bcryptjs'; |
||||
import Noco from '../../../Noco'; |
||||
import { CacheScope, MetaTable } from '../../../utils/globals'; |
||||
import ProjectUser from '../../../models/ProjectUser'; |
||||
import { validatePassword } from 'nocodb-sdk'; |
||||
import boxen from 'boxen'; |
||||
import NocoCache from '../../../cache/NocoCache'; |
||||
|
||||
const { isEmail } = require('validator'); |
||||
const rolesLevel = { owner: 0, creator: 1, editor: 2, commenter: 3, viewer: 4 }; |
||||
|
||||
export default async function initAdminFromEnv(_ncMeta = Noco.ncMeta) { |
||||
if (process.env.NC_ADMIN_EMAIL && process.env.NC_ADMIN_PASSWORD) { |
||||
if (!isEmail(process.env.NC_ADMIN_EMAIL?.trim())) { |
||||
console.log( |
||||
'\n', |
||||
boxen( |
||||
`Provided admin email '${process.env.NC_ADMIN_EMAIL}' is not valid`, |
||||
{ |
||||
title: 'Invalid admin email', |
||||
padding: 1, |
||||
borderStyle: 'double', |
||||
titleAlignment: 'center', |
||||
borderColor: 'red' |
||||
} |
||||
), |
||||
'\n' |
||||
); |
||||
process.exit(1); |
||||
} |
||||
|
||||
const { valid, error, hint } = validatePassword( |
||||
process.env.NC_ADMIN_PASSWORD |
||||
); |
||||
if (!valid) { |
||||
console.log( |
||||
'\n', |
||||
boxen(`${error}${hint ? `\n\n${hint}` : ''}`, { |
||||
title: 'Invalid admin password', |
||||
padding: 1, |
||||
borderStyle: 'double', |
||||
titleAlignment: 'center', |
||||
borderColor: 'red' |
||||
}), |
||||
'\n' |
||||
); |
||||
process.exit(1); |
||||
} |
||||
|
||||
let ncMeta; |
||||
try { |
||||
ncMeta = await _ncMeta.startTransaction(); |
||||
const email = process.env.NC_ADMIN_EMAIL.toLowerCase().trim(); |
||||
|
||||
const salt = await promisify(bcrypt.genSalt)(10); |
||||
const password = await promisify(bcrypt.hash)( |
||||
process.env.NC_ADMIN_PASSWORD, |
||||
salt |
||||
); |
||||
const email_verification_token = uuidv4(); |
||||
|
||||
// if super admin not present
|
||||
if (await User.isFirst(ncMeta)) { |
||||
const roles = 'user,super'; |
||||
|
||||
// roles = 'owner,creator,editor'
|
||||
Tele.emit('evt', { |
||||
evt_type: 'project:invite', |
||||
count: 1 |
||||
}); |
||||
|
||||
await User.insert( |
||||
{ |
||||
firstname: '', |
||||
lastname: '', |
||||
email, |
||||
salt, |
||||
password, |
||||
email_verification_token, |
||||
roles |
||||
}, |
||||
ncMeta |
||||
); |
||||
} else { |
||||
const salt = await promisify(bcrypt.genSalt)(10); |
||||
const password = await promisify(bcrypt.hash)( |
||||
process.env.NC_ADMIN_PASSWORD, |
||||
salt |
||||
); |
||||
const email_verification_token = uuidv4(); |
||||
const superUser = await ncMeta.metaGet2(null, null, MetaTable.USERS, { |
||||
roles: 'user,super' |
||||
}); |
||||
|
||||
if (email !== superUser.email) { |
||||
// update admin email and password and migrate projects
|
||||
// if a user with the new email already exists and is associated with projects
|
||||
|
||||
// check whether a user account already exists with the new admin email
|
||||
const existingUserWithNewEmail = await User.getByEmail(email, ncMeta); |
||||
|
||||
if (existingUserWithNewEmail?.id) { |
||||
// get all project access belonging to the existing account
|
||||
// and migrate it to the admin account
|
||||
const existingUserProjects = await ncMeta.metaList2( |
||||
null, |
||||
null, |
||||
MetaTable.PROJECT_USERS, |
||||
{ |
||||
condition: { fk_user_id: existingUserWithNewEmail.id } |
||||
} |
||||
); |
||||
|
||||
for (const existingUserProject of existingUserProjects) { |
||||
const userProject = await ProjectUser.get( |
||||
existingUserProject.project_id, |
||||
superUser.id, |
||||
ncMeta |
||||
); |
||||
|
||||
// if the admin user already has access to the project
|
||||
// then update the role to the higher of the two access levels
|
||||
if (userProject) { |
||||
if ( |
||||
rolesLevel[userProject.roles] > |
||||
rolesLevel[existingUserProject.roles] |
||||
) { |
||||
await ProjectUser.update( |
||||
userProject.project_id, |
||||
superUser.id, |
||||
existingUserProject.roles, |
||||
ncMeta |
||||
); |
||||
} |
||||
} else { |
||||
// if the super admin doesn't have access yet, add it
|
||||
await ProjectUser.insert( |
||||
{ |
||||
...existingUserProject, |
||||
fk_user_id: superUser.id |
||||
}, |
||||
ncMeta |
||||
); |
||||
} |
||||
// delete the old project access entry from DB
|
||||
await ProjectUser.delete( |
||||
existingUserProject.project_id, |
||||
existingUserProject.fk_user_id, |
||||
ncMeta |
||||
); |
||||
} |
||||
|
||||
// delete existing user
|
||||
await ncMeta.metaDelete( |
||||
null, |
||||
null, |
||||
MetaTable.USERS, |
||||
existingUserWithNewEmail.id |
||||
); |
||||
|
||||
// clear cache
|
||||
await NocoCache.delAll( |
||||
CacheScope.USER, |
||||
`${existingUserWithNewEmail.email}___*` |
||||
); |
||||
await NocoCache.del( |
||||
`${CacheScope.USER}:${existingUserWithNewEmail.id}` |
||||
); |
||||
await NocoCache.del( |
||||
`${CacheScope.USER}:${existingUserWithNewEmail.email}` |
||||
); |
||||
|
||||
// Update email and password of super admin account
|
||||
await User.update( |
||||
superUser.id, |
||||
{ |
||||
salt, |
||||
email, |
||||
password, |
||||
email_verification_token, |
||||
token_version: null, |
||||
refresh_token: null |
||||
}, |
||||
ncMeta |
||||
); |
||||
} else { |
||||
// no other account uses the new email, so update the admin email and password directly
|
||||
await User.update( |
||||
superUser.id, |
||||
{ |
||||
salt, |
||||
email, |
||||
password, |
||||
email_verification_token, |
||||
token_version: null, |
||||
refresh_token: null |
||||
}, |
||||
ncMeta |
||||
); |
||||
} |
||||
} else { |
||||
const newPasswordHash = await promisify(bcrypt.hash)( |
||||
process.env.NC_ADMIN_PASSWORD, |
||||
superUser.salt |
||||
); |
||||
|
||||
if (newPasswordHash !== superUser.password) { |
||||
// if the emails are the same but the passwords differ,
|
||||
// then update the password and reset the token version
|
||||
await User.update( |
||||
superUser.id, |
||||
{ |
||||
salt, |
||||
password, |
||||
email_verification_token, |
||||
token_version: null, |
||||
refresh_token: null |
||||
}, |
||||
ncMeta |
||||
); |
||||
} |
||||
} |
||||
} |
||||
await ncMeta.commit(); |
||||
} catch (e) { |
||||
console.log('Error occurred while updating/creating admin user'); |
||||
console.log(e); |
||||
await ncMeta.rollback(e); |
||||
} |
||||
} |
||||
} |
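For context, a sketch of how this bootstrap is driven; the function is a no-op unless both environment variables are set, and the values below are placeholders only:

```js
process.env.NC_ADMIN_EMAIL = 'admin@example.com';  // placeholder
process.env.NC_ADMIN_PASSWORD = 'Sup3r@Secret';    // must pass validatePassword from nocodb-sdk

(async () => {
  // Creates the super admin on first run, or migrates/updates an existing one inside a transaction.
  await initAdminFromEnv();
})();
```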
@ -0,0 +1,5 @@
|
||||
import crypto from 'crypto'; |
||||
|
||||
export function randomTokenString(): string { |
||||
return crypto.randomBytes(40).toString('hex'); |
||||
} |
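Trivial as it is, the output shape is worth noting: 40 random bytes, hex-encoded, give an 80-character token.

```js
const token = randomTokenString();
console.log(token.length); // 80 (40 bytes * 2 hex characters per byte)
```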
@ -0,0 +1,23 @@
|
||||
## config.json |
||||
{ |
||||
"srcProject": "sample", |
||||
"dstProject": "sample-copy", |
||||
"baseURL": "http://localhost:8080", |
||||
"xc-auth": "Copy Auth Token" |
||||
} |
||||
- `baseURL` & `xc-auth` are common to both import & export |
||||
|
||||
## Export |
||||
- `srcProject`: name of the source project to export |
||||
- The export JSON file will be created as `<srcProject>.json` |
||||
- execute |
||||
`cd packages/nocodb/tests/export-import` |
||||
`node exportSchema.js` |
||||
|
||||
## Import |
||||
- `srcProject`: name of the JSON file to import (without the `.json` suffix) |
||||
- `dstProject`: name of the new project to create on import (an existing project with the same name is replaced) |
||||
- Data will also be imported if `srcProject` still exists in NocoDB. Note that data is read from the live source project via the API, not from the exported JSON |
||||
- execute |
||||
`cd packages/nocodb/tests/export-import` |
||||
`node importSchema.js` |
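For orientation, the export file is an array of per-table schemas. A trimmed sketch of its shape, with field names taken from the `tSchema` object built in `exportSchema.js` below and purely illustrative values:

```js
// <srcProject>.json — one entry per table
[
  {
    id: 'md_xxxxxxxx',            // v1 table id (illustrative)
    title: 'Film',
    table_name: 'film',
    columns: [
      { id: 'cl_xxxxxxxx', title: 'Title', column_name: 'title', uidt: 'SingleLineText', pv: true }
    ],
    views: [
      { id: 'vw_xxxxxxxx', title: 'Film', type: 3 /* grid */, columns: [], sort: [], filter: [] }
    ]
  }
]
```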
@ -0,0 +1,6 @@
|
||||
{ |
||||
"srcProject": "sample", |
||||
"dstProject": "sample-copy", |
||||
"baseURL": "http://localhost:8080", |
||||
"xc-auth": "Copy Auth Token" |
||||
} |
@ -0,0 +1,297 @@
|
||||
const Api = require('nocodb-sdk').Api; |
||||
const { UITypes } = require('nocodb-sdk'); |
||||
const jsonfile = require('jsonfile'); |
||||
|
||||
const GRID = 3, GALLERY = 2, FORM = 1; |
||||
|
||||
let ncMap = { /* id: name <string> */ }; |
||||
let tblSchema = []; |
||||
let api = {}; |
||||
let viewStore = { columns: {}, sort: {}, filter: {} }; |
||||
|
||||
let inputConfig = jsonfile.readFileSync(`config.json`) |
||||
let ncConfig = { |
||||
projectName: inputConfig.srcProject, |
||||
baseURL: inputConfig.baseURL, |
||||
headers: { |
||||
'xc-auth': `${inputConfig["xc-auth"]}` |
||||
} |
||||
}; |
||||
|
||||
|
||||
// helper routines
|
||||
// remove objects containing 0/ false/ null
|
||||
// fixme: how to handle when cdf (default value) is configured as 0/ null/ false
|
||||
function removeEmpty(obj) { |
||||
return Object.fromEntries( |
||||
Object.entries(obj) |
||||
.filter(([_, v]) => v != null && v != 0 && v != false) |
||||
.map(([k, v]) => [k, v === Object(v) ? removeEmpty(v) : v]) |
||||
); |
||||
} |
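A quick illustration of `removeEmpty`, which also shows the fixme noted above in action: a legitimate `cdf` of `0` or `false` would be stripped too.

```js
removeEmpty({ title: 'Name', pk: false, rqd: 0, dtxp: null, meta: { pv: 0, order: 2 } });
// => { title: 'Name', meta: { order: 2 } }   (0, false and null entries dropped, recursively)
```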
||||
|
||||
function addColumnSpecificData(c) { |
||||
// pick required fields to proceed further
|
||||
let col = removeEmpty( |
||||
(({ id, title, column_name, uidt, dt, pk, pv, rqd, dtxp, system }) => ({ |
||||
id, |
||||
title, |
||||
column_name, |
||||
uidt, |
||||
dt, |
||||
pk, |
||||
pv, |
||||
rqd, |
||||
dtxp, |
||||
system |
||||
}))(c) |
||||
); |
||||
|
||||
switch (c.uidt) { |
||||
case UITypes.Formula: |
||||
col.formula = c.colOptions.formula; |
||||
col.formula_raw = c.colOptions.formula_raw; |
||||
break; |
||||
case UITypes.LinkToAnotherRecord: |
||||
col[`colOptions`] = { |
||||
fk_model_id: c.fk_model_id, |
||||
fk_related_model_id: c.colOptions.fk_related_model_id, |
||||
fk_child_column_id: c.colOptions.fk_child_column_id, |
||||
fk_parent_column_id: c.colOptions.fk_parent_column_id, |
||||
type: c.colOptions.type |
||||
}; |
||||
break; |
||||
case UITypes.Lookup: |
||||
col[`colOptions`] = { |
||||
fk_model_id: c.fk_model_id, |
||||
fk_relation_column_id: c.colOptions.fk_relation_column_id, |
||||
fk_lookup_column_id: c.colOptions.fk_lookup_column_id |
||||
}; |
||||
break; |
||||
case UITypes.Rollup: |
||||
col[`colOptions`] = { |
||||
fk_model_id: c.fk_model_id, |
||||
fk_relation_column_id: c.colOptions.fk_relation_column_id, |
||||
fk_rollup_column_id: c.colOptions.fk_rollup_column_id, |
||||
rollup_function: c.colOptions.rollup_function |
||||
}; |
||||
break; |
||||
} |
||||
|
||||
return col; |
||||
} |
||||
|
||||
function addViewDetails(v) { |
||||
// pick required fields to proceed further
|
||||
let view = (({ id, title, type, show_system_fields, lock_type, order }) => ({ |
||||
id, |
||||
title, |
||||
type, |
||||
show_system_fields, |
||||
lock_type, |
||||
order |
||||
}))(v); |
||||
|
||||
// form view
|
||||
if (v.type === FORM) { |
||||
view.property = (({ |
||||
heading, |
||||
subheading, |
||||
success_msg, |
||||
redirect_after_secs, |
||||
email, |
||||
submit_another_form, |
||||
show_blank_form |
||||
}) => ({ |
||||
heading, |
||||
subheading, |
||||
success_msg, |
||||
redirect_after_secs, |
||||
email, |
||||
submit_another_form, |
||||
show_blank_form |
||||
}))(v.view); |
||||
} |
||||
|
||||
// gallery view
|
||||
else if (v.type === GALLERY) { |
||||
view.property = { |
||||
fk_cover_image_col_id: ncMap[v.view.fk_cover_image_col_id] |
||||
}; |
||||
} |
||||
|
||||
// gallery view doesn't share column information in api yet
|
||||
if (v.type !== GALLERY) { |
||||
if (v.type === GRID) |
||||
view.columns = viewStore.columns[v.id].map(a => |
||||
(({ id, width, order, show }) => ({ id, width, order, show }))(a) |
||||
); |
||||
if (v.type === FORM) |
||||
view.columns = viewStore.columns[v.id].map(a => |
||||
(({ id, order, show, label, help, description, required }) => ({ |
||||
id, |
||||
order, |
||||
show, |
||||
label, |
||||
help, |
||||
description, |
||||
required |
||||
}))(a) |
||||
); |
||||
|
||||
for (let i = 0; i < view.columns?.length; i++) |
||||
view.columns[i].title = ncMap[viewStore.columns[v.id][i].id]; |
||||
|
||||
// skip hm & mm columns
|
||||
view.columns = view.columns |
||||
?.filter(a => a.title?.includes('_nc_m2m_') === false) |
||||
.filter(a => a.title?.includes('nc_') === false); |
||||
} |
||||
|
||||
// filter & sort configurations
|
||||
if (v.type !== FORM) { |
||||
view.sort = viewStore.sort[v.id].map(a => |
||||
(({ fk_column_id, direction, order }) => ({ |
||||
fk_column_id, |
||||
direction, |
||||
order |
||||
}))(a) |
||||
); |
||||
view.filter = viewStore.filter[v.id].map(a => |
||||
(({ fk_column_id, logical_op, comparison_op, value, order }) => ({ |
||||
fk_column_id, |
||||
logical_op, |
||||
comparison_op, |
||||
value, |
||||
order |
||||
}))(a) |
||||
); |
||||
} |
||||
return view; |
||||
} |
||||
|
||||
// view data stored as is for quick access
|
||||
async function storeViewDetails(tableId) { |
||||
// read view data for each table
|
||||
let viewList = await api.dbView.list(tableId); |
||||
for (let j = 0; j < viewList.list.length; j++) { |
||||
let v = viewList.list[j]; |
||||
let viewDetails = []; |
||||
|
||||
// invoke view specific read to populate columns information
|
||||
if (v.type === FORM) viewDetails = (await api.dbView.formRead(v.id)).columns; |
||||
else if (v.type === GALLERY) viewDetails = await api.dbView.galleryRead(v.id); |
||||
else if (v.type === GRID) viewDetails = await api.dbView.gridColumnsList(v.id); |
||||
viewStore.columns[v.id] = viewDetails; |
||||
|
||||
// populate sort information
|
||||
let vSort = await api.dbTableSort.list(v.id); |
||||
viewStore.sort[v.id] = vSort.sorts.list; |
||||
|
||||
let vFilter = await api.dbTableFilter.read(v.id); |
||||
viewStore.filter[v.id] = vFilter; |
||||
} |
||||
} |
||||
|
||||
// mapping table for quick information access
|
||||
// store maps for tableId, columnId, viewColumnId & viewId to their names
|
||||
async function generateMapTbl(pId) { |
||||
const tblList = await api.dbTable.list(pId); |
||||
|
||||
for (let i = 0; i < tblList.list.length; i++) { |
||||
let tblId = tblList.list[i].id; |
||||
let tbl = await api.dbTable.read(tblId); |
||||
|
||||
// table ID <> name
|
||||
ncMap[tblId] = tbl.title; |
||||
|
||||
// column ID <> name
|
||||
tbl.columns.map(x => (ncMap[x.id] = x.title)); |
||||
|
||||
// view ID <> name
|
||||
tbl.views.map(x => (ncMap[x.id] = x.tn)); |
||||
|
||||
for (let i = 0; i < tbl.views.length; i++) { |
||||
let x = tbl.views[i]; |
||||
let viewColumns = []; |
||||
if (x.type === FORM) viewColumns = (await api.dbView.formRead(x.id)).columns; |
||||
else if (x.type === GALLERY) |
||||
viewColumns = (await api.dbView.galleryRead(x.id)).columns; |
||||
else if (x.type === GRID) viewColumns = await api.dbView.gridColumnsList(x.id); |
||||
|
||||
// view column ID <> name
|
||||
viewColumns?.map(a => (ncMap[a.id] = ncMap[a.fk_column_id])); |
||||
} |
||||
} |
||||
} |
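The resulting `ncMap` is a flat id-to-name dictionary used throughout the rest of the script; a sketch of what it ends up holding (ids are illustrative):

```js
// ncMap after generateMapTbl(projectId)
const exampleNcMap = {
  md_tbl001: 'Film',    // table id       -> table title
  cl_col001: 'Title',   // column id      -> column title
  vw_view01: 'film',    // view id        -> the view's `tn` field, as read above
  nc_vcol01: 'Title'    // view-column id -> title of the underlying column
};
```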
||||
|
||||
// main
|
||||
//
|
||||
async function exportSchema() { |
||||
api = new Api(ncConfig); |
||||
|
||||
// fetch project details (id, etc.)
|
||||
const x = await api.project.list(); |
||||
const p = x.list.find(a => a.title === ncConfig.projectName); |
||||
|
||||
await generateMapTbl(p.id); |
||||
|
||||
// read project
|
||||
const tblList = await api.dbTable.list(p.id); |
||||
|
||||
// for each table
|
||||
for (let i = 0; i < tblList.list.length; i++) { |
||||
let tblId = tblList.list[i].id; |
||||
await storeViewDetails(tblId); |
||||
|
||||
let tbl = await api.dbTable.read(tblId); |
||||
|
||||
// prepare schema
|
||||
let tSchema = { |
||||
id: tbl.id, |
||||
title: tbl.title, |
||||
table_name: tbl?.table_name, |
||||
columns: [...tbl.columns.map(c => addColumnSpecificData(c))] |
||||
.filter(a => a.title.includes('_nc_m2m_') === false) // mm
|
||||
.filter(a => a.title.includes(p.prefix) === false) // hm
|
||||
.filter( |
||||
a => !(a?.system === 1 && a.uidt === UITypes.LinkToAnotherRecord) |
||||
), |
||||
views: [...tbl.views.map(v => addViewDetails(v))] |
||||
}; |
||||
tblSchema.push(tSchema); |
||||
} |
||||
} |
||||
|
||||
(async () => { |
||||
await exportSchema(); |
||||
jsonfile.writeFileSync( |
||||
`${ncConfig.projectName.replace(/ /g, '_')}.json`, |
||||
tblSchema, |
||||
{ spaces: 2 } |
||||
); |
||||
})().catch(e => { |
||||
console.log(e); |
||||
}); |
||||
|
||||
/** |
||||
* @copyright Copyright (c) 2021, Xgene Cloud Ltd |
||||
* |
||||
* @author Raju Udava <sivadstala@gmail.com> |
||||
* |
||||
* @license GNU AGPL version 3 or any later version |
||||
* |
||||
* This program is free software: you can redistribute it and/or modify |
||||
* it under the terms of the GNU Affero General Public License as |
||||
* published by the Free Software Foundation, either version 3 of the |
||||
* License, or (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU Affero General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Affero General Public License |
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
* |
||||
*/ |
@ -0,0 +1,537 @@
|
||||
// tbd
|
||||
// - formula dependency list
|
||||
// - nested lookup/ rollup
|
||||
|
||||
const Api = require('nocodb-sdk').Api; |
||||
const { UITypes } = require('nocodb-sdk'); |
||||
const jsonfile = require('jsonfile'); |
||||
|
||||
let inputConfig = jsonfile.readFileSync(`config.json`) |
||||
let ncConfig = { |
||||
srcProject: inputConfig.srcProject, |
||||
projectName: inputConfig.dstProject, |
||||
baseURL: inputConfig.baseURL, |
||||
headers: { |
||||
'xc-auth': `${inputConfig["xc-auth"]}` |
||||
} |
||||
}; |
||||
let ncIn = jsonfile.readFileSync(`${ncConfig.srcProject}.json`); |
||||
|
||||
let api = {}; |
||||
let ncProject = {}; |
||||
let link = []; |
||||
let lookup = []; |
||||
let rollup = []; |
||||
let formula = []; |
||||
|
||||
let rootLinks = []; |
||||
|
||||
// maps v1 table ID, v2 table ID & table title to table schema
|
||||
let ncTables = {}; |
||||
|
||||
|
||||
async function createBaseTables() { |
||||
console.log(`createBaseTables`); |
||||
for (let i = 0; i < ncIn.length; i++) { |
||||
let tblSchema = ncIn[i]; |
||||
let reducedColumnSet = tblSchema.columns.filter( |
||||
a => |
||||
a.uidt !== UITypes.LinkToAnotherRecord && |
||||
a.uidt !== UITypes.Lookup && |
||||
a.uidt !== UITypes.Rollup && |
||||
a.uidt !== UITypes.Formula |
||||
); |
||||
link.push( |
||||
...tblSchema.columns.filter(a => a.uidt === UITypes.LinkToAnotherRecord) |
||||
); |
||||
lookup.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Lookup)); |
||||
rollup.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Rollup)); |
||||
formula.push(...tblSchema.columns.filter(a => a.uidt === UITypes.Formula)); |
||||
formula.map(a => (a['table_id'] = tblSchema.id)); |
||||
|
||||
let tbl = await api.dbTable.create(ncProject.id, { |
||||
title: tblSchema.title, |
||||
table_name: tblSchema.title, |
||||
columns: reducedColumnSet.map(({ id, ...rest }) => ({ ...rest })) |
||||
}); |
||||
ncTables[tbl.title] = tbl; |
||||
ncTables[tbl.id] = tbl; |
||||
ncTables[tblSchema.id] = tbl; |
||||
} |
||||
} |
||||
|
||||
let linksCreated = []; |
||||
function isLinkCreated(pId, cId) { |
||||
let idx = linksCreated.findIndex(a => a.cId === pId && a.pId === cId); |
||||
if (idx === -1) { |
||||
linksCreated.push({ pId: pId, cId: cId }); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
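A small illustration of why this helper exists: an `mm` relation shows up once on each side of the pair in the export, and only the first sighting should trigger column creation (the ids are illustrative):

```js
isLinkCreated('cl_parent', 'cl_child'); // false — first time this pair is seen, so the link gets created
isLinkCreated('cl_child', 'cl_parent'); // true  — the symmetric counterpart, so creation is skipped
```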
||||
|
||||
// retrieve nc-view column ID from corresponding nc-column ID
|
||||
async function nc_getViewColumnId(viewId, viewType, ncColumnId) { |
||||
// retrieve view Info
|
||||
let viewDetails; |
||||
|
||||
if (viewType === 'form') |
||||
viewDetails = (await api.dbView.formRead(viewId)).columns; |
||||
else if (viewType === 'gallery') |
||||
viewDetails = (await api.dbView.galleryRead(viewId)).columns; |
||||
else viewDetails = await api.dbView.gridColumnsList(viewId); |
||||
|
||||
return viewDetails.find(x => x.fk_column_id === ncColumnId)?.id; |
||||
} |
||||
|
||||
async function createFormula() { |
||||
for (let i = 0; i < formula.length; i++) { |
||||
let tbl = await api.dbTableColumn.create(ncTables[formula[i].table_id].id, { |
||||
uidt: UITypes.Formula, |
||||
title: formula[i].title, |
||||
formula_raw: formula[i].formula_raw |
||||
}); |
||||
} |
||||
} |
||||
|
||||
async function createLinks() { |
||||
console.log(`createLinks`); |
||||
|
||||
for (let i = 0; i < link.length; i++) { |
||||
if ( |
||||
(link[i].colOptions.type === 'mm' && |
||||
false === |
||||
isLinkCreated( |
||||
link[i].colOptions.fk_parent_column_id, |
||||
link[i].colOptions.fk_child_column_id |
||||
)) || |
||||
link[i].colOptions.type === 'hm' |
||||
) { |
||||
let srcTbl = ncTables[link[i].colOptions.fk_model_id]; |
||||
let dstTbl = ncTables[link[i].colOptions.fk_related_model_id]; |
||||
|
||||
// create link
|
||||
let tbl = await api.dbTableColumn.create(srcTbl.id, { |
||||
uidt: UITypes.LinkToAnotherRecord, |
||||
title: link[i].title, |
||||
parentId: srcTbl.id, |
||||
childId: dstTbl.id, |
||||
type: link[i].colOptions.type |
||||
}); |
||||
ncTables[tbl.title] = tbl; |
||||
ncTables[tbl.id] = tbl; |
||||
ncTables[link[i].colOptions.fk_model_id] = tbl; |
||||
|
||||
// for data-link procedure later
|
||||
rootLinks.push({ linkColumn: link[i], linkSrcTbl: srcTbl }); |
||||
|
||||
// update the symmetric (reverse) link column
|
||||
//
|
||||
let v2ColSchema = tbl.columns.find(x => x.title === link[i].title); |
||||
// read related table again after link is created
|
||||
dstTbl = await api.dbTable.read(dstTbl.id); |
||||
let v2SymmetricColumn = |
||||
link[i].colOptions.type === 'mm' |
||||
? dstTbl.columns.find( |
||||
x => |
||||
x.uidt === UITypes.LinkToAnotherRecord && |
||||
x?.colOptions.fk_parent_column_id === |
||||
v2ColSchema.colOptions.fk_child_column_id && |
||||
x?.colOptions.fk_child_column_id === |
||||
v2ColSchema.colOptions.fk_parent_column_id |
||||
) |
||||
: dstTbl.columns.find( |
||||
x => |
||||
x.uidt === UITypes.LinkToAnotherRecord && |
||||
x?.colOptions.fk_parent_column_id === |
||||
v2ColSchema.colOptions.fk_parent_column_id && |
||||
x?.colOptions.fk_child_column_id === |
||||
v2ColSchema.colOptions.fk_child_column_id |
||||
); |
||||
let v1SymmetricColumn = |
||||
link[i].colOptions.type === 'mm' |
||||
? link.find( |
||||
x => |
||||
x.colOptions.fk_parent_column_id === |
||||
link[i].colOptions.fk_child_column_id && |
||||
x.colOptions.fk_child_column_id === |
||||
link[i].colOptions.fk_parent_column_id && |
||||
x.colOptions.type === 'mm' |
||||
) |
||||
: link.find( |
||||
x => |
||||
x.colOptions.fk_parent_column_id === |
||||
link[i].colOptions.fk_parent_column_id && |
||||
x.colOptions.fk_child_column_id === |
||||
link[i].colOptions.fk_child_column_id && |
||||
x.colOptions.type === 'bt' |
||||
); |
||||
|
||||
tbl = await api.dbTableColumn.update(v2SymmetricColumn.id, { |
||||
...v2SymmetricColumn, |
||||
title: v1SymmetricColumn.title, |
||||
column_name: null |
||||
}); |
||||
ncTables[tbl.title] = tbl; |
||||
ncTables[tbl.id] = tbl; |
||||
ncTables[v1SymmetricColumn.colOptions.fk_model_id] = tbl; |
||||
} |
||||
} |
||||
} |
||||
|
||||
function get_v2Id(v1ColId) { |
||||
for (let i = 0; i < ncIn.length; i++) { |
||||
let tblSchema = ncIn[i]; |
||||
let colSchema = {}; |
||||
if ( |
||||
undefined !== (colSchema = tblSchema.columns.find(x => x.id === v1ColId)) |
||||
) { |
||||
let colName = colSchema.title; |
||||
let v2Tbl = ncTables[tblSchema.id]; |
||||
return v2Tbl.columns.find(y => y.title === colName)?.id; |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function createLookup() { |
||||
console.log(`createLookup`); |
||||
|
||||
for (let i = 0; i < lookup.length; i++) { |
||||
let srcTbl = ncTables[lookup[i].colOptions.fk_model_id]; |
||||
let v2_fk_relation_column_id = get_v2Id( |
||||
lookup[i].colOptions.fk_relation_column_id |
||||
); |
||||
let v2_lookup_column_id = get_v2Id( |
||||
lookup[i].colOptions.fk_lookup_column_id |
||||
); |
||||
|
||||
if (v2_lookup_column_id) { |
||||
let tbl = await api.dbTableColumn.create(srcTbl.id, { |
||||
uidt: UITypes.Lookup, |
||||
title: lookup[i].title, |
||||
fk_relation_column_id: v2_fk_relation_column_id, |
||||
fk_lookup_column_id: v2_lookup_column_id |
||||
}); |
||||
ncTables[tbl.title] = tbl; |
||||
ncTables[tbl.id] = tbl; |
||||
ncTables[lookup[i].colOptions.fk_model_id] = tbl; |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function createRollup() { |
||||
console.log(`createRollup`); |
||||
|
||||
for (let i = 0; i < rollup.length; i++) { |
||||
let srcTbl = ncTables[rollup[i].colOptions.fk_model_id]; |
||||
let v2_fk_relation_column_id = get_v2Id( |
||||
rollup[i].colOptions.fk_relation_column_id |
||||
); |
||||
let v2_rollup_column_id = get_v2Id( |
||||
rollup[i].colOptions.fk_rollup_column_id |
||||
); |
||||
|
||||
if (v2_rollup_column_id) { |
||||
let tbl = await api.dbTableColumn.create(srcTbl.id, { |
||||
uidt: UITypes.Rollup, |
||||
title: rollup[i].title, |
||||
column_name: rollup[i].title, |
||||
fk_relation_column_id: v2_fk_relation_column_id, |
||||
fk_rollup_column_id: v2_rollup_column_id, |
||||
rollup_function: rollup[i].colOptions.rollup_function |
||||
}); |
||||
ncTables[tbl.title] = tbl; |
||||
ncTables[tbl.id] = tbl; |
||||
ncTables[rollup[i].colOptions.fk_model_id] = tbl; |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function configureGrid() { |
||||
console.log(`configureGrid`); |
||||
|
||||
for (let i = 0; i < ncIn.length; i++) { |
||||
let tblSchema = ncIn[i]; |
||||
let tblId = ncTables[tblSchema.id].id; |
||||
let gridList = tblSchema.views.filter(a => a.type === 3); |
||||
let srcTbl = await api.dbTable.read(tblId); |
||||
|
||||
const view = await api.dbView.list(tblId); |
||||
|
||||
// create / rename view
|
||||
for (let gridCnt = 0; gridCnt < gridList.length; gridCnt++) { |
||||
let viewCreated = {}; |
||||
// rename first view; default view already created
|
||||
if (gridCnt === 0) { |
||||
viewCreated = await api.dbView.update(view.list[0].id, { |
||||
title: gridList[gridCnt].title |
||||
}); |
||||
} |
||||
// create new views
|
||||
else { |
||||
viewCreated = await api.dbView.gridCreate(tblId, { |
||||
title: gridList[gridCnt].title |
||||
}); |
||||
} |
||||
|
||||
// retrieve view Info
|
||||
let viewId = viewCreated.id; |
||||
let viewDetails = await api.dbView.gridColumnsList(viewId); |
||||
|
||||
// column visibility
|
||||
for ( |
||||
let colCnt = 0; |
||||
colCnt < gridList[gridCnt].columns.length; |
||||
colCnt++ |
||||
) { |
||||
let ncColumnId = srcTbl.columns.find( |
||||
a => a.title === gridList[gridCnt].columns[colCnt].title |
||||
)?.id; |
||||
// let ncViewColumnId = await nc_getViewColumnId( viewCreated.id, "grid", ncColumnId )
|
||||
let ncViewColumnId = viewDetails.find( |
||||
x => x.fk_column_id === ncColumnId |
||||
)?.id; |
||||
// column order & visibility
|
||||
await api.dbViewColumn.update(viewCreated.id, ncViewColumnId, { |
||||
show: gridList[gridCnt].columns[colCnt].show, |
||||
order: gridList[gridCnt].columns[colCnt].order |
||||
}); |
||||
await api.dbView.gridColumnUpdate(ncViewColumnId, { |
||||
width: gridList[gridCnt].columns[colCnt].width |
||||
}); |
||||
} |
||||
|
||||
// sort
|
||||
for (let sCnt = 0; sCnt < gridList[gridCnt].sort.length; sCnt++) { |
||||
let sColName = tblSchema.columns.find( |
||||
a => gridList[gridCnt].sort[sCnt].fk_column_id === a.id |
||||
).title; |
||||
await api.dbTableSort.create(viewId, { |
||||
fk_column_id: srcTbl.columns.find(a => a.title === sColName)?.id, |
||||
direction: gridList[gridCnt].sort[sCnt].direction |
||||
}); |
||||
} |
||||
|
||||
// filter
|
||||
for (let fCnt = 0; fCnt < gridList[gridCnt].filter.length; fCnt++) { |
||||
let fColName = tblSchema.columns.find( |
||||
a => gridList[gridCnt].sort[fCnt].fk_column_id === a.id |
||||
).title; |
||||
await api.dbTableFilter.create(viewId, { |
||||
...gridList[gridCnt].filter[fCnt], |
||||
fk_column_id: srcTbl.columns.find(a => a.title === fColName)?.id |
||||
}); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function configureGallery() { |
||||
console.log(`configureGallery`); |
||||
|
||||
for (let i = 0; i < ncIn.length; i++) { |
||||
let tblSchema = ncIn[i]; |
||||
let tblId = ncTables[tblSchema.id].id; |
||||
let galleryList = tblSchema.views.filter(a => a.type === 2); |
||||
for (let cnt = 0; cnt < galleryList.length; cnt++) { |
||||
const viewCreated = await api.dbView.galleryCreate(tblId, { |
||||
title: galleryList[cnt].title |
||||
}); |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function configureForm() { |
||||
console.log(`configureForm`); |
||||
|
||||
for (let i = 0; i < ncIn.length; i++) { |
||||
let tblSchema = ncIn[i]; |
||||
let tblId = ncTables[tblSchema.id].id; |
||||
let formList = tblSchema.views.filter(a => a.type === 1); |
||||
let srcTbl = await api.dbTable.read(tblId); |
||||
|
||||
for (let formCnt = 0; formCnt < formList.length; formCnt++) { |
||||
const formData = { |
||||
title: formList[formCnt].title, |
||||
...formList[formCnt].property |
||||
}; |
||||
const viewCreated = await api.dbView.formCreate(tblId, formData); |
||||
|
||||
// column visibility
|
||||
for ( |
||||
let colCnt = 0; |
||||
colCnt < formList[formCnt].columns.length; |
||||
colCnt++ |
||||
) { |
||||
let ncColumnId = srcTbl.columns.find( |
||||
a => a.title === formList[formCnt].columns[colCnt].title |
||||
)?.id; |
||||
let ncViewColumnId = await nc_getViewColumnId( |
||||
viewCreated.id, |
||||
'form', |
||||
ncColumnId |
||||
); |
||||
// column order & visibility
|
||||
await api.dbView.formColumnUpdate(ncViewColumnId, { |
||||
show: formList[formCnt].columns[colCnt].show, |
||||
order: formList[formCnt].columns[colCnt].order, |
||||
label: formList[formCnt].columns[colCnt].label, |
||||
description: formList[formCnt].columns[colCnt].description, |
||||
required: formList[formCnt].columns[colCnt].required |
||||
}); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function restoreBaseData() { |
||||
console.log(`restoreBaseData`); |
||||
|
||||
for (let i = 0; i < ncIn.length; i++) { |
||||
let tblSchema = ncIn[i]; |
||||
let tblId = ncTables[tblSchema.id].id; |
||||
let pk = tblSchema.columns.find(a => a.pk).title; |
||||
|
||||
let moreRecords = true; |
||||
let offset = 0, |
||||
limit = 25; |
||||
|
||||
while (moreRecords) { |
||||
let recList = await api.dbTableRow.list( |
||||
'nc', |
||||
ncConfig.srcProject, |
||||
tblSchema.title, |
||||
{}, |
||||
{ |
||||
query: { limit: limit, offset: offset } |
||||
} |
||||
); |
||||
moreRecords = !recList.pageInfo.isLastPage; |
||||
offset += limit; |
||||
|
||||
for (let recCnt = 0; recCnt < recList.list.length; recCnt++) { |
||||
let record = await api.dbTableRow.read( |
||||
'nc', |
||||
ncConfig.srcProject, |
||||
tblSchema.title, |
||||
recList.list[recCnt][pk] |
||||
); |
||||
|
||||
// post-processing on the record
|
||||
for (const [key, value] of Object.entries(record)) { |
||||
let table = ncTables[tblId]; |
||||
// retrieve datatype
|
||||
const dt = table.columns.find(x => x.title === key)?.uidt; |
||||
if (dt === UITypes.LinkToAnotherRecord) delete record[key]; |
||||
if (dt === UITypes.Lookup) delete record[key]; |
||||
if (dt === UITypes.Rollup) delete record[key]; |
||||
} |
||||
await api.dbTableRow.create( |
||||
'nc', |
||||
ncConfig.projectName, |
||||
tblSchema.title, |
||||
record |
||||
); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function restoreLinks() { |
||||
console.log(`restoreLinks`); |
||||
|
||||
for (let i = 0; i < rootLinks.length; i++) { |
||||
let pk = rootLinks[i].linkSrcTbl.columns.find(a => a.pk).title; |
||||
let moreRecords = true; |
||||
let offset = 0, |
||||
limit = 25; |
||||
|
||||
while (moreRecords) { |
||||
let recList = await api.dbTableRow.list( |
||||
'nc', |
||||
ncConfig.srcProject, |
||||
rootLinks[i].linkSrcTbl.title, |
||||
{}, |
||||
{ |
||||
query: { limit: limit, offset: offset } |
||||
} |
||||
); |
||||
moreRecords = !recList.pageInfo.isLastPage; |
||||
offset += limit; |
||||
|
||||
for (let recCnt = 0; recCnt < recList.list.length; recCnt++) { |
||||
let record = await api.dbTableRow.read( |
||||
'nc', |
||||
ncConfig.srcProject, |
||||
rootLinks[i].linkSrcTbl.title, |
||||
recList.list[recCnt][pk] |
||||
); |
||||
let linkField = record[rootLinks[i].linkColumn.title]; |
||||
if (linkField.length) { |
||||
await api.dbTableRow.nestedAdd( |
||||
'nc', |
||||
ncConfig.projectName, |
||||
rootLinks[i].linkSrcTbl.title, |
||||
record[pk], |
||||
rootLinks[i].linkColumn.colOptions.type, |
||||
encodeURIComponent(rootLinks[i].linkColumn.title), |
||||
linkField[0][pk] |
||||
); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
async function importSchema() { |
||||
api = new Api(ncConfig); |
||||
|
||||
const x = await api.project.list(); |
||||
const p = x.list.find(a => a.title === ncConfig.projectName); |
||||
if (p) await api.project.delete(p.id); |
||||
ncProject = await api.project.create({ title: ncConfig.projectName }); |
||||
|
||||
await createBaseTables(); |
||||
await createLinks(); |
||||
await createLookup(); |
||||
await createRollup(); |
||||
await createFormula(); |
||||
|
||||
// configure views
|
||||
await configureGrid(); |
||||
await configureGallery(); |
||||
await configureForm(); |
||||
|
||||
// restore data only if source project exists
|
||||
const p2 = x.list.find(a => a.title === ncConfig.srcProject); |
||||
if (p2 !== undefined) { |
||||
await restoreBaseData(); |
||||
await restoreLinks(); |
||||
} |
||||
} |
||||
(async () => { |
||||
await importSchema(); |
||||
console.log('completed'); |
||||
})().catch(e => console.log(e)); |
||||
|
||||
/** |
||||
* @copyright Copyright (c) 2021, Xgene Cloud Ltd |
||||
* |
||||
* @author Raju Udava <sivadstala@gmail.com> |
||||
* |
||||
* @license GNU AGPL version 3 or any later version |
||||
* |
||||
* This program is free software: you can redistribute it and/or modify |
||||
* it under the terms of the GNU Affero General Public License as |
||||
* published by the Free Software Foundation, either version 3 of the |
||||
* License, or (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU Affero General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Affero General Public License |
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
* |
||||
*/ |
File diff suppressed because it is too large
@ -0,0 +1,17 @@
|
||||
version: "2.1" |
||||
|
||||
services: |
||||
pg96: |
||||
image: postgres:9.6 |
||||
restart: always |
||||
environment: |
||||
POSTGRES_PASSWORD: password |
||||
ports: |
||||
- 5432:5432 |
||||
volumes: |
||||
- ../../packages/nocodb/tests/pg-cy-quick:/docker-entrypoint-initdb.d |
||||
healthcheck: |
||||
test: ["CMD-SHELL", "pg_isready -U postgres"] |
||||
interval: 10s |
||||
timeout: 5s |
||||
retries: 5 |
Binary file not shown.
Some files were not shown because too many files have changed in this diff