mirror of https://github.com/nocodb/nocodb
Anbarasu committed via GitHub, 1 month ago
41 changed files with 19158 additions and 21257 deletions
@@ -0,0 +1,161 @@
name: "Release : Docker"

on:
  # Triggered manually
  workflow_dispatch:
    inputs:
      tag:
        description: "Docker image tag"
        required: true
      targetEnv:
        description: "Target Environment"
        required: true
        type: choice
        options:
          - DEV
          - PROD
  # Triggered by release-nocodb.yml / release-nightly-dev.yml / release-pr.yml
  workflow_call:
    inputs:
      tag:
        description: "Docker image tag"
        required: true
        type: string
      targetEnv:
        description: "Target Environment"
        required: true
        type: string
      isDaily:
        description: "Is it triggered by daily schedule"
        required: false
        type: string
      currentVersion:
        description: "The current NocoDB version"
        required: false
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        required: true
      DOCKERHUB_TOKEN:
        required: true

jobs:
  buildx:
    runs-on: ubuntu-latest
    env:
      working-directory: ./packages/nocodb
    steps:
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 9

      - name: Get Docker Repository
        id: get-docker-repository
        run: |
          DOCKER_REPOSITORY=nocodb-daily
          DOCKER_BUILD_TAG=${{ github.event.inputs.tag || inputs.tag }}
          DOCKER_BUILD_LATEST_TAG=latest
          if [[ "$DOCKER_BUILD_TAG" =~ "-beta." ]]; then
            DOCKER_BUILD_LATEST_TAG=$(echo $DOCKER_BUILD_TAG | awk -F '-beta.' '{print $1}')-beta.latest
          fi
          if [[ ${{ github.event.inputs.targetEnv || inputs.targetEnv }} == 'DEV' ]]; then
            if [[ ${{ github.event.inputs.currentVersion || inputs.currentVersion || 'N/A' }} != 'N/A' ]]; then
              DOCKER_BUILD_TAG=${{ github.event.inputs.currentVersion || inputs.currentVersion }}-${{ github.event.inputs.tag || inputs.tag }}
            fi
            if [[ ${{ inputs.isDaily || 'N' }} == 'Y' ]]; then
              DOCKER_REPOSITORY=${DOCKER_REPOSITORY}-daily
            else
              DOCKER_REPOSITORY=${DOCKER_REPOSITORY}-timely
            fi
          fi
          echo "DOCKER_REPOSITORY=${DOCKER_REPOSITORY}" >> $GITHUB_OUTPUT
          echo "DOCKER_BUILD_TAG=${DOCKER_BUILD_TAG}" >> $GITHUB_OUTPUT
          echo "DOCKER_BUILD_LATEST_TAG=${DOCKER_BUILD_LATEST_TAG}" >> $GITHUB_OUTPUT
          echo DOCKER_REPOSITORY: ${DOCKER_REPOSITORY}
          echo DOCKER_BUILD_TAG: ${DOCKER_BUILD_TAG}
          echo DOCKER_BUILD_LATEST_TAG: ${DOCKER_BUILD_LATEST_TAG}

      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: 18.19.1

      - name: install dependencies
        run: pnpm bootstrap

      - name: Build gui and sdk
        run: |
          pnpm bootstrap &&
          cd packages/nc-gui &&
          pnpm run generate

          # copy build to nocodb
          rsync -rvzh ./dist/ ../nocodb/docker/nc-gui/

      - name: build nocodb
        run: |
          # build nocodb ( pack nocodb-sdk and nc-gui )
          cd packages/nocodb &&
          EE=true pnpm exec webpack --config webpack.timely.config.js &&
          # remove bundled libraries (nocodb-sdk, knex-snowflake)
          pnpm uninstall --save-prod nocodb-sdk

      - name: Update version in package.json
        run: |
          # update package.json
          cd packages/nocodb &&
          jq --arg VERSION "$VERSION" '.version = $VERSION' package.json > tmp.json &&
          mv tmp.json package.json
        env:
          VERSION: ${{ steps.get-docker-repository.outputs.DOCKER_BUILD_TAG }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2.1.0

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2.2.1

      - name: Cache Docker layers
        uses: actions/cache@v3
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Login to DockerHub
        uses: docker/login-action@v2.1.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v3.2.0
        with:
          context: ${{ env.working-directory }}
          file: ${{ env.working-directory }}/Dockerfile.timely
          build-args: NC_VERSION=${{ steps.get-docker-repository.outputs.DOCKER_BUILD_TAG }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new
          push: true
          tags: |
            nocodb/${{ steps.get-docker-repository.outputs.DOCKER_REPOSITORY }}:${{ steps.get-docker-repository.outputs.DOCKER_BUILD_TAG }}
            nocodb/${{ steps.get-docker-repository.outputs.DOCKER_REPOSITORY }}:${{ steps.get-docker-repository.outputs.DOCKER_BUILD_LATEST_TAG }}

      # Temp fix
      # https://github.com/docker/build-push-action/issues/252
      # https://github.com/moby/buildkit/issues/1896
      - name: Move cache
        run: |
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache
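
For reference, here is a minimal TypeScript sketch of the tag-derivation rules implemented by the "Get Docker Repository" step above. The deriveDockerTags function and its input shape are hypothetical illustrations of the shell logic, not part of the workflow.

// Illustration only: mirrors the "Get Docker Repository" step's shell logic.
interface TagInputs {
  tag: string
  targetEnv: 'DEV' | 'PROD'
  isDaily?: 'Y' | 'N'
  currentVersion?: string
}

function deriveDockerTags({ tag, targetEnv, isDaily = 'N', currentVersion }: TagInputs) {
  let repository = 'nocodb-daily'
  let buildTag = tag
  let latestTag = 'latest'

  // Beta tags get a rolling "beta.latest" tag, e.g. 0.204.0-beta.3 -> 0.204.0-beta.latest
  if (buildTag.includes('-beta.')) {
    latestTag = `${buildTag.split('-beta.')[0]}-beta.latest`
  }

  if (targetEnv === 'DEV') {
    // DEV builds are prefixed with the current NocoDB version when it is known
    if (currentVersion && currentVersion !== 'N/A') {
      buildTag = `${currentVersion}-${tag}`
    }
    // Daily-scheduled builds and other timely builds go to different repositories
    repository = isDaily === 'Y' ? `${repository}-daily` : `${repository}-timely`
  }

  return { repository, buildTag, latestTag }
}

// Example: deriveDockerTags({ tag: '0.204.0-beta.3', targetEnv: 'PROD' })
// -> { repository: 'nocodb-daily', buildTag: '0.204.0-beta.3', latestTag: '0.204.0-beta.latest' }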
@@ -0,0 +1,153 @@
import { useStorage, useTimeoutFn } from '@vueuse/core'

interface SharedExecutionOptions {
  timeout?: number // Maximum time a lock can be held before it's considered stale - default 5000ms
  storageDelay?: number // Delay before reading from storage to allow for changes to propagate - default 50ms
  debug?: boolean // Enable or disable debug logging
}

const tabId = `tab-${Math.random().toString(36).slice(2, 9)}`

/**
 * Creates a composable that ensures a function is executed only once across all tabs
 * @param key Unique key to identify the function
 * @param fn Function to be executed
 * @param options Optional configuration (timeout, storageDelay, debug)
 * @returns A wrapped function that ensures single execution across tabs
 */
export function useSharedExecutionFn<T>(key: string, fn: () => Promise<T> | T, options: SharedExecutionOptions = {}) {
  const { timeout = 5000, storageDelay = 50, debug = false } = options

  const storageResultKey = `nc-shared-execution-${key}-result`
  const storageLockKey = `nc-shared-execution-${key}-lock`
  const storageResultState = useStorage<{
    status?: 'success' | 'error'
    result?: T
    error?: any
  }>(storageResultKey, {})

  const debugLog = (...args: any[]) => {
    if (debug) console.log(`[${tabId}]`, ...args)
  }

  debugLog(`Tab initialized with ID: ${tabId}`)

  const getLock = (): { timestamp: number; tabId: string } | null => {
    try {
      return JSON.parse(localStorage.getItem(storageLockKey) || 'null')
    } catch (error) {
      debugLog(`Error reading lock:`, error)
      return null
    }
  }

  const acquireLock = async (): Promise<boolean> => {
    let currentLock = getLock()
    const now = Date.now()

    if (!currentLock) {
      localStorage.setItem(storageLockKey, JSON.stringify({ timestamp: now, tabId }))

      // Allow storage updates to propagate - which will determine strictness of lock
      await new Promise((resolve) => setTimeout(resolve, storageDelay))

      currentLock = getLock()
      if (currentLock?.tabId === tabId) {
        debugLog(`Lock acquired successfully`)
        return true
      }

      debugLog(`Lock acquired by ${currentLock?.tabId}`)
      return false
    }

    const lockIsStale = now - currentLock.timestamp > timeout
    if (lockIsStale) {
      localStorage.setItem(storageLockKey, JSON.stringify({ timestamp: now, tabId }))

      // Allow storage updates to propagate - which will determine strictness of lock
      await new Promise((resolve) => setTimeout(resolve, storageDelay))

      currentLock = getLock()
      if (currentLock?.tabId === tabId) {
        debugLog(`Stale lock acquired successfully`)
        return true
      }

      debugLog(`Stale lock acquired by ${currentLock?.tabId}`)
      return false
    }

    debugLog(`Lock is held by ${currentLock?.tabId}`)
    return false
  }

  const releaseLock = (): void => {
    const currentLock = getLock()
    if (currentLock?.tabId === tabId) {
      debugLog(`Releasing lock.`)
      localStorage.removeItem(storageLockKey)
    }
  }

  const sharedExecutionFn = async (): Promise<T> => {
    debugLog(`sharedExecutionFn called`)

    if (!(await acquireLock())) {
      const currentLock = getLock()
      return new Promise((resolve, reject) => {
        let timedOut = false

        const { start: startTimeout, stop: stopTimeout } = useTimeoutFn(
          () => {
            timedOut = true
            localStorage.removeItem(storageLockKey)
            reject(new Error(`Timeout waiting for result on key ${key}`))
          },
          currentLock?.timestamp ? timeout - (Date.now() - currentLock.timestamp) : timeout,
        )

        startTimeout()

        if (storageResultState.value.status) {
          storageResultState.value = { ...storageResultState.value, status: undefined }
        }

        until(() => storageResultState.value)
          .toMatch((v) => v.status === 'success' || v.status === 'error')
          .then((res) => {
            if (timedOut) return

            stopTimeout()
            const { result, error } = res
            result ? resolve(result) : reject(error)
          })
      })
    }

    try {
      storageResultState.value = { ...storageResultState.value, status: undefined }
      const result = await fn()
      storageResultState.value = { status: 'success', result }
      return result
    } catch (error) {
      storageResultState.value = { status: 'error', error }
      throw error
    } finally {
      releaseLock()
      debugLog(`Function execution completed (success or failure).`)
    }
  }

  // Make sure to release lock on page unload
  onBeforeMount(() => {
    window.addEventListener('beforeunload', releaseLock)
  })

  // Remove listener on component unmount to avoid leaks
  onBeforeUnmount(() => {
    window.removeEventListener('beforeunload', releaseLock)
  })

  return sharedExecutionFn
}
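
A usage sketch for the composable above, assuming a Nuxt setup context where auto-imports resolve useSharedExecutionFn (as they do in nc-gui); the key name, endpoint, and refreshToken wrapper are hypothetical.

// Hypothetical usage: whichever tab wins the localStorage lock performs the work;
// the other tabs wait for the result written to shared storage.
// Assumes Nuxt auto-imports resolve useSharedExecutionFn; otherwise import it explicitly.
const refreshToken = useSharedExecutionFn(
  'token-refresh', // hypothetical key
  async () => {
    // hypothetical endpoint; replace with the real refresh call
    const res = await fetch('/api/v1/auth/token/refresh', { method: 'POST' })
    return (await res.json()) as { token: string }
  },
  { timeout: 5000, debug: true },
)

// Every tab can call this; only one tab actually executes the wrapped function.
const { token } = await refreshToken()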
Two file diffs suppressed because they are too large.
@@ -0,0 +1,80 @@
# syntax=docker/dockerfile:1

###########
# Litestream Builder
###########
FROM golang:alpine3.19 as lt-builder

WORKDIR /usr/src/

RUN apk add --no-cache git make musl-dev gcc

# build litestream
RUN git clone https://github.com/benbjohnson/litestream.git litestream
RUN cd litestream && go install ./cmd/litestream
RUN cp $GOPATH/bin/litestream /usr/src/lt

###########
# Builder
###########
FROM node:18.19.1-alpine as builder
WORKDIR /usr/src/app

# install node-gyp dependencies
RUN apk add --no-cache python3 make g++

# install pnpm
RUN corepack enable && corepack prepare pnpm@latest --activate

# Copy application dependency manifests to the container image.
COPY --link ./package.json ./package.json
COPY --link ./docker/main.js ./docker/main.js
COPY --link ./docker/start-litestream.sh /usr/src/appEntry/start.sh
COPY --link src/public/ ./docker/public/
COPY --link ./docker/nc-gui/ ./docker/nc-gui/

# for pnpm to generate a flat node_modules without symlinks
# so that modclean could work as expected
RUN echo "node-linker=hoisted" > .npmrc

# install production dependencies,
# reduce node_module size with modclean & removing sqlite deps,
# and add execute permission to start.sh
RUN pnpm install --prod --shamefully-hoist \
    && pnpm dlx modclean --patterns="default:*" --ignore="nc-lib-gui/**,dayjs/**,express-status-monitor/**,@azure/msal-node/dist/**" --run \
    && rm -rf ./node_modules/sqlite3/deps \
    && chmod +x /usr/src/appEntry/start.sh

##########
# Runner
##########
FROM alpine:3.19
WORKDIR /usr/src/app

ENV LITESTREAM_S3_SKIP_VERIFY=false \
    LITESTREAM_RETENTION=1440h \
    LITESTREAM_RETENTION_CHECK_INTERVAL=72h \
    LITESTREAM_SNAPSHOT_INTERVAL=24h \
    LITESTREAM_SYNC_INTERVAL=60s \
    NC_DOCKER=0.6 \
    NC_TOOL_DIR=/usr/app/data/ \
    NODE_ENV=production \
    PORT=8080

RUN apk add --update --no-cache \
    dasel \
    dumb-init \
    nodejs

# Copy litestream binary and config file
COPY --link --from=lt-builder /usr/src/lt /usr/local/bin/litestream
COPY --link ./docker/litestream.yml /etc/litestream.yml
# Copy production code & main entry file
COPY --link --from=builder /usr/src/app/ /usr/src/app/
COPY --link --from=builder /usr/src/appEntry/ /usr/src/appEntry/

EXPOSE 8080
ENTRYPOINT ["/usr/bin/dumb-init", "--"]

# Start Nocodb
CMD ["/usr/src/appEntry/start.sh"]
@@ -0,0 +1,21 @@
import path from 'path';
import cors from 'cors';
import express from 'express';

import Noco from '~/Noco';

const server = express();
server.enable('trust proxy');
server.use(cors());
server.use(
  process.env.NC_DASHBOARD_URL ?? '/dashboard',
  express.static(path.join(__dirname, 'nc-gui')),
);
server.set('view engine', 'ejs');

(async () => {
  const httpServer = server.listen(process.env.PORT || 8080, async () => {
    console.log(`App started successfully.\nVisit -> ${Noco.dashboardUrl}`);
    server.use(await Noco.init({}, httpServer, server));
  });
})().catch((e) => console.log(e));
@@ -0,0 +1,57 @@
const path = require('path');
const nodeExternals = require('webpack-node-externals');
const webpack = require('webpack');
const CopyPlugin = require('copy-webpack-plugin');
const TerserPlugin = require('terser-webpack-plugin');
const { resolveTsAliases } = require('./build-utils/resolveTsAliases');

module.exports = {
  entry: './src/run/timely.ts',
  module: {
    rules: [
      {
        test: /\.tsx?$/,
        exclude: /node_modules/,
        use: {
          loader: 'ts-loader',
          options: {
            transpileOnly: true,
          },
        },
      },
    ],
  },

  optimization: {
    minimize: true, // Update this to true or false
    minimizer: [new TerserPlugin()],
    nodeEnv: false,
  },
  externals: [
    nodeExternals({
      allowlist: ['nocodb-sdk'],
    }),
  ],
  resolve: {
    extensions: ['.tsx', '.ts', '.js', '.json'],
    alias: resolveTsAliases(path.resolve('./tsconfig.json')),
  },
  mode: 'production',
  output: {
    filename: 'main.js',
    path: path.resolve(__dirname, 'docker'),
    library: 'libs',
    libraryTarget: 'umd',
    globalObject: "typeof self !== 'undefined' ? self : this",
  },
  node: {
    __dirname: false,
  },
  plugins: [
    new webpack.EnvironmentPlugin(['EE']),
    new CopyPlugin({
      patterns: [{ from: 'src/public', to: 'public' }],
    }),
  ],
  target: 'node',
};
@@ -1,7 +1,7 @@
 packages:
-  - 'packages/nocodb-sdk'
-  - 'packages/nc-gui'
-  - 'packages/nc-mail-templates'
-  - 'packages/nocodb'
-  - 'tests/playwright'
-  - 'packages/nc-secret-mgr'
+  - "packages/nocodb-sdk"
+  - "packages/nc-gui"
+  - "packages/nc-mail-templates"
+  - "packages/nocodb"
+  - "tests/playwright"
+  - "packages/nc-secret-mgr"