
Merge pull request #4887 from chengshiwen/fix-docker-invalid-volume

[Fix-4886][Docker] Fix invalid volume specification in Windows
dailidong committed 3 years ago via GitHub (commit b8788f007b)
7 changed files:
1. docker/build/Dockerfile (3 changed lines)
2. docker/build/conf/dolphinscheduler/common.properties.tpl (2 changed lines)
3. docker/build/conf/dolphinscheduler/env/dolphinscheduler_env.sh.tpl (18 changed lines)
4. docker/build/startup-init-conf.sh (19 changed lines)
5. docker/docker-swarm/docker-compose.yml (10 changed lines)
6. docker/docker-swarm/docker-stack.yml (18 changed lines)
7. docker/docker-swarm/dolphinscheduler_env.sh (28 changed lines)

docker/build/Dockerfile (3 changed lines)

@@ -44,11 +44,10 @@ COPY ./startup-init-conf.sh /root/startup-init-conf.sh
COPY ./startup.sh /root/startup.sh
COPY ./conf/dolphinscheduler/*.tpl /opt/dolphinscheduler/conf/
COPY ./conf/dolphinscheduler/logback/* /opt/dolphinscheduler/conf/
COPY ./conf/dolphinscheduler/env/dolphinscheduler_env.sh /opt/dolphinscheduler/conf/env/
COPY ./conf/dolphinscheduler/env/dolphinscheduler_env.sh.tpl /opt/dolphinscheduler/conf/env/
RUN dos2unix /root/checkpoint.sh && \
dos2unix /root/startup-init-conf.sh && \
dos2unix /root/startup.sh && \
dos2unix /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh && \
dos2unix /opt/dolphinscheduler/script/*.sh && \
dos2unix /opt/dolphinscheduler/bin/*.sh && \
rm -rf /bin/sh && \
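
Note that the image now ships only the template: the concrete dolphinscheduler_env.sh is generated when the container starts, so the copy and build-time dos2unix of that file are dropped. A quick way to check what the built image contains (a sketch only; the image tag is made up, the build may need extra prerequisites in docker/build/, and --entrypoint is used to bypass the normal startup script):

# Build the image from the docker/build context and list the env directory
docker build -t dolphinscheduler:build-test docker/build/
docker run --rm --entrypoint ls dolphinscheduler:build-test /opt/dolphinscheduler/conf/env/
# Expect to see dolphinscheduler_env.sh.tpl; the rendered .sh only appears
# after startup-init-conf.sh has run inside a started container.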

docker/build/conf/dolphinscheduler/common.properties.tpl (2 changed lines)

@@ -64,7 +64,7 @@ yarn.application.status.address=http://ds1:8088/ws/v1/cluster/apps/%s
yarn.job.history.status.address=http://ds1:19888/ws/v1/history/mapreduce/jobs/%s
# system env path, If you want to set your own path, you need to set this env file to an absolute path
dolphinscheduler.env.path=${DOLPHINSCHEDULER_ENV_PATH}
#dolphinscheduler.env.path=env/dolphinscheduler_env.sh
development.state=false
# kerberos tgt expire time, unit is hours
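
This property selects which env file task processes source at runtime. To see what the rendered common.properties inside a running container actually contains, a hedged check (the worker service name is an assumption; the rendered file sits next to its template with the .tpl suffix stripped):

docker-compose exec dolphinscheduler-worker \
    grep "dolphinscheduler.env.path" /opt/dolphinscheduler/conf/common.properties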

docker/build/conf/dolphinscheduler/env/dolphinscheduler_env.sh → docker/build/conf/dolphinscheduler/env/dolphinscheduler_env.sh.tpl (renamed, 18 changed lines)

@@ -15,14 +15,14 @@
# limitations under the License.
#
export HADOOP_HOME=/opt/soft/hadoop
export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop
export SPARK_HOME1=/opt/soft/spark1
export SPARK_HOME2=/opt/soft/spark2
export PYTHON_HOME=/usr/bin/python
export JAVA_HOME=/usr/lib/jvm/java-1.8-openjdk
export HIVE_HOME=/opt/soft/hive
export FLINK_HOME=/opt/soft/flink
export DATAX_HOME=/opt/soft/datax/bin/datax.py
export HADOOP_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_CONF_DIR
export SPARK_HOME1=$SPARK_HOME1
export SPARK_HOME2=$SPARK_HOME2
export PYTHON_HOME=$PYTHON_HOME
export JAVA_HOME=$JAVA_HOME
export HIVE_HOME=$HIVE_HOME
export FLINK_HOME=$FLINK_HOME
export DATAX_HOME=$DATAX_HOME
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH:$FLINK_HOME/bin:$DATAX_HOME:$PATH
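
Because the exports are now placeholders, the values written into the container's dolphinscheduler_env.sh come from the environment at startup, with the defaults added to startup-init-conf.sh below. A minimal sketch of the substitution, using the same eval/here-document rendering the startup script uses (the /tmp paths are throwaway examples):

# Default mirrors the one added in startup-init-conf.sh in this PR
export SPARK_HOME2=${SPARK_HOME2:-"/opt/soft/spark2"}

# A one-line template, kept literal thanks to the single quotes
echo 'export SPARK_HOME2=$SPARK_HOME2' > /tmp/demo_env.sh.tpl

# Render it the same way startup-init-conf.sh renders *.tpl files
file=/tmp/demo_env.sh.tpl
eval "cat << EOF
$(cat ${file})
EOF
" > ${file%.*}

cat /tmp/demo_env.sh
# -> export SPARK_HOME2=/opt/soft/spark2 (or whatever SPARK_HOME2 was set to)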

docker/build/startup-init-conf.sh (19 changed lines)

@@ -37,7 +37,17 @@ export DATABASE_PARAMS=${DATABASE_PARAMS:-"characterEncoding=utf8"}
#============================================================================
# Common
#============================================================================
export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"}
# dolphinscheduler env
export HADOOP_HOME=${HADOOP_HOME:-"/opt/soft/hadoop"}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/opt/soft/hadoop/etc/hadoop"}
export SPARK_HOME1=${SPARK_HOME1:-"/opt/soft/spark1"}
export SPARK_HOME2=${SPARK_HOME2:-"/opt/soft/spark2"}
export PYTHON_HOME=${PYTHON_HOME:-"/usr/bin/python"}
export JAVA_HOME=${JAVA_HOME:-"/usr/lib/jvm/java-1.8-openjdk"}
export HIVE_HOME=${HIVE_HOME:-"/opt/soft/hive"}
export FLINK_HOME=${FLINK_HOME:-"/opt/soft/flink"}
export DATAX_HOME=${DATAX_HOME:-"/opt/soft/datax/bin/datax.py"}
# common env
export DOLPHINSCHEDULER_DATA_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH:-"/tmp/dolphinscheduler"}
export DOLPHINSCHEDULER_OPTS=${DOLPHINSCHEDULER_OPTS:-""}
export RESOURCE_STORAGE_TYPE=${RESOURCE_STORAGE_TYPE:-"HDFS"}
@@ -83,9 +93,10 @@ export ALERT_LISTEN_HOST=${ALERT_LISTEN_HOST:-"127.0.0.1"}
export ALERT_PLUGIN_DIR=${ALERT_PLUGIN_DIR:-"lib/plugin/alert"}
echo "generate app config"
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
find ${DOLPHINSCHEDULER_HOME}/conf/ -name "*.tpl" | while read file; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
$(cat ${file})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
" > ${file%.*}
done
find ${DOLPHINSCHEDULER_HOME}/conf/ -name "*.sh" -exec chmod +x {} \;
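
The move from ls | grep to find is what lets this loop reach templates in subdirectories, in particular conf/env/dolphinscheduler_env.sh.tpl, and find returns full paths so ${file%.*} writes each rendered file next to its template. A small illustration of the difference (throwaway directory, not the real image layout):

mkdir -p /tmp/ds-conf/env
touch /tmp/ds-conf/common.properties.tpl /tmp/ds-conf/env/dolphinscheduler_env.sh.tpl

ls /tmp/ds-conf/ | grep ".tpl"
# -> common.properties.tpl                         (env/ is never descended into)

find /tmp/ds-conf/ -name "*.tpl"
# -> /tmp/ds-conf/common.properties.tpl
# -> /tmp/ds-conf/env/dolphinscheduler_env.sh.tpl  (subdirectories included, full paths)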

docker/docker-swarm/docker-compose.yml (10 changed lines)

@@ -162,6 +162,15 @@ services:
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUPS: "default"
WORKER_WEIGHT: "100"
HADOOP_HOME: "/opt/soft/hadoop"
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME1: "/opt/soft/spark1"
SPARK_HOME2: "/opt/soft/spark2"
PYTHON_HOME: "/usr/bin/python"
JAVA_HOME: "/usr/lib/jvm/java-1.8-openjdk"
HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink"
DATAX_HOME: "/opt/soft/datax/bin/datax.py"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: /tmp/dolphinscheduler
ALERT_LISTEN_HOST: dolphinscheduler-alert
DATABASE_HOST: dolphinscheduler-postgresql
@@ -183,7 +192,6 @@ services:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
- ./dolphinscheduler_env.sh:/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh
- dolphinscheduler-worker-data:/tmp/dolphinscheduler
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
- dolphinscheduler-resource-local:/dolphinscheduler
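
The ./dolphinscheduler_env.sh bind mount dropped here is the volume specification that broke on Windows hosts; its contents are replaced by the plain environment variables added to the worker's environment: block above. To change a value, edit it there and recreate the service; to confirm what a running worker picked up, a sketch (service name assumed to be dolphinscheduler-worker):

docker-compose up -d dolphinscheduler-worker
docker-compose exec dolphinscheduler-worker \
    grep SPARK_HOME2 /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh
# -> export SPARK_HOME2=/opt/soft/spark2   (the value from the environment: block)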

docker/docker-swarm/docker-stack.yml (18 changed lines)

@@ -156,6 +156,15 @@ services:
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUPS: "default"
WORKER_WEIGHT: "100"
HADOOP_HOME: "/opt/soft/hadoop"
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME1: "/opt/soft/spark1"
SPARK_HOME2: "/opt/soft/spark2"
PYTHON_HOME: "/usr/bin/python"
JAVA_HOME: "/usr/lib/jvm/java-1.8-openjdk"
HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink"
DATAX_HOME: "/opt/soft/datax/bin/datax.py"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: /tmp/dolphinscheduler
ALERT_LISTEN_HOST: dolphinscheduler-alert
DATABASE_HOST: dolphinscheduler-postgresql
@@ -173,9 +182,6 @@ services:
timeout: 5s
retries: 3
start_period: 30s
configs:
- source: dolphinscheduler-worker-task-env
target: /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh
volumes:
- dolphinscheduler-worker-data:/tmp/dolphinscheduler
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
@@ -193,8 +199,4 @@ volumes:
dolphinscheduler-postgresql:
dolphinscheduler-zookeeper:
dolphinscheduler-worker-data:
dolphinscheduler-logs:
configs:
dolphinscheduler-worker-task-env:
file: ./dolphinscheduler_env.sh
dolphinscheduler-logs:
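
On the swarm side the same variables move into environment: and the configs entry pointing at ./dolphinscheduler_env.sh is removed, so docker-stack.yml no longer references any host-side file. A deployment and verification sketch (stack and service names are assumptions):

docker stack deploy --compose-file docker-stack.yml dolphinscheduler
docker service inspect --format '{{ .Spec.TaskTemplate.ContainerSpec.Env }}' \
    dolphinscheduler_dolphinscheduler-worker
# Should list HADOOP_HOME, SPARK_HOME2, JAVA_HOME, etc. instead of a mounted env file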

docker/docker-swarm/dolphinscheduler_env.sh (deleted, 28 lines)

@@ -1,28 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
export HADOOP_HOME=/opt/soft/hadoop
export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop
export SPARK_HOME1=/opt/soft/spark1
export SPARK_HOME2=/opt/soft/spark2
export PYTHON_HOME=/usr/bin/python
export JAVA_HOME=/usr/lib/jvm/java-1.8-openjdk
export HIVE_HOME=/opt/soft/hive
export FLINK_HOME=/opt/soft/flink
export DATAX_HOME=/opt/soft/datax/bin/datax.py
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH:$FLINK_HOME/bin:$DATAX_HOME:$PATH