Browse Source

Merge remote-tracking branch 'upstream/dev' into spilit

pull/3/MERGE
lenboo 3 years ago
parent
commit
6ab3892989
  1. 2
      docker/build/README.md
  2. 2
      docker/build/README_zh_CN.md
  3. 2
      docker/build/conf/dolphinscheduler/env/dolphinscheduler_env.sh.tpl
  4. 2
      docker/build/startup-init-conf.sh
  5. 2
      docker/docker-swarm/docker-compose.yml
  6. 2
      docker/docker-swarm/docker-stack.yml
  7. 4
      docker/kubernetes/dolphinscheduler/values.yaml
  8. 14
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java
  9. 3
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/EnvFileTest.java
  10. 2
      script/env/dolphinscheduler_env.sh

2
docker/build/README.md

@@ -193,7 +193,7 @@ This environment variable sets `SPARK_HOME2`. The default value is `/opt/soft/sp
**`PYTHON_HOME`**
This environment variable sets `PYTHON_HOME`. The default value is `/usr/bin/python`.
This environment variable sets `PYTHON_HOME`. The default value is `/usr`.
**`JAVA_HOME`**

2
docker/build/README_zh_CN.md

@@ -193,7 +193,7 @@ DolphinScheduler Docker 容器通过环境变量进行配置,缺省时将会
**`PYTHON_HOME`**
配置`dolphinscheduler`的`PYTHON_HOME`,默认值 `/usr/bin/python`
配置`dolphinscheduler`的`PYTHON_HOME`,默认值 `/usr`
**`JAVA_HOME`**

2
docker/build/conf/dolphinscheduler/env/dolphinscheduler_env.sh.tpl vendored

@@ -25,4 +25,4 @@ export HIVE_HOME=$HIVE_HOME
export FLINK_HOME=$FLINK_HOME
export DATAX_HOME=$DATAX_HOME
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH:$FLINK_HOME/bin:$DATAX_HOME:$PATH
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME/bin:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH:$FLINK_HOME/bin:$DATAX_HOME:$PATH

2
docker/build/startup-init-conf.sh

@@ -42,7 +42,7 @@ export HADOOP_HOME=${HADOOP_HOME:-"/opt/soft/hadoop"}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/opt/soft/hadoop/etc/hadoop"}
export SPARK_HOME1=${SPARK_HOME1:-"/opt/soft/spark1"}
export SPARK_HOME2=${SPARK_HOME2:-"/opt/soft/spark2"}
export PYTHON_HOME=${PYTHON_HOME:-"/usr/bin/python"}
export PYTHON_HOME=${PYTHON_HOME:-"/usr"}
export JAVA_HOME=${JAVA_HOME:-"/usr/lib/jvm/java-1.8-openjdk"}
export HIVE_HOME=${HIVE_HOME:-"/opt/soft/hive"}
export FLINK_HOME=${FLINK_HOME:-"/opt/soft/flink"}

2
docker/docker-swarm/docker-compose.yml

@@ -177,7 +177,7 @@ services:
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME1: "/opt/soft/spark1"
SPARK_HOME2: "/opt/soft/spark2"
PYTHON_HOME: "/usr/bin/python"
#PYTHON_HOME: "/opt/soft/python"
JAVA_HOME: "/usr/lib/jvm/java-1.8-openjdk"
HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink"

2
docker/docker-swarm/docker-stack.yml

@@ -171,7 +171,7 @@ services:
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME1: "/opt/soft/spark1"
SPARK_HOME2: "/opt/soft/spark2"
PYTHON_HOME: "/usr/bin/python"
#PYTHON_HOME: "/opt/soft/python"
JAVA_HOME: "/usr/lib/jvm/java-1.8-openjdk"
HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink"

4
docker/kubernetes/dolphinscheduler/values.yaml

@@ -76,12 +76,12 @@ common:
- "export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop"
- "export SPARK_HOME1=/opt/soft/spark1"
- "export SPARK_HOME2=/opt/soft/spark2"
- "export PYTHON_HOME=/usr/bin/python"
#- "export PYTHON_HOME=/opt/soft/python"
- "export JAVA_HOME=/usr/lib/jvm/java-1.8-openjdk"
- "export HIVE_HOME=/opt/soft/hive"
- "export FLINK_HOME=/opt/soft/flink"
- "export DATAX_HOME=/opt/soft/datax/bin/datax.py"
- "export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH"
- "export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
RESOURCE_STORAGE_TYPE: "HDFS"
RESOURCE_UPLOAD_PATH: "/dolphinscheduler"

14
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java

@@ -109,8 +109,8 @@ public class PythonCommandExecutor extends AbstractCommandExecutor {
}
/**
* get python home
* @return python home
* Gets the path of the Python command to execute
* @return python command path
*/
@Override
protected String commandInterpreter() {
@@ -118,21 +118,17 @@ public class PythonCommandExecutor extends AbstractCommandExecutor {
if (StringUtils.isEmpty(pythonHome)){
return PYTHON;
}
return pythonHome;
return pythonHome + "/bin/python";
}
/**
* get the absolute path of the Python command
* get the absolute path of where Python is installed
* note :
* common.properties
* PYTHON_HOME configured under common.properties is Python absolute path, not PYTHON_HOME itself
*
* for example :
* your PYTHON_HOME is /opt/python3.7/
* you must set PYTHON_HOME to /opt/python3.7/python under common.properties
* dolphinscheduler.env.path file.
* your PYTHON_HOME is /opt/python3.7
*
* @param envPath env path
* @return python home

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/EnvFileTest.java

@@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.server.worker;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import java.io.BufferedReader;
@@ -51,7 +52,7 @@ public class EnvFileTest {
try {
br = new BufferedReader(new InputStreamReader(new FileInputStream(path)));
while ((line = br.readLine()) != null){
if (line.contains("PYTHON_HOME")){
if (line.contains(Constants.PYTHON_HOME)) {
sb.append(line);
break;
}

2
script/env/dolphinscheduler_env.sh vendored

@@ -25,4 +25,4 @@ export HIVE_HOME=/opt/soft/hive
export FLINK_HOME=/opt/soft/flink
export DATAX_HOME=/opt/soft/datax/bin/datax.py
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH:$FLINK_HOME/bin:$DATAX_HOME:$PATH
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME/bin:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH:$FLINK_HOME/bin:$DATAX_HOME:$PATH

Loading…
Cancel
Save