break60 4 years ago
parent
commit
b90461fae9
  1. 10
      .github/workflows/ci_ut.yml
  2. 2
      README.md
  3. 2
      ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py
  4. 6
      dockerfile/Dockerfile
  5. 2
      dockerfile/hooks/check
  6. 8
      dockerfile/startup.sh
  7. 2
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java
  8. 13
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java
  9. 54
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java
  10. 1
      dolphinscheduler-alert/src/main/resources/alert.properties
  11. 2
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/FuncUtilsTest.java
  12. 27
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java
  13. 4
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java
  14. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java
  15. 25
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java
  16. 5
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java
  17. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java
  18. 23
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java
  19. 5
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
  20. 4
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java
  21. 2
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java
  22. 14
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java
  23. 14
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java
  24. 17
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java
  25. 6
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
  26. 254
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
  27. 12
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
  28. 19
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java
  29. 5
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java
  30. 25
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java
  31. 2
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java
  32. 16
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java
  33. 4
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java
  34. 10
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java
  35. 6
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java
  36. 10
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java
  37. 57
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java
  38. 2
      dolphinscheduler-common/src/main/resources/common.properties
  39. 55
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java
  40. 17
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java
  41. 2
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java
  42. 6
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java
  43. 22
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java
  44. 8
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java
  45. 2
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java
  46. 134
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java
  47. 2
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java
  48. 2
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java
  49. 2
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java
  50. 8
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java
  51. 2
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java
  52. 4
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java
  53. 23
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java
  54. 39
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java
  55. 3
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java
  56. 3
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java
  57. 6
      dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java
  58. 53
      dolphinscheduler-dist/pom.xml
  59. 4
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java
  60. 4
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java
  61. 3
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java
  62. 21
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java
  63. 36
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java
  64. 67
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java
  65. 11
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java
  66. 2
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java
  67. 16
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java
  68. 22
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java
  69. 4
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java
  70. 45
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java
  71. 10
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java
  72. 4
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java
  73. 29
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java
  74. 17
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java
  75. 31
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java
  76. 9
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java
  77. 19
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java
  78. 11
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java
  79. 4
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java
  80. 24
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java
  81. 4
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java
  82. 4
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java
  83. 3
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java
  84. 10
      dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
  85. 8
      dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java
  86. 2
      dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java
  87. 7
      dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
  88. 55
      dolphinscheduler-ui/pom.xml
  89. 6
      dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js
  90. 4
      dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue
  91. 12
      dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue
  92. 10
      dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue
  93. 21
      dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue
  94. 71
      dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue
  95. 1
      dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
  96. 1
      dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
  97. 8
      e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java
  98. 17
      e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java
  99. 2
      e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java
  100. 2
      e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java
Some files were not shown because too many files have changed in this diff.

10
.github/workflows/ci_ut.yml

@@ -15,7 +15,7 @@
 # limitations under the License.
 #
-on: ["pull_request"]
+on: ["pull_request", "push"]
 env:
 DOCKER_DIR: ./docker
 LOG_DIR: /tmp/dolphinscheduler
@@ -52,7 +52,15 @@ jobs:
 run: |
 export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx3g'
 mvn test -B -Dmaven.test.skip=false
+- name: Upload coverage report to codecov
+  if: github.event_name == 'pull_request'
+  run: |
 CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash)
+- name: Git fetch unshallow
+  run: |
+    git fetch --unshallow
+    git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
+    git fetch origin
 - name: Run SonarCloud Analysis
 run: >
 mvn verify --batch-mode

2
README.md

@@ -17,7 +17,7 @@ Dolphin Scheduler Official Website
 ### Design features:
-A distributed and easy-to-expand visual DAG workflow scheduling system. Dedicated to solving the complex dependencies in data processing, making the scheduling system `out of the box` for data processing.
+A distributed and easy-to-extend visual DAG workflow scheduling system. Dedicated to solving the complex dependencies in data processing, making the scheduling system `out of the box` for data processing.
 Its main objectives are as follows:
 - Associate the Tasks according to the dependencies of the tasks in a DAG graph, which can visualize the running state of task in real time.

2
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py

@@ -30,7 +30,7 @@ sys.setdefaultencoding('utf-8')
 config = Script.get_config()
 # conf_dir = "/etc/"
-dolphin_home = "/opt/soft/apache-dolphinscheduler-incubating-1.2.1"
+dolphin_home = "/opt/soft/dolphinscheduler"
 dolphin_conf_dir = dolphin_home + "/conf"
 dolphin_log_dir = dolphin_home + "/logs"
 dolphin_bin_dir = dolphin_home + "/bin"

6
dockerfile/Dockerfile

@@ -23,11 +23,11 @@ ENV TZ Asia/Shanghai
 ENV LANG C.UTF-8
 ENV DEBIAN_FRONTEND noninteractive
-#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip kazoo.
+#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip tini kazoo.
 #If install slowly, you can replcae alpine's mirror with aliyun's mirror, Example:
 #RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
 RUN apk update && \
-apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip && \
+apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip tini && \
 apk add --update procps && \
 openrc boot && \
 pip install kazoo
@@ -92,4 +92,4 @@ RUN rm -rf /var/cache/apk/*
 #9. expose port
 EXPOSE 2181 2888 3888 5432 12345 50051 8888
-ENTRYPOINT ["/root/startup.sh"]
+ENTRYPOINT ["/sbin/tini", "--", "/root/startup.sh"]

2
dockerfile/hooks/check

@@ -17,7 +17,7 @@
 #
 echo "------ dolphinscheduler check - server - status -------"
 sleep 20
-server_num=$(docker top `docker container list | grep startup | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
+server_num=$(docker top `docker container list | grep '/sbin/tini' | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
 if [ $server_num -eq 5 ]
 then
 echo "Server all start successfully"

8
dockerfile/startup.sh

@ -164,6 +164,7 @@ case "$1" in
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-worker.log LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-worker.log
;; ;;
(api-server) (api-server)
initZK
initPostgreSQL initPostgreSQL
initApiServer initApiServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-api-server.log LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-api-server.log
@ -187,6 +188,9 @@ case "$1" in
;; ;;
esac esac
echo "tee begin" # init directories and log files
exec tee ${LOGFILE} mkdir -p ${DOLPHINSCHEDULER_LOGS} && mkdir -p /var/log/nginx/ && cat /dev/null >> ${LOGFILE}
echo "tail begin"
exec bash -c "tail -n 1 -f ${LOGFILE}"

2
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java

@@ -20,7 +20,7 @@ import org.apache.dolphinscheduler.common.utils.StringUtils;
 public class FuncUtils {
-static public String mkString(Iterable<String> list, String split) {
+public static String mkString(Iterable<String> list, String split) {
 if (null == list || StringUtils.isEmpty(split)){
 return null;

13
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java

@@ -16,12 +16,13 @@
 */
 package org.apache.dolphinscheduler.alert.utils;
+import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONArray;
-import com.alibaba.fastjson.JSONObject;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import java.util.Collections;
 import java.util.List;
 /**
@@ -38,7 +39,7 @@ public class JSONUtils {
 */
 public static String toJsonString(Object object) {
 try{
-return JSONObject.toJSONString(object,false);
+return JSON.toJSONString(object,false);
 } catch (Exception e) {
 throw new RuntimeException("Json deserialization exception.", e);
 }
@@ -50,19 +51,19 @@
 * @param json the json
 * @param clazz c
 * @param <T> the generic clazz
-* @return the result list
+* @return the result list or empty list
 */
 public static <T> List<T> toList(String json, Class<T> clazz) {
 if (StringUtils.isEmpty(json)) {
-return null;
+return Collections.emptyList();
 }
 try {
-return JSONArray.parseArray(json, clazz);
+return JSON.parseArray(json, clazz);
 } catch (Exception e) {
 logger.error("JSONArray.parseArray exception!",e);
 }
-return null;
+return Collections.emptyList();
 }
 }
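The behavioural point of this change: toList() now returns Collections.emptyList() instead of null for blank or malformed input, so callers can iterate the result without a null check. A minimal, self-contained sketch of the caller-side effect (the countAlerts helper and its input are illustrative, not part of the commit):

import java.util.Collections;
import java.util.List;

public class EmptyListDemo {

    // Hypothetical caller: with the old contract (null on bad input) this loop
    // would throw a NullPointerException; with an empty list it is simply skipped.
    static int countAlerts(List<String> alerts) {
        int count = 0;
        for (String alert : alerts) {
            count++;
        }
        return count;
    }

    public static void main(String[] args) {
        // Stands in for what JSONUtils.toList("}{", String.class) now returns.
        List<String> parsed = Collections.emptyList();
        System.out.println(countAlerts(parsed)); // prints 0 instead of throwing
    }
}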

54
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java

@@ -39,29 +39,29 @@ public class MailUtils {
 public static final Logger logger = LoggerFactory.getLogger(MailUtils.class);
-public static final String mailProtocol = PropertyUtils.getString(Constants.MAIL_PROTOCOL);
+public static final String MAIL_PROTOCOL = PropertyUtils.getString(Constants.MAIL_PROTOCOL);
-public static final String mailServerHost = PropertyUtils.getString(Constants.MAIL_SERVER_HOST);
+public static final String MAIL_SERVER_HOST = PropertyUtils.getString(Constants.MAIL_SERVER_HOST);
-public static final Integer mailServerPort = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT);
+public static final Integer MAIL_SERVER_PORT = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT);
-public static final String mailSender = PropertyUtils.getString(Constants.MAIL_SENDER);
+public static final String MAIL_SENDER = PropertyUtils.getString(Constants.MAIL_SENDER);
-public static final String mailUser = PropertyUtils.getString(Constants.MAIL_USER);
+public static final String MAIL_USER = PropertyUtils.getString(Constants.MAIL_USER);
-public static final String mailPasswd = PropertyUtils.getString(Constants.MAIL_PASSWD);
+public static final String MAIL_PASSWD = PropertyUtils.getString(Constants.MAIL_PASSWD);
-public static final Boolean mailUseStartTLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE);
+public static final Boolean MAIL_USE_START_TLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE);
-public static final Boolean mailUseSSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
+public static final Boolean MAIL_USE_SSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
-public static final String xlsFilePath = PropertyUtils.getString(Constants.XLS_FILE_PATH);
+public static final String XLS_FILE_PATH = PropertyUtils.getString(Constants.XLS_FILE_PATH);
-public static final String starttlsEnable = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
+public static final String STARTTLS_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
-public static final String sslEnable = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);
+public static final String SSL_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);
-public static final String sslTrust = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);
+public static final String SSL_TRUST = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);
 public static final AlertTemplate alertTemplate = AlertTemplateFactory.getMessageTemplate();
@@ -105,7 +105,7 @@ public class MailUtils {
 try {
 Session session = getSession();
 email.setMailSession(session);
-email.setFrom(mailSender);
+email.setFrom(MAIL_SENDER);
 email.setCharset(Constants.UTF_8);
 if (CollectionUtils.isNotEmpty(receivers)){
 // receivers mail
@@ -199,10 +199,10 @@ public class MailUtils {
 // 2. creating mail: Creating a MimeMessage
 MimeMessage msg = new MimeMessage(session);
 // 3. set sender
-msg.setFrom(new InternetAddress(mailSender));
+msg.setFrom(new InternetAddress(MAIL_SENDER));
 // 4. set receivers
 for (String receiver : receivers) {
-msg.addRecipients(MimeMessage.RecipientType.TO, InternetAddress.parse(receiver));
+msg.addRecipients(Message.RecipientType.TO, InternetAddress.parse(receiver));
 }
 return msg;
 }
@@ -213,19 +213,19 @@ public class MailUtils {
 */
 private static Session getSession() {
 Properties props = new Properties();
-props.setProperty(Constants.MAIL_HOST, mailServerHost);
+props.setProperty(Constants.MAIL_HOST, MAIL_SERVER_HOST);
-props.setProperty(Constants.MAIL_PORT, String.valueOf(mailServerPort));
+props.setProperty(Constants.MAIL_PORT, String.valueOf(MAIL_SERVER_PORT));
 props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE);
-props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, mailProtocol);
+props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, MAIL_PROTOCOL);
-props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, starttlsEnable);
+props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, STARTTLS_ENABLE);
-props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, sslEnable);
+props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, SSL_ENABLE);
-props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, sslTrust);
+props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, SSL_TRUST);
 Authenticator auth = new Authenticator() {
 @Override
 protected PasswordAuthentication getPasswordAuthentication() {
 // mail username and password
-return new PasswordAuthentication(mailUser, mailPasswd);
+return new PasswordAuthentication(MAIL_USER, MAIL_PASSWD);
 }
 };
@@ -248,12 +248,10 @@ public class MailUtils {
 */
 if(CollectionUtils.isNotEmpty(receiversCc)){
 for (String receiverCc : receiversCc){
-msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(receiverCc));
+msg.addRecipients(Message.RecipientType.CC, InternetAddress.parse(receiverCc));
 }
 }
-// set receivers type to cc
-// msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(propMap.get("${CC}")));
 // set subject
 msg.setSubject(title);
 MimeMultipart partList = new MimeMultipart();
@@ -263,8 +261,8 @@ public class MailUtils {
 // set attach file
 MimeBodyPart part2 = new MimeBodyPart();
 // make excel file
-ExcelUtils.genExcelFile(content,title,xlsFilePath);
+ExcelUtils.genExcelFile(content,title, XLS_FILE_PATH);
-File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
+File file = new File(XLS_FILE_PATH + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
 part2.attachFile(file);
 part2.setFileName(MimeUtility.encodeText(title + Constants.EXCEL_SUFFIX_XLS,Constants.UTF_8,"B"));
 // add components to collection
@@ -334,7 +332,7 @@ public class MailUtils {
 * @param e the exception
 */
 private static void handleException(Collection<String> receivers, Map<String, Object> retMap, Exception e) {
-logger.error("Send email to {} failed {}", receivers, e);
+logger.error("Send email to {} failed", receivers, e);
 retMap.put(Constants.MESSAGE, "Send email to {" + String.join(",", receivers) + "} failed," + e.toString());
 }
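On the switch from MimeMessage.RecipientType to Message.RecipientType: the latter is the base-class constant that MimeMessage.RecipientType merely extends, so the base constant is the conventional JavaMail choice and behaviour is identical. A small, hypothetical sketch (addresses and subject are placeholders, not taken from the commit):

import java.util.Properties;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;

public class RecipientTypeDemo {
    public static void main(String[] args) throws MessagingException {
        Session session = Session.getInstance(new Properties());
        MimeMessage msg = new MimeMessage(session);
        msg.setFrom(new InternetAddress("sender@example.com"));
        // Message.RecipientType.TO is the base constant; MimeMessage.RecipientType
        // only adds NEWSGROUPS on top of it, so the base constant is preferred.
        msg.addRecipients(Message.RecipientType.TO, InternetAddress.parse("receiver@example.com"));
        msg.setSubject("demo");
        msg.setText("hello");
    }
}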

1
dolphinscheduler-alert/src/main/resources/alert.properties

@@ -28,7 +28,6 @@ mail.server.port=25
 mail.sender=xxx@xxx.com
 mail.user=xxx@xxx.com
 mail.passwd=111111
-
 # TLS
 mail.smtp.starttls.enable=true
 # SSL

2
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/FuncUtilsTest.java

@@ -46,7 +46,7 @@ public class FuncUtilsTest {
 logger.info(result);
 //Expected result string
-assertEquals(result, "user1|user2|user3");
+assertEquals("user1|user2|user3", result);
 //Null list expected return null
 result = FuncUtils.mkString(null, split);

27
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java

@@ -26,8 +26,7 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.*;
 public class JSONUtilsTest {
@@ -73,7 +72,7 @@ public class JSONUtilsTest {
 result = JSONUtils.toJsonString(null);
 logger.info(result);
-assertEquals(result,"null");
+assertEquals("null", result);
 }
@@ -86,25 +85,27 @@
 //Invoke toList
 List<LinkedHashMap> result = JSONUtils.toList(expected ,LinkedHashMap.class);
 //Equal list size=1
-assertEquals(result.size(),1);
+assertEquals(1,result.size());
 //Transform entity to LinkedHashMap<String, Object>
 LinkedHashMap<String, Object> entity = result.get(0);
 //Equal expected values
-assertEquals(entity.get("mysql service name"),"mysql200");
+assertEquals("mysql200",entity.get("mysql service name"));
-assertEquals(entity.get("mysql address"),"192.168.xx.xx");
+assertEquals("192.168.xx.xx", entity.get("mysql address"));
-assertEquals(entity.get("port"),"3306");
+assertEquals("3306", entity.get("port"));
-assertEquals(entity.get("no index of number"),"80");
+assertEquals("80", entity.get("no index of number"));
-assertEquals(entity.get("database client connections"),"190");
+assertEquals("190", entity.get("database client connections"));
-//If param is null, then return null
+//If param is null, then return empty list
 result = JSONUtils.toList(null ,LinkedHashMap.class);
-assertNull(result);
+assertNotNull(result);
+assertTrue(result.isEmpty());
-//If param is incorrect, then return null and log error message
+//If param is incorrect, then return empty list and log error message
 result = JSONUtils.toList("}{" ,LinkedHashMap.class);
-assertNull(result);
+assertNotNull(result);
+assertTrue(result.isEmpty());
 }

4
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java

@@ -25,7 +25,9 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2;
 @SpringBootApplication
 @ServletComponentScan
-@ComponentScan("org.apache.dolphinscheduler")
+@ComponentScan({"org.apache.dolphinscheduler.api",
+"org.apache.dolphinscheduler.dao",
+"org.apache.dolphinscheduler.service"})
 public class ApiApplicationServer extends SpringBootServletInitializer {
 public static void main(String[] args) {

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java

@@ -27,7 +27,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.servlet.HandlerInterceptor;
-import org.springframework.web.servlet.ModelAndView;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
@@ -90,14 +89,4 @@ public class LoginHandlerInterceptor implements HandlerInterceptor {
 return true;
 }
-@Override
-public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
-}
-@Override
-public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
-}
 }
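The removed postHandle()/afterCompletion() bodies were empty overrides; since Spring 5, HandlerInterceptor declares them as default methods, so an interceptor only needs preHandle(). A minimal sketch assuming the project is on Spring 5 (the header-based check is purely illustrative):

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.servlet.HandlerInterceptor;

// Implements only preHandle(); postHandle() and afterCompletion() fall back to the
// interface's default no-op methods, which is why the empty overrides could be deleted.
public class MinimalLoginInterceptor implements HandlerInterceptor {

    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
        // Hypothetical check: reject requests that carry no session id header.
        return request.getHeader("sessionId") != null;
    }
}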

25
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java

@@ -16,6 +16,7 @@
 */
 package org.apache.dolphinscheduler.api.service;
+import com.alibaba.fastjson.JSON;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
@@ -303,7 +304,7 @@ public class DataSourceService extends BaseService{
 for (DataSource dataSource : dataSourceList) {
 String connectionParams = dataSource.getConnectionParams();
-JSONObject object = JSONObject.parseObject(connectionParams);
+JSONObject object = JSON.parseObject(connectionParams);
 object.put(Constants.PASSWORD, Constants.XXXXXX);
 dataSource.setConnectionParams(JSONUtils.toJson(object));
@@ -367,11 +368,11 @@ public class DataSourceService extends BaseService{
 try {
 switch (dbType) {
 case POSTGRESQL:
-datasource = JSONObject.parseObject(parameter, PostgreDataSource.class);
+datasource = JSON.parseObject(parameter, PostgreDataSource.class);
 Class.forName(Constants.ORG_POSTGRESQL_DRIVER);
 break;
 case MYSQL:
-datasource = JSONObject.parseObject(parameter, MySQLDataSource.class);
+datasource = JSON.parseObject(parameter, MySQLDataSource.class);
 Class.forName(Constants.COM_MYSQL_JDBC_DRIVER);
 break;
 case HIVE:
@@ -386,26 +387,26 @@ public class DataSourceService extends BaseService{
 getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
 }
 if (dbType == DbType.HIVE){
-datasource = JSONObject.parseObject(parameter, HiveDataSource.class);
+datasource = JSON.parseObject(parameter, HiveDataSource.class);
 }else if (dbType == DbType.SPARK){
-datasource = JSONObject.parseObject(parameter, SparkDataSource.class);
+datasource = JSON.parseObject(parameter, SparkDataSource.class);
 }
 Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
 break;
 case CLICKHOUSE:
-datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class);
+datasource = JSON.parseObject(parameter, ClickHouseDataSource.class);
 Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
 break;
 case ORACLE:
-datasource = JSONObject.parseObject(parameter, OracleDataSource.class);
+datasource = JSON.parseObject(parameter, OracleDataSource.class);
 Class.forName(Constants.COM_ORACLE_JDBC_DRIVER);
 break;
 case SQLSERVER:
-datasource = JSONObject.parseObject(parameter, SQLServerDataSource.class);
+datasource = JSON.parseObject(parameter, SQLServerDataSource.class);
 Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER);
 break;
 case DB2:
-datasource = JSONObject.parseObject(parameter, DB2ServerDataSource.class);
+datasource = JSON.parseObject(parameter, DB2ServerDataSource.class);
 Class.forName(Constants.COM_DB2_JDBC_DRIVER);
 break;
 default:
@@ -507,7 +508,7 @@ public class DataSourceService extends BaseService{
 parameterMap.put(Constants.PRINCIPAL,principal);
 }
 if (other != null && !"".equals(other)) {
-LinkedHashMap<String, String> map = JSONObject.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
+LinkedHashMap<String, String> map = JSON.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
 });
 if (map.size() > 0) {
 StringBuilder otherSb = new StringBuilder();
@@ -523,9 +524,9 @@ public class DataSourceService extends BaseService{
 }
 if(logger.isDebugEnabled()){
-logger.info("parameters map-----" + JSONObject.toJSONString(parameterMap));
+logger.info("parameters map-----" + JSON.toJSONString(parameterMap));
 }
-return JSONObject.toJSONString(parameterMap);
+return JSON.toJSONString(parameterMap);
 }
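The JSONObject.parseObject/toJSONString calls here are only being routed through fastjson's static JSON facade; parsing behaviour is unchanged. A standalone sketch of the same calls outside DolphinScheduler (the sample connection string is made up):

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import java.util.LinkedHashMap;

public class FastjsonFacadeDemo {
    public static void main(String[] args) {
        String connectionParams = "{\"user\":\"root\",\"password\":\"secret\"}"; // hypothetical input
        // The static facade JSON exposes the same parse methods as JSONObject,
        // so call sites no longer need the concrete JSONObject entry point.
        JSONObject object = JSON.parseObject(connectionParams);
        object.put("password", "******");

        LinkedHashMap<String, String> map =
                JSON.parseObject(connectionParams, new TypeReference<LinkedHashMap<String, String>>() {});
        System.out.println(JSON.toJSONString(object) + " " + map.keySet());
    }
}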

5
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java

@@ -16,6 +16,7 @@
 */
 package org.apache.dolphinscheduler.api.service;
+import java.nio.charset.StandardCharsets;
 import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
 import org.apache.dolphinscheduler.api.dto.gantt.Task;
 import org.apache.dolphinscheduler.api.enums.Status;
@@ -49,7 +50,6 @@ import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.nio.charset.Charset;
 import java.text.ParseException;
 import java.util.*;
 import java.util.stream.Collectors;
@@ -273,7 +273,8 @@ public class ProcessInstanceService extends BaseDAGService {
 return resultMap;
 }
-BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(Charset.forName("utf8"))), Charset.forName("utf8")));
+BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
+StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
 String line;
 while ((line = br.readLine()) != null) {
 if(line.contains(DEPENDENT_SPLIT)){

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java

@@ -121,7 +121,7 @@ public class ZooKeeperState {
 private class SendThread extends Thread {
 private String cmd;
-public String ret = "";
+private String ret = "";
 public SendThread(String cmd) {
 this.cmd = cmd;

23
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java

@@ -16,6 +16,7 @@
 */
 package org.apache.dolphinscheduler.api.controller;
+import com.alibaba.fastjson.JSON;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.enums.ResourceType;
@@ -54,7 +55,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -78,7 +79,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -281,7 +282,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -303,7 +304,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -324,7 +325,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -344,7 +345,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -365,7 +366,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -386,7 +387,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -406,7 +407,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -427,7 +428,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());
@@ -446,7 +447,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
 Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
 result.getCode().equals(Status.SUCCESS.getCode());
-JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
+JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
 Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
 logger.info(mvcResult.getResponse().getContentAsString());

5
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@@ -813,6 +813,11 @@
 */
 public static final String KERBEROS = "kerberos";
+/**
+ * kerberos expire time
+ */
+public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time";
 /**
 * java.security.krb5.conf
 */

4
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java

@@ -16,6 +16,7 @@
 */
 package org.apache.dolphinscheduler.common.model;
+import com.alibaba.fastjson.JSON;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.Priority;
 import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
@@ -23,7 +24,6 @@ import org.apache.dolphinscheduler.common.enums.TaskType;
 import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter;
 import org.apache.dolphinscheduler.common.utils.CollectionUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import com.alibaba.fastjson.JSONObject;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
@@ -294,7 +294,7 @@ public class TaskNode {
 if(StringUtils.isNotEmpty(this.getTimeout())){
 String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name());
 String timeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name());
-return JSONObject.parseObject(timeout,TaskTimeoutParameter.class);
+return JSON.parseObject(timeout,TaskTimeoutParameter.class);
 }
 return new TaskTimeoutParameter(false);
 }

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java

@@ -335,7 +335,7 @@ public abstract class AbstractShell {
 try{
 entry.getValue().destroy();
 } catch (Exception e) {
-e.printStackTrace();
+logger.error("Destroy All Processes error", e);
 }
 }

14
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java

@@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.common.enums.ProgramType;
 import org.apache.dolphinscheduler.common.process.ResourceInfo;
 import org.apache.dolphinscheduler.common.task.AbstractParameters;
+import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
@@ -207,12 +208,15 @@
 @Override
 public List<String> getResourceFilesList() {
-if(resourceList !=null ) {
-this.resourceList.add(mainJar);
-return resourceList.stream()
-.map(p -> p.getRes()).collect(Collectors.toList());
+if(resourceList != null ) {
+List<String> resourceFiles = resourceList.stream()
+.map(ResourceInfo::getRes).collect(Collectors.toList());
+if(mainJar != null) {
+resourceFiles.add(mainJar.getRes());
+}
+return resourceFiles;
 }
-return null;
+return Collections.emptyList();
 }

14
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java

@@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.common.enums.ProgramType;
 import org.apache.dolphinscheduler.common.process.ResourceInfo;
 import org.apache.dolphinscheduler.common.task.AbstractParameters;
+import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
@@ -125,12 +126,15 @@ public class MapreduceParameters {
 @Override
 public List<String> getResourceFilesList() {
-if (resourceList != null) {
-this.resourceList.add(mainJar);
-return resourceList.stream()
-.map(p -> p.getRes()).collect(Collectors.toList());
+if(resourceList != null ) {
+List<String> resourceFiles = resourceList.stream()
+.map(ResourceInfo::getRes).collect(Collectors.toList());
+if(mainJar != null) {
+resourceFiles.add(mainJar.getRes());
+}
+return resourceFiles;
 }
-return null;
+return Collections.emptyList();
 }
 @Override
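Both parameter classes get the same fix: build a new list instead of mutating the resourceList field (the old code appended mainJar to the field itself, so repeated calls kept growing it), tolerate a null mainJar, and return an empty list rather than null. A self-contained sketch of the corrected pattern, using a simplified stand-in for the project's ResourceInfo class:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class ResourceFilesDemo {
    // Simplified stand-in for org.apache.dolphinscheduler.common.process.ResourceInfo.
    static class ResourceInfo {
        private final String res;
        ResourceInfo(String res) { this.res = res; }
        String getRes() { return res; }
    }

    private List<ResourceInfo> resourceList = new ArrayList<>();
    private ResourceInfo mainJar; // may legitimately be null

    // Mirrors the new getResourceFilesList(): map into a fresh list instead of
    // mutating resourceList, guard mainJar against null, and never return null.
    public List<String> getResourceFilesList() {
        if (resourceList != null) {
            List<String> resourceFiles = resourceList.stream()
                    .map(ResourceInfo::getRes)
                    .collect(Collectors.toList());
            if (mainJar != null) {
                resourceFiles.add(mainJar.getRes());
            }
            return resourceFiles;
        }
        return Collections.emptyList();
    }

    public static void main(String[] args) {
        ResourceFilesDemo demo = new ResourceFilesDemo();
        demo.resourceList.add(new ResourceInfo("udf.jar"));
        System.out.println(demo.getResourceFilesList()); // [udf.jar] even though mainJar is null
        System.out.println(demo.getResourceFilesList()); // repeated calls no longer grow the list
    }
}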

17
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java

@@ -120,12 +120,24 @@
 /**
 * Wrapper over ScheduledThreadPoolExecutor
+ * @param threadName
 * @param corePoolSize
 * @return
 */
-public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName,int corePoolSize) {
+public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName, int corePoolSize) {
+return newThreadScheduledExecutor(threadName, corePoolSize, true);
+}
+/**
+ * Wrapper over ScheduledThreadPoolExecutor
+ * @param threadName
+ * @param corePoolSize
+ * @param isDaemon
+ * @return
+ */
+public static ScheduledExecutorService newThreadScheduledExecutor(String threadName, int corePoolSize, boolean isDaemon) {
 ThreadFactory threadFactory = new ThreadFactoryBuilder()
-.setDaemon(true)
+.setDaemon(isDaemon)
 .setNameFormat(threadName)
 .build();
 ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory);
@@ -135,7 +147,6 @@ public class ThreadUtils {
 return executor;
 }
-
 public static ThreadInfo getThreadInfo(Thread t) {
 long tid = t.getId();
 return threadBean.getThreadInfo(tid, STACK_DEPTH);
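The refactor keeps newDaemonThreadScheduledExecutor() as a thin delegate and turns the daemon flag into a parameter. A self-contained sketch of the same Guava ThreadFactoryBuilder pattern (the remove-on-cancel line and the demo names are my own additions, not taken from the diff):

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;

public class ScheduledExecutorDemo {
    // Same shape as the refactored ThreadUtils: the daemon flag becomes a parameter
    // and the old daemon-only method can delegate with isDaemon = true.
    static ScheduledExecutorService newThreadScheduledExecutor(String threadName, int corePoolSize, boolean isDaemon) {
        ThreadFactory threadFactory = new ThreadFactoryBuilder()
                .setDaemon(isDaemon)
                .setNameFormat(threadName)
                .build();
        ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory);
        // Drop cancelled tasks immediately instead of keeping them queued until their scheduled time.
        executor.setRemoveOnCancelPolicy(true);
        return executor;
    }

    public static void main(String[] args) throws InterruptedException {
        ScheduledExecutorService pool = newThreadScheduledExecutor("Demo-Thread-%d", 1, true);
        pool.schedule(() -> System.out.println(Thread.currentThread().getName()), 100, TimeUnit.MILLISECONDS);
        Thread.sleep(300);
        pool.shutdown();
    }
}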

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java

@@ -44,7 +44,7 @@ public class FileUtils {
 String fileSuffix = "";
 if (StringUtils.isNotEmpty(filename)) {
-int lastIndex = filename.lastIndexOf(".");
+int lastIndex = filename.lastIndexOf('.');
 if (lastIndex > 0) {
 fileSuffix = filename.substring(lastIndex + 1);
 }
@@ -325,10 +325,8 @@
 }
 } else {
 File parent = file.getParentFile();
-if (parent != null) {
-if (!parent.mkdirs() && !parent.isDirectory()) {
+if (parent != null && !parent.mkdirs() && !parent.isDirectory()) {
 throw new IOException("Directory '" + parent + "' could not be created");
-}
 }
 }
 return new FileOutputStream(file, append);
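The collapsed condition keeps the original intent: attempt mkdirs() first and only fail when the directory still does not exist afterwards, which also tolerates another process creating it concurrently. A small sketch of the same guard in isolation (the path is hypothetical):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class OutputStreamDemo {
    // mkdirs() is attempted first; isDirectory() is only consulted when mkdirs()
    // reports failure, covering the case where the directory already exists or
    // was just created by someone else.
    static OutputStream openForWrite(File file, boolean append) throws IOException {
        File parent = file.getParentFile();
        if (parent != null && !parent.mkdirs() && !parent.isDirectory()) {
            throw new IOException("Directory '" + parent + "' could not be created");
        }
        return new FileOutputStream(file, append);
    }

    public static void main(String[] args) throws IOException {
        try (OutputStream out = openForWrite(new File("/tmp/demo/output.log"), true)) {
            out.write("hello\n".getBytes());
        }
    }
}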

254
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java

@@ -16,6 +16,9 @@
  */
 package org.apache.dolphinscheduler.common.utils;

+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
 import org.apache.dolphinscheduler.common.enums.ResUploadType;
@@ -32,9 +35,12 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.*;
+import java.nio.file.Files;
 import java.security.PrivilegedExceptionAction;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -46,32 +52,37 @@ public class HadoopUtils implements Closeable {

     private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);

-    private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
-    private static volatile HadoopUtils instance = new HadoopUtils();
-    private static volatile Configuration configuration;
-    private static FileSystem fs;
+    private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
+
+    private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
+            .newBuilder()
+            .expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 7), TimeUnit.DAYS)
+            .build(new CacheLoader<String, HadoopUtils>() {
+                @Override
+                public HadoopUtils load(String key) throws Exception {
+                    return new HadoopUtils();
+                }
+            });
+
+    private Configuration configuration;
+    private FileSystem fs;
+
+    private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);

-    private HadoopUtils(){
-        if(StringUtils.isEmpty(hdfsUser)){
-            hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
-        }
+    private HadoopUtils() {
         init();
         initHdfsPath();
     }

-    public static HadoopUtils getInstance(){
-        // if kerberos startup , renew HadoopUtils
-        if (CommonUtils.getKerberosStartupState()){
-            return new HadoopUtils();
-        }
-        return instance;
+    public static HadoopUtils getInstance() {
+        return cache.getUnchecked(HADOOP_UTILS_KEY);
     }

     /**
      * init dolphinscheduler root path in hdfs
      */
-    private void initHdfsPath(){
+    private void initHdfsPath() {
         String hdfsPath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH);
         Path path = new Path(hdfsPath);
@@ -80,7 +91,7 @@ public class HadoopUtils implements Closeable {
                 fs.mkdirs(path);
             }
         } catch (Exception e) {
-            logger.error(e.getMessage(),e);
+            logger.error(e.getMessage(), e);
         }
     }
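
Note: the getInstance() change in the hunk above swaps a hand-rolled singleton (plus an unconditional re-creation under Kerberos) for a Guava LoadingCache with a single entry that expires after kerberos.expire.time days, so the Hadoop client is rebuilt and re-authenticated periodically. A minimal, self-contained sketch of that pattern, with a placeholder value type and a hard-coded 7-day expiry instead of the project's PropertyUtils lookup:

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

import java.util.concurrent.TimeUnit;

public class ExpiringSingletonSketch {

    private static final String KEY = "SINGLETON_KEY"; // single fixed cache key

    // One entry that is transparently rebuilt 7 days after it was written.
    private static final LoadingCache<String, ExpiringSingletonSketch> CACHE = CacheBuilder
            .newBuilder()
            .expireAfterWrite(7, TimeUnit.DAYS)
            .build(new CacheLoader<String, ExpiringSingletonSketch>() {
                @Override
                public ExpiringSingletonSketch load(String key) {
                    return new ExpiringSingletonSketch(); // e.g. re-login to Kerberos here
                }
            });

    private ExpiringSingletonSketch() {
    }

    public static ExpiringSingletonSketch getInstance() {
        // getUnchecked avoids a checked exception; loading errors surface as UncheckedExecutionException
        return CACHE.getUnchecked(KEY);
    }
}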
@@ -89,82 +100,74 @@ public class HadoopUtils implements Closeable {
      * init hadoop configuration
      */
     private void init() {
-        if (configuration == null) {
-            synchronized (HadoopUtils.class) {
-                if (configuration == null) {
-                    try {
+        try {
             configuration = new Configuration();

             String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE);
             ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);

             if (resUploadType == ResUploadType.HDFS) {
                 if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)) {
                     System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
                             PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
                     configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                     UserGroupInformation.setConfiguration(configuration);
                     UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
                             PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
                 }

                 String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
                 //first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
                 // the default is the local file system
                 if (defaultFS.startsWith("file")) {
                     String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
                     if (StringUtils.isNotBlank(defaultFSProp)) {
                         Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
                         configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
                         fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
                     } else {
                         logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
                         throw new RuntimeException(
                                 String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
                         );
                     }
                 } else {
                     logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
                 }

                 if (fs == null) {
                     if (StringUtils.isNotEmpty(hdfsUser)) {
-                        //UserGroupInformation ugi = UserGroupInformation.createProxyUser(hdfsUser,UserGroupInformation.getLoginUser());
                         UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
                         ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
                             @Override
                             public Boolean run() throws Exception {
                                 fs = FileSystem.get(configuration);
                                 return true;
                             }
                         });
                     } else {
                         logger.warn("hdfs.root.user is not set value!");
                         fs = FileSystem.get(configuration);
                     }
                 }
             } else if (resUploadType == ResUploadType.S3) {
                 configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
                 configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
                 configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
                 configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
                 fs = FileSystem.get(configuration);
             }

             String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
             String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
             if (!StringUtils.isEmpty(rmHaIds)) {
                 appAddress = getAppAddress(appAddress, rmHaIds);
                 logger.info("appAddress : {}", appAddress);
             }
             configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress);
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
         }
-                }
-            }
-        }
     }
@@ -188,15 +191,15 @@ public class HadoopUtils implements Closeable {
     /**
      * cat file on hdfs
      *
      * @param hdfsFilePath hdfs file path
      * @return byte[] byte array
      * @throws IOException errors
      */
     public byte[] catFile(String hdfsFilePath) throws IOException {
-        if(StringUtils.isBlank(hdfsFilePath)){
-            logger.error("hdfs file path:{} is blank",hdfsFilePath);
-            return null;
+        if (StringUtils.isBlank(hdfsFilePath)) {
+            logger.error("hdfs file path:{} is blank", hdfsFilePath);
+            return new byte[0];
         }
         FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
@@ -204,29 +207,28 @@ public class HadoopUtils implements Closeable {
     }

     /**
      * cat file on hdfs
      *
      * @param hdfsFilePath hdfs file path
      * @param skipLineNums skip line numbers
      * @param limit read how many lines
      * @return content of file
      * @throws IOException errors
      */
     public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
-        if (StringUtils.isBlank(hdfsFilePath)){
-            logger.error("hdfs file path:{} is blank",hdfsFilePath);
-            return null;
+        if (StringUtils.isBlank(hdfsFilePath)) {
+            logger.error("hdfs file path:{} is blank", hdfsFilePath);
+            return Collections.emptyList();
         }

-        try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))){
+        try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
             BufferedReader br = new BufferedReader(new InputStreamReader(in));
             Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
             return stream.collect(Collectors.toList());
         }
     }
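
Note: the two hunks above return empty results (new byte[0], Collections.emptyList()) instead of null, so callers no longer need null checks, and the line-reading variant streams over the HDFS file inside try-with-resources. A generic, hedged sketch of the same skip/limit reading pattern against a local file (java.nio instead of the Hadoop FileSystem API, purely to keep the example self-contained):

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class CatFileSketch {

    // Return at most `limit` lines after skipping `skipLineNums`, never null.
    public static List<String> catFile(String path, int skipLineNums, int limit) throws IOException {
        if (path == null || path.trim().isEmpty()) {
            return Collections.emptyList(); // empty result instead of null
        }
        try (BufferedReader br = Files.newBufferedReader(Paths.get(path), StandardCharsets.UTF_8)) {
            return br.lines().skip(skipLineNums).limit(limit).collect(Collectors.toList());
        }
    }
}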
/** /**
@ -259,17 +261,17 @@ public class HadoopUtils implements Closeable {
/** /**
* the src file is on the local disk. Add it to FS at * the src file is on the local disk. Add it to FS at
* the given dst name. * the given dst name.
*
* @param srcFile local file * @param srcFile local file
* @param dstHdfsPath destination hdfs path * @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src * @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file * @param overwrite whether to overwrite an existing file
* @return if success or not * @return if success or not
* @throws IOException errors * @throws IOException errors
*/ */
public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException { public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcFile); Path srcPath = new Path(srcFile);
Path dstPath= new Path(dstHdfsPath); Path dstPath = new Path(dstHdfsPath);
fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);
@ -279,10 +281,10 @@ public class HadoopUtils implements Closeable {
/** /**
* copy hdfs file to local * copy hdfs file to local
* *
* @param srcHdfsFilePath source hdfs file path * @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file * @param dstFile destination file
* @param deleteSource delete source * @param deleteSource delete source
* @param overwrite overwrite * @param overwrite overwrite
* @return result of copy hdfs file to local * @return result of copy hdfs file to local
* @throws IOException errors * @throws IOException errors
*/ */
@@ -293,14 +295,14 @@ public class HadoopUtils implements Closeable {
         if (dstPath.exists()) {
             if (dstPath.isFile()) {
                 if (overwrite) {
-                    dstPath.delete();
+                    Files.delete(dstPath.toPath());
                 }
             } else {
                 logger.error("destination file must be a file");
             }
         }

-        if(!dstPath.getParentFile().exists()){
+        if (!dstPath.getParentFile().exists()) {
             dstPath.getParentFile().mkdirs();
         }
@ -308,14 +310,13 @@ public class HadoopUtils implements Closeable {
} }
/** /**
*
* delete a file * delete a file
* *
* @param hdfsFilePath the path to delete. * @param hdfsFilePath the path to delete.
* @param recursive if path is a directory and set to * @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In * true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false. * case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false. * @return true if delete is successful else false.
* @throws IOException errors * @throws IOException errors
*/ */
public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
@ -340,7 +341,7 @@ public class HadoopUtils implements Closeable {
* @return {@link FileStatus} file status * @return {@link FileStatus} file status
* @throws Exception errors * @throws Exception errors
*/ */
public FileStatus[] listFileStatus(String filePath)throws Exception{ public FileStatus[] listFileStatus(String filePath) throws Exception {
try { try {
return fs.listStatus(new Path(filePath)); return fs.listStatus(new Path(filePath));
} catch (IOException e) { } catch (IOException e) {
@ -352,10 +353,11 @@ public class HadoopUtils implements Closeable {
/** /**
* Renames Path src to Path dst. Can take place on local fs * Renames Path src to Path dst. Can take place on local fs
* or remote DFS. * or remote DFS.
*
* @param src path to be renamed * @param src path to be renamed
* @param dst new path after rename * @param dst new path after rename
* @throws IOException on failure
* @return true if rename is successful * @return true if rename is successful
* @throws IOException on failure
*/ */
public boolean rename(String src, String dst) throws IOException { public boolean rename(String src, String dst) throws IOException {
return fs.rename(new Path(src), new Path(dst)); return fs.rename(new Path(src), new Path(dst));
@ -378,7 +380,7 @@ public class HadoopUtils implements Closeable {
String responseContent = HttpUtils.get(applicationUrl); String responseContent = HttpUtils.get(applicationUrl);
JSONObject jsonObject = JSONObject.parseObject(responseContent); JSONObject jsonObject = JSON.parseObject(responseContent);
String result = jsonObject.getJSONObject("app").getString("finalStatus"); String result = jsonObject.getJSONObject("app").getString("finalStatus");
switch (result) { switch (result) {
@ -401,7 +403,6 @@ public class HadoopUtils implements Closeable {
} }
/** /**
*
* @return data hdfs path * @return data hdfs path
*/ */
public static String getHdfsDataBasePath() { public static String getHdfsDataBasePath() {
@ -428,11 +429,11 @@ public class HadoopUtils implements Closeable {
* hdfs user dir * hdfs user dir
* *
* @param tenantCode tenant code * @param tenantCode tenant code
* @param userId user id * @param userId user id
* @return hdfs resource dir * @return hdfs resource dir
*/ */
public static String getHdfsUserDir(String tenantCode,int userId) { public static String getHdfsUserDir(String tenantCode, int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode),userId); return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
} }
/** /**
@ -480,7 +481,7 @@ public class HadoopUtils implements Closeable {
* getAppAddress * getAppAddress
* *
* @param appAddress app address * @param appAddress app address
* @param rmHa resource manager ha * @param rmHa resource manager ha
* @return app address * @return app address
*/ */
public static String getAppAddress(String appAddress, String rmHa) { public static String getAppAddress(String appAddress, String rmHa) {
@ -525,8 +526,6 @@ public class HadoopUtils implements Closeable {
*/ */
private static final class YarnHAAdminUtils extends RMAdminCLI { private static final class YarnHAAdminUtils extends RMAdminCLI {
private static final Logger logger = LoggerFactory.getLogger(YarnHAAdminUtils.class);
/** /**
* get active resourcemanager * get active resourcemanager
* *
@@ -585,8 +584,7 @@ public class HadoopUtils implements Closeable {
             JSONObject jsonObject = JSON.parseObject(retStr);

             //get ResourceManager state
-            String state = jsonObject.getJSONObject("clusterInfo").getString("haState");
-            return state;
+            return jsonObject.getJSONObject("clusterInfo").getString("haState");
         }
     }
} }

12
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java

@@ -81,17 +81,15 @@ public class HttpUtils {
                 logger.error(e.getMessage(),e);
             }

-            if (httpget != null && !httpget.isAborted()) {
+            if (!httpget.isAborted()) {
                 httpget.releaseConnection();
                 httpget.abort();
             }

-            if (httpclient != null) {
-                try {
-                    httpclient.close();
-                } catch (IOException e) {
-                    logger.error(e.getMessage(),e);
-                }
+            try {
+                httpclient.close();
+            } catch (IOException e) {
+                logger.error(e.getMessage(),e);
             }
         }
         return responseContent;

19
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java

@@ -19,26 +19,17 @@
 package org.apache.dolphinscheduler.common.utils;

+import java.io.Closeable;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;

 public class IOUtils {

-    public static void closeQuietly(InputStream fis){
-        if(fis != null){
-            try {
-                fis.close();
-            } catch (IOException ignore) {
-            }
-        }
-    }
-
-    public static void closeQuietly(InputStreamReader reader){
-        if(reader != null){
+    public static void closeQuietly(Closeable closeable){
+        if(closeable != null){
             try {
-                reader.close();
+                closeable.close();
             } catch (IOException ignore) {
+                // nothing need to do
             }
         }
     }
 }
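
Note: the hunk above collapses the two overloads into a single closeQuietly(Closeable), since InputStream and InputStreamReader both implement Closeable. A hedged usage sketch (the file path is illustrative only, and the example assumes the project's IOUtils is on the classpath); where the resource is scoped to one block, try-with-resources remains the simpler option:

import org.apache.dolphinscheduler.common.utils.IOUtils;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CloseQuietlySketch {
    public static void main(String[] args) throws IOException {
        InputStream in = null;
        try {
            in = new FileInputStream("/tmp/example.txt"); // hypothetical path
            System.out.println("first byte: " + in.read());
        } finally {
            IOUtils.closeQuietly(in); // any Closeable now works: streams, readers, sockets...
        }

        // Equivalent with try-with-resources, which closes automatically:
        try (InputStream auto = new FileInputStream("/tmp/example.txt")) {
            System.out.println("first byte: " + auto.read());
        }
    }
}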

5
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java

@@ -17,16 +17,11 @@
 package org.apache.dolphinscheduler.common.utils;

-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * http utils
  */
 public class IpUtils {

-    private static final Logger logger = LoggerFactory.getLogger(IpUtils.class);
-
     public static final String DOT = ".";
/** /**

25
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java

@@ -16,6 +16,7 @@
  */
 package org.apache.dolphinscheduler.common.utils;

+import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.TypeReference;
@@ -41,12 +42,6 @@ public class JSONUtils {
      */
     private static final ObjectMapper objectMapper = new ObjectMapper();

-    /**
-     * init
-     */
-    private static final JSONUtils instance = new JSONUtils();
-
     private JSONUtils() {
         //Feature that determines whether encountering of unknown properties, false means not analyzer unknown properties
         objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault());
@ -59,7 +54,7 @@ public class JSONUtils {
*/ */
public static String toJson(Object object) { public static String toJson(Object object) {
try{ try{
return JSONObject.toJSONString(object,false); return JSON.toJSONString(object,false);
} catch (Exception e) { } catch (Exception e) {
logger.error("object to json exception!",e); logger.error("object to json exception!",e);
} }
@ -89,7 +84,7 @@ public class JSONUtils {
} }
try { try {
return JSONObject.parseObject(json, clazz); return JSON.parseObject(json, clazz);
} catch (Exception e) { } catch (Exception e) {
logger.error("parse object exception!",e); logger.error("parse object exception!",e);
} }
@ -178,7 +173,7 @@ public class JSONUtils {
} }
try { try {
return JSONObject.parseObject(json, new TypeReference<HashMap<String, String>>(){}); return JSON.parseObject(json, new TypeReference<HashMap<String, String>>(){});
} catch (Exception e) { } catch (Exception e) {
logger.error("json to map exception!",e); logger.error("json to map exception!",e);
} }
@ -203,7 +198,7 @@ public class JSONUtils {
} }
try { try {
return JSONObject.parseObject(json, new TypeReference<HashMap<K, V>>() {}); return JSON.parseObject(json, new TypeReference<HashMap<K, V>>() {});
} catch (Exception e) { } catch (Exception e) {
logger.error("json to map exception!",e); logger.error("json to map exception!",e);
} }
@ -218,23 +213,23 @@ public class JSONUtils {
*/ */
public static String toJsonString(Object object) { public static String toJsonString(Object object) {
try{ try{
return JSONObject.toJSONString(object,false); return JSON.toJSONString(object,false);
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e); throw new RuntimeException("Object json deserialization exception.", e);
} }
} }
public static JSONObject parseObject(String text) { public static JSONObject parseObject(String text) {
try{ try{
return JSONObject.parseObject(text); return JSON.parseObject(text);
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e); throw new RuntimeException("String json deserialization exception.", e);
} }
} }
public static JSONArray parseArray(String text) { public static JSONArray parseArray(String text) {
try{ try{
return JSONObject.parseArray(text); return JSON.parseArray(text);
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e); throw new RuntimeException("Json deserialization exception.", e);
} }

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java

@@ -79,7 +79,7 @@ public class LoggerUtils {
      */
     public static List<String> getAppIds(String log, Logger logger) {
-        List<String> appIds = new ArrayList<String>();
+        List<String> appIds = new ArrayList<>();

         Matcher matcher = APPLICATION_REGEX.matcher(log);

16
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java

@@ -400,8 +400,7 @@ public class OSUtils {
      * @return true if mac
      */
     public static boolean isMacOS() {
-        String os = System.getProperty("os.name");
-        return os.startsWith("Mac");
+        return getOSName().startsWith("Mac");
     }
@@ -409,9 +408,16 @@ public class OSUtils {
      * whether is windows
      * @return true if windows
      */
-    public static boolean isWindows() {
-        String os = System.getProperty("os.name");
-        return os.startsWith("Windows");
+    public static boolean isWindows() { ;
+        return getOSName().startsWith("Windows");
+    }
+
+    /**
+     * get current OS name
+     * @return current OS name
+     */
+    public static String getOSName() {
+        return System.getProperty("os.name");
     }
/** /**

4
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java

@ -16,6 +16,7 @@
*/ */
package org.apache.dolphinscheduler.common.utils; package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.DataType;
@ -23,7 +24,6 @@ import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils; import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils; import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils; import org.apache.commons.lang.time.DateUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -157,7 +157,7 @@ public class ParameterUtils {
property.setValue(val); property.setValue(val);
} }
} }
return JSONObject.toJSONString(globalParamList); return JSON.toJSONString(globalParamList);
} }

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java

@@ -43,13 +43,11 @@ public class PropertyUtils {
     private static final Properties properties = new Properties();

-    private static final PropertyUtils propertyUtils = new PropertyUtils();
-
-    private PropertyUtils(){
-        init();
+    private PropertyUtils() {
+        throw new IllegalStateException("PropertyUtils class");
     }

-    private void init(){
+    static {
         String[] propertyFiles = new String[]{COMMON_PROPERTIES_PATH};
         for (String fileName : propertyFiles) {
             InputStream fis = null;
@@ -125,7 +123,7 @@ public class PropertyUtils {
      * @param key property name
      * @return property value
      */
-    public static Boolean getBoolean(String key) {
+    public static boolean getBoolean(String key) {
         String value = properties.getProperty(key.trim());
         if(null != value){
             return Boolean.parseBoolean(value);
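
Note: the change above moves property loading from an instance init() invoked by the constructor into a static initializer, and makes the constructor throw so the utility class can never be instantiated. A self-contained sketch of that shape (the properties file name is a placeholder, and errors go to stderr here rather than through slf4j):

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public final class ConfigSketch {

    private static final Properties PROPERTIES = new Properties();

    // Runs once when the class is first loaded, before any getter is called.
    static {
        try (InputStream in = ConfigSketch.class.getResourceAsStream("/common.properties")) {
            if (in != null) {
                PROPERTIES.load(in);
            }
        } catch (IOException e) {
            System.err.println("failed to load properties: " + e.getMessage());
        }
    }

    private ConfigSketch() {
        throw new IllegalStateException("utility class, do not instantiate");
    }

    public static boolean getBoolean(String key) {
        return Boolean.parseBoolean(PROPERTIES.getProperty(key, "false"));
    }
}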

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java

@@ -31,12 +31,12 @@ public class PlaceholderUtils {
     /**
      * Prefix of the position to be replaced
      */
-    public static final String placeholderPrefix = "${";
+    public static final String PLACEHOLDER_PREFIX = "${";

     /**
      * The suffix of the position to be replaced
      */
-    public static final String placeholderSuffix = "}";
+    public static final String PLACEHOLDER_SUFFIX = "}";

     /**
@@ -68,7 +68,7 @@ public class PlaceholderUtils {
      */
     public static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) {
-        return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders);
+        return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders);
     }

     /**
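
Note: the constants above only changed to UPPER_SNAKE_CASE; behaviour is unchanged. As a hedged usage sketch, assuming the helper is Spring's org.springframework.util.PropertyPlaceholderHelper (the import is not visible in this hunk), ${...} markers are resolved against a Properties object like so:

import org.springframework.util.PropertyPlaceholderHelper;

import java.util.Properties;

public class PlaceholderSketch {
    public static void main(String[] args) {
        // "${" / "}" mirror PLACEHOLDER_PREFIX / PLACEHOLDER_SUFFIX above;
        // true = unresolved placeholders are left as-is instead of throwing.
        PropertyPlaceholderHelper helper = new PropertyPlaceholderHelper("${", "}", null, true);

        Properties props = new Properties();
        props.setProperty("system.biz.date", "20200220"); // example value

        String resolved = helper.replacePlaceholders("data path: /tmp/${system.biz.date}/output", props);
        System.out.println(resolved); // data path: /tmp/20200220/output
    }
}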

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java

@ -426,6 +426,7 @@ public class ProcessBuilderForWin32 {
static final ProcessBuilderForWin32.NullInputStream INSTANCE = new ProcessBuilderForWin32.NullInputStream(); static final ProcessBuilderForWin32.NullInputStream INSTANCE = new ProcessBuilderForWin32.NullInputStream();
private NullInputStream() {} private NullInputStream() {}
public int read() { return -1; } public int read() { return -1; }
@Override
public int available() { return 0; } public int available() { return 0; }
} }
@ -462,7 +463,7 @@ public class ProcessBuilderForWin32 {
* *
* @since 1.7 * @since 1.7
*/ */
public static abstract class Redirect { public abstract static class Redirect {
/** /**
* The type of a {@link ProcessBuilderForWin32.Redirect}. * The type of a {@link ProcessBuilderForWin32.Redirect}.
*/ */
@ -494,7 +495,7 @@ public class ProcessBuilderForWin32 {
* {@link ProcessBuilderForWin32.Redirect#appendTo Redirect.appendTo(File)}. * {@link ProcessBuilderForWin32.Redirect#appendTo Redirect.appendTo(File)}.
*/ */
APPEND APPEND
}; }
/** /**
* Returns the type of this {@code Redirect}. * Returns the type of this {@code Redirect}.
@ -568,6 +569,7 @@ public class ProcessBuilderForWin32 {
throw new NullPointerException(); throw new NullPointerException();
return new ProcessBuilderForWin32.Redirect() { return new ProcessBuilderForWin32.Redirect() {
public Type type() { return Type.READ; } public Type type() { return Type.READ; }
@Override
public File file() { return file; } public File file() { return file; }
public String toString() { public String toString() {
return "redirect to read from file \"" + file + "\""; return "redirect to read from file \"" + file + "\"";
@ -595,10 +597,12 @@ public class ProcessBuilderForWin32 {
throw new NullPointerException(); throw new NullPointerException();
return new ProcessBuilderForWin32.Redirect() { return new ProcessBuilderForWin32.Redirect() {
public Type type() { return Type.WRITE; } public Type type() { return Type.WRITE; }
@Override
public File file() { return file; } public File file() { return file; }
public String toString() { public String toString() {
return "redirect to write to file \"" + file + "\""; return "redirect to write to file \"" + file + "\"";
} }
@Override
boolean append() { return false; } boolean append() { return false; }
}; };
} }
@ -626,10 +630,12 @@ public class ProcessBuilderForWin32 {
throw new NullPointerException(); throw new NullPointerException();
return new ProcessBuilderForWin32.Redirect() { return new ProcessBuilderForWin32.Redirect() {
public Type type() { return Type.APPEND; } public Type type() { return Type.APPEND; }
@Override
public File file() { return file; } public File file() { return file; }
public String toString() { public String toString() {
return "redirect to append to file \"" + file + "\""; return "redirect to append to file \"" + file + "\"";
} }
@Override
boolean append() { return true; } boolean append() { return true; }
}; };
} }

57
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java

@@ -19,6 +19,8 @@ package org.apache.dolphinscheduler.common.utils.process;
 import com.sun.jna.Pointer;
 import com.sun.jna.platform.win32.*;
 import com.sun.jna.ptr.IntByReference;
+import java.lang.reflect.Field;
+import org.apache.dolphinscheduler.common.utils.OSUtils;
 import sun.security.action.GetPropertyAction;

 import java.io.*;
@@ -31,10 +33,25 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;

 import static com.sun.jna.platform.win32.WinBase.STILL_ACTIVE;
+import static java.util.Objects.requireNonNull;

 public class ProcessImplForWin32 extends Process {

-    private static final sun.misc.JavaIOFileDescriptorAccess fdAccess
-        = sun.misc.SharedSecrets.getJavaIOFileDescriptorAccess();
+    private static final Field FD_HANDLE;
+
+    static {
+        if (!OSUtils.isWindows()) {
+            throw new RuntimeException("ProcessImplForWin32 can be only initialized in " +
+                    "Windows environment, but current OS is " + OSUtils.getOSName());
+        }
+        try {
+            FD_HANDLE = requireNonNull(FileDescriptor.class.getDeclaredField("handle"));
+            FD_HANDLE.setAccessible(true);
+        } catch (NoSuchFieldException e) {
+            throw new RuntimeException(e);
+        }
+    }

     private static final int PIPE_SIZE = 4096 + 24;
@@ -46,6 +63,22 @@ public class ProcessImplForWin32 extends Process {

     private static final WinNT.HANDLE JAVA_INVALID_HANDLE_VALUE = new WinNT.HANDLE(Pointer.createConstant(-1));

+    private static void setHandle(FileDescriptor obj, long handle) {
+        try {
+            FD_HANDLE.set(obj, handle);
+        } catch (IllegalAccessException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private static long getHandle(FileDescriptor obj) {
+        try {
+            return (Long) FD_HANDLE.get(obj);
+        } catch (IllegalAccessException e) {
+            throw new RuntimeException(e);
+        }
+    }
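
Note: this hunk replaces the unsupported sun.misc.SharedSecrets accessor with plain reflection on FileDescriptor's private handle field. A generic, hedged sketch of the same pattern on a made-up class (reflecting into JDK internals may additionally require --add-opens on newer JVMs, which the example sidesteps by reflecting into its own nested class):

import java.lang.reflect.Field;

public class ReflectionHandleSketch {

    // A stand-in for a class whose private field we need to read and write.
    static class Holder {
        private long handle = 42L;
    }

    private static final Field HANDLE_FIELD;

    static {
        try {
            HANDLE_FIELD = Holder.class.getDeclaredField("handle");
            HANDLE_FIELD.setAccessible(true); // bypass private access for this field only
        } catch (NoSuchFieldException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    static long getHandle(Holder h) throws IllegalAccessException {
        return (Long) HANDLE_FIELD.get(h); // boxed Long, unboxed by the cast
    }

    static void setHandle(Holder h, long value) throws IllegalAccessException {
        HANDLE_FIELD.set(h, value); // autoboxed and written to the primitive field
    }

    public static void main(String[] args) throws IllegalAccessException {
        Holder holder = new Holder();
        setHandle(holder, 7L);
        System.out.println(getHandle(holder)); // prints 7
    }
}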
/** /**
* Open a file for writing. If {@code append} is {@code true} then the file * Open a file for writing. If {@code append} is {@code true} then the file
* is opened for atomic append directly and a FileOutputStream constructed * is opened for atomic append directly and a FileOutputStream constructed
@ -63,7 +96,7 @@ public class ProcessImplForWin32 extends Process {
sm.checkWrite(path); sm.checkWrite(path);
long handle = openForAtomicAppend(path); long handle = openForAtomicAppend(path);
final FileDescriptor fd = new FileDescriptor(); final FileDescriptor fd = new FileDescriptor();
fdAccess.setHandle(fd, handle); setHandle(fd, handle);
return AccessController.doPrivileged( return AccessController.doPrivileged(
new PrivilegedAction<FileOutputStream>() { new PrivilegedAction<FileOutputStream>() {
public FileOutputStream run() { public FileOutputStream run() {
@ -102,30 +135,30 @@ public class ProcessImplForWin32 extends Process {
if (redirects[0] == ProcessBuilderForWin32.Redirect.PIPE) if (redirects[0] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[0] = -1L; stdHandles[0] = -1L;
else if (redirects[0] == ProcessBuilderForWin32.Redirect.INHERIT) else if (redirects[0] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[0] = fdAccess.getHandle(FileDescriptor.in); stdHandles[0] = getHandle(FileDescriptor.in);
else { else {
f0 = new FileInputStream(redirects[0].file()); f0 = new FileInputStream(redirects[0].file());
stdHandles[0] = fdAccess.getHandle(f0.getFD()); stdHandles[0] = getHandle(f0.getFD());
} }
if (redirects[1] == ProcessBuilderForWin32.Redirect.PIPE) if (redirects[1] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[1] = -1L; stdHandles[1] = -1L;
else if (redirects[1] == ProcessBuilderForWin32.Redirect.INHERIT) else if (redirects[1] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[1] = fdAccess.getHandle(FileDescriptor.out); stdHandles[1] = getHandle(FileDescriptor.out);
else { else {
f1 = newFileOutputStream(redirects[1].file(), f1 = newFileOutputStream(redirects[1].file(),
redirects[1].append()); redirects[1].append());
stdHandles[1] = fdAccess.getHandle(f1.getFD()); stdHandles[1] = getHandle(f1.getFD());
} }
if (redirects[2] == ProcessBuilderForWin32.Redirect.PIPE) if (redirects[2] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[2] = -1L; stdHandles[2] = -1L;
else if (redirects[2] == ProcessBuilderForWin32.Redirect.INHERIT) else if (redirects[2] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[2] = fdAccess.getHandle(FileDescriptor.err); stdHandles[2] = getHandle(FileDescriptor.err);
else { else {
f2 = newFileOutputStream(redirects[2].file(), f2 = newFileOutputStream(redirects[2].file(),
redirects[2].append()); redirects[2].append());
stdHandles[2] = fdAccess.getHandle(f2.getFD()); stdHandles[2] = getHandle(f2.getFD());
} }
} }
@ -442,7 +475,7 @@ public class ProcessImplForWin32 extends Process {
stdin_stream = ProcessBuilderForWin32.NullOutputStream.INSTANCE; stdin_stream = ProcessBuilderForWin32.NullOutputStream.INSTANCE;
else { else {
FileDescriptor stdin_fd = new FileDescriptor(); FileDescriptor stdin_fd = new FileDescriptor();
fdAccess.setHandle(stdin_fd, stdHandles[0]); setHandle(stdin_fd, stdHandles[0]);
stdin_stream = new BufferedOutputStream( stdin_stream = new BufferedOutputStream(
new FileOutputStream(stdin_fd)); new FileOutputStream(stdin_fd));
} }
@ -451,7 +484,7 @@ public class ProcessImplForWin32 extends Process {
stdout_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE; stdout_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE;
else { else {
FileDescriptor stdout_fd = new FileDescriptor(); FileDescriptor stdout_fd = new FileDescriptor();
fdAccess.setHandle(stdout_fd, stdHandles[1]); setHandle(stdout_fd, stdHandles[1]);
stdout_stream = new BufferedInputStream( stdout_stream = new BufferedInputStream(
new FileInputStream(stdout_fd)); new FileInputStream(stdout_fd));
} }
@ -460,7 +493,7 @@ public class ProcessImplForWin32 extends Process {
stderr_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE; stderr_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE;
else { else {
FileDescriptor stderr_fd = new FileDescriptor(); FileDescriptor stderr_fd = new FileDescriptor();
fdAccess.setHandle(stderr_fd, stdHandles[2]); setHandle(stderr_fd, stdHandles[2]);
stderr_stream = new FileInputStream(stderr_fd); stderr_stream = new FileInputStream(stderr_fd);
} }

2
dolphinscheduler-common/src/main/resources/common.properties

@@ -91,4 +91,4 @@ yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
 # If it is a single resourcemanager, you only need to configure one host name. If it is resourcemanager HA, the default configuration is fine
 yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
+kerberos.expire.time=7

55
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java

@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.task;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.junit.Assert;
import org.junit.Test;
import java.util.LinkedList;
import java.util.List;
public class FlinkParametersTest {
@Test
public void getResourceFilesList() {
FlinkParameters flinkParameters = new FlinkParameters();
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertTrue(flinkParameters.getResourceFilesList().isEmpty());
ResourceInfo mainResource = new ResourceInfo();
mainResource.setRes("testFlinkMain-1.0.0-SNAPSHOT.jar");
flinkParameters.setMainJar(mainResource);
List<ResourceInfo> resourceInfos = new LinkedList<>();
ResourceInfo resourceInfo1 = new ResourceInfo();
resourceInfo1.setRes("testFlinkParameters1.jar");
resourceInfos.add(resourceInfo1);
flinkParameters.setResourceList(resourceInfos);
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertEquals(2, flinkParameters.getResourceFilesList().size());
ResourceInfo resourceInfo2 = new ResourceInfo();
resourceInfo2.setRes("testFlinkParameters2.jar");
resourceInfos.add(resourceInfo2);
flinkParameters.setResourceList(resourceInfos);
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertEquals(3, flinkParameters.getResourceFilesList().size());
}
}

17
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java

@ -30,29 +30,32 @@ public class FileUtilsTest {
@Test @Test
public void suffix() { public void suffix() {
Assert.assertEquals(FileUtils.suffix("ninfor.java"),"java"); Assert.assertEquals("java", FileUtils.suffix("ninfor.java"));
Assert.assertEquals("", FileUtils.suffix(null));
Assert.assertEquals("", FileUtils.suffix(""));
Assert.assertEquals("", FileUtils.suffix("ninfor-java"));
} }
@Test @Test
public void testGetDownloadFilename() { public void testGetDownloadFilename() {
PowerMockito.mockStatic(DateUtils.class); PowerMockito.mockStatic(DateUtils.class);
PowerMockito.when(DateUtils.getCurrentTime(YYYYMMDDHHMMSS)).thenReturn("20190101101059"); PowerMockito.when(DateUtils.getCurrentTime(YYYYMMDDHHMMSS)).thenReturn("20190101101059");
Assert.assertEquals(FileUtils.getDownloadFilename("test"), Assert.assertEquals("/tmp/dolphinscheduler/download/20190101101059/test",
"/tmp/dolphinscheduler/download/20190101101059/test"); FileUtils.getDownloadFilename("test"));
} }
@Test @Test
public void testGetUploadFilename() { public void testGetUploadFilename() {
Assert.assertEquals(FileUtils.getUploadFilename("aaa","bbb"), Assert.assertEquals("/tmp/dolphinscheduler/aaa/resources/bbb",
"/tmp/dolphinscheduler/aaa/resources/bbb"); FileUtils.getUploadFilename("aaa","bbb"));
} }
@Test @Test
public void testGetProcessExecDir() { public void testGetProcessExecDir() {
String dir = FileUtils.getProcessExecDir(1,2,3, 4); String dir = FileUtils.getProcessExecDir(1,2,3, 4);
Assert.assertEquals(dir, "/tmp/dolphinscheduler/exec/process/1/2/3/4"); Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3/4", dir);
dir = FileUtils.getProcessExecDir(1,2,3); dir = FileUtils.getProcessExecDir(1,2,3);
Assert.assertEquals(dir, "/tmp/dolphinscheduler/exec/process/1/2/3"); Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3", dir);
} }
@Test @Test

2
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java

@ -38,7 +38,7 @@ public class HttpUtilsTest {
String result = HttpUtils.get("https://github.com/manifest.json"); String result = HttpUtils.get("https://github.com/manifest.json");
Assert.assertNotNull(result); Assert.assertNotNull(result);
JSONObject jsonObject = JSON.parseObject(result); JSONObject jsonObject = JSON.parseObject(result);
Assert.assertEquals(jsonObject.getString("name"), "GitHub"); Assert.assertEquals("GitHub", jsonObject.getString("name"));
result = HttpUtils.get("https://123.333.111.33/ccc"); result = HttpUtils.get("https://123.333.111.33/ccc");
Assert.assertNull(result); Assert.assertNull(result);

6
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java

@ -29,13 +29,13 @@ public class IpUtilsTest {
long longNumber = IpUtils.ipToLong(ip); long longNumber = IpUtils.ipToLong(ip);
long longNumber2 = IpUtils.ipToLong(ip2); long longNumber2 = IpUtils.ipToLong(ip2);
System.out.println(longNumber); System.out.println(longNumber);
Assert.assertEquals(longNumber, 3232263681L); Assert.assertEquals(3232263681L, longNumber);
Assert.assertEquals(longNumber2, 0L); Assert.assertEquals(0L, longNumber2);
String ip3 = "255.255.255.255"; String ip3 = "255.255.255.255";
long longNumber3 = IpUtils.ipToLong(ip3); long longNumber3 = IpUtils.ipToLong(ip3);
System.out.println(longNumber3); System.out.println(longNumber3);
Assert.assertEquals(longNumber3, 4294967295L); Assert.assertEquals(4294967295L, longNumber3);
} }

22
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java

@ -16,10 +16,10 @@
*/ */
package org.apache.dolphinscheduler.common.utils; package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.Direct;
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.Property;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import org.junit.Assert; import org.junit.Assert;
@ -40,8 +40,8 @@ public class JSONUtilsTest {
String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}"; String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}";
Map<String,String> models = JSONUtils.toMap(jsonStr); Map<String,String> models = JSONUtils.toMap(jsonStr);
Assert.assertEquals(models.get("id"), "1001"); Assert.assertEquals("1001", models.get("id"));
Assert.assertEquals(models.get("name"), "Jobs"); Assert.assertEquals("Jobs", models.get("name"));
} }
@ -53,9 +53,9 @@ public class JSONUtilsTest {
property.setType(DataType.VARCHAR); property.setType(DataType.VARCHAR);
property.setValue("sssssss"); property.setValue("sssssss");
String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}"; String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}";
Property property1 = JSONObject.parseObject(str, Property.class); Property property1 = JSON.parseObject(str, Property.class);
Direct direct = property1.getDirect(); Direct direct = property1.getDirect();
Assert.assertEquals(direct , Direct.IN); Assert.assertEquals(Direct.IN, direct);
} }
@ -66,12 +66,12 @@ public class JSONUtilsTest {
List<LinkedHashMap> maps = JSONUtils.toList(str, List<LinkedHashMap> maps = JSONUtils.toList(str,
LinkedHashMap.class); LinkedHashMap.class);
Assert.assertEquals(maps.size(), 1); Assert.assertEquals(1, maps.size());
Assert.assertEquals(maps.get(0).get("mysql service name"), "mysql200"); Assert.assertEquals("mysql200", maps.get(0).get("mysql service name"));
Assert.assertEquals(maps.get(0).get("mysql address"), "192.168.xx.xx"); Assert.assertEquals("192.168.xx.xx", maps.get(0).get("mysql address"));
Assert.assertEquals(maps.get(0).get("port"), "3306"); Assert.assertEquals("3306", maps.get(0).get("port"));
Assert.assertEquals(maps.get(0).get("no index of number"), "80"); Assert.assertEquals("80", maps.get(0).get("no index of number"));
Assert.assertEquals(maps.get(0).get("database client connections"), "190"); Assert.assertEquals("190", maps.get(0).get("database client connections"));
} }
public String list2String(){ public String list2String(){

8
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.common.utils; package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import org.apache.commons.lang.time.DateUtils; import org.apache.commons.lang.time.DateUtils;
import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.DataType;
@ -91,13 +91,13 @@ public class ParameterUtilsTest {
globalParamList.add(property); globalParamList.add(property);
String result2 = ParameterUtils.curingGlobalParams(null,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime); String result2 = ParameterUtils.curingGlobalParams(null,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime);
Assert.assertEquals(result2, JSONObject.toJSONString(globalParamList)); Assert.assertEquals(result2, JSON.toJSONString(globalParamList));
String result3 = ParameterUtils.curingGlobalParams(globalParamMap,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,null); String result3 = ParameterUtils.curingGlobalParams(globalParamMap,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,null);
Assert.assertEquals(result3, JSONObject.toJSONString(globalParamList)); Assert.assertEquals(result3, JSON.toJSONString(globalParamList));
String result4 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime); String result4 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime);
Assert.assertEquals(result4, JSONObject.toJSONString(globalParamList)); Assert.assertEquals(result4, JSON.toJSONString(globalParamList));
//test var $ startsWith //test var $ startsWith
globalParamMap.put("bizDate","${system.biz.date}"); globalParamMap.put("bizDate","${system.biz.date}");

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java

@@ -61,7 +61,7 @@ public class MonitorDBDao {
                 return new PostgrePerformance().getMonitorRecord(conn);
             }
         }catch (Exception e) {
-            logger.error("SQLException " + e);
+            logger.error("SQLException: {}", e.getMessage(), e);
         }finally {
             try {
                 if (conn != null) {

134
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java

@ -50,25 +50,28 @@ public class TaskRecordDao {
static { static {
try { try {
conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES); conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES);
}catch (ConfigurationException e){ } catch (ConfigurationException e) {
logger.error("load configuration exception",e); logger.error("load configuration exception", e);
System.exit(1); System.exit(1);
} }
} }
/** /**
* get task record flag * get task record flag
*
* @return whether startup taskrecord * @return whether startup taskrecord
*/ */
public static boolean getTaskRecordFlag(){ public static boolean getTaskRecordFlag() {
return conf.getBoolean(Constants.TASK_RECORD_FLAG); return conf.getBoolean(Constants.TASK_RECORD_FLAG);
} }
/** /**
* create connection * create connection
*
* @return connection * @return connection
*/ */
private static Connection getConn() { private static Connection getConn() {
if(!getTaskRecordFlag()){ if (!getTaskRecordFlag()) {
return null; return null;
} }
String driver = "com.mysql.jdbc.Driver"; String driver = "com.mysql.jdbc.Driver";
@ -90,101 +93,96 @@ public class TaskRecordDao {
/** /**
* generate where sql string * generate where sql string
*
* @param filterMap filterMap * @param filterMap filterMap
* @return sql string * @return sql string
*/ */
private static String getWhereString(Map<String, String> filterMap) { private static String getWhereString(Map<String, String> filterMap) {
if(filterMap.size() ==0){ if (filterMap.size() == 0) {
return ""; return "";
} }
String result = " where 1=1 "; String result = " where 1=1 ";
Object taskName = filterMap.get("taskName"); Object taskName = filterMap.get("taskName");
if(taskName != null && StringUtils.isNotEmpty(taskName.toString())){ if (taskName != null && StringUtils.isNotEmpty(taskName.toString())) {
result += " and PROC_NAME like concat('%', '" + taskName.toString() + "', '%') "; result += " and PROC_NAME like concat('%', '" + taskName.toString() + "', '%') ";
} }
Object taskDate = filterMap.get("taskDate"); Object taskDate = filterMap.get("taskDate");
if(taskDate != null && StringUtils.isNotEmpty(taskDate.toString())){ if (taskDate != null && StringUtils.isNotEmpty(taskDate.toString())) {
result += " and PROC_DATE='" + taskDate.toString() + "'"; result += " and PROC_DATE='" + taskDate.toString() + "'";
} }
Object state = filterMap.get("state"); Object state = filterMap.get("state");
if(state != null && StringUtils.isNotEmpty(state.toString())){ if (state != null && StringUtils.isNotEmpty(state.toString())) {
result += " and NOTE='" + state.toString() + "'"; result += " and NOTE='" + state.toString() + "'";
} }
Object sourceTable = filterMap.get("sourceTable"); Object sourceTable = filterMap.get("sourceTable");
if(sourceTable!= null && StringUtils.isNotEmpty(sourceTable.toString())){ if (sourceTable != null && StringUtils.isNotEmpty(sourceTable.toString())) {
result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString()+ "', '%')"; result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString() + "', '%')";
} }
Object targetTable = filterMap.get("targetTable"); Object targetTable = filterMap.get("targetTable");
if(sourceTable!= null && StringUtils.isNotEmpty(targetTable.toString())){ if (sourceTable != null && StringUtils.isNotEmpty(targetTable.toString())) {
result += " and TARGET_TAB like concat('%', '"+ targetTable.toString()+"', '%') " ; result += " and TARGET_TAB like concat('%', '" + targetTable.toString() + "', '%') ";
} }
Object start = filterMap.get("startTime"); Object start = filterMap.get("startTime");
if(start != null && StringUtils.isNotEmpty(start.toString())){ if (start != null && StringUtils.isNotEmpty(start.toString())) {
result += " and STARTDATE>='" + start.toString() + "'"; result += " and STARTDATE>='" + start.toString() + "'";
} }
Object end = filterMap.get("endTime"); Object end = filterMap.get("endTime");
if(end != null && StringUtils.isNotEmpty(end.toString())){ if (end != null && StringUtils.isNotEmpty(end.toString())) {
result += " and ENDDATE>='" + end.toString()+ "'"; result += " and ENDDATE>='" + end.toString() + "'";
} }
return result; return result;
} }
/** /**
* count task record * count task record
*
* @param filterMap filterMap * @param filterMap filterMap
* @param table table * @param table table
* @return task record count * @return task record count
*/ */
-    public static int countTaskRecord(Map<String, String> filterMap, String table){
+    public static int countTaskRecord(Map<String, String> filterMap, String table) {
         int count = 0;
         Connection conn = null;
         PreparedStatement pstmt = null;
+        ResultSet rs = null;
         try {
             conn = getConn();
-            if(conn == null){
+            if (conn == null) {
                 return count;
             }
             String sql = String.format("select count(1) as count from %s", table);
             sql += getWhereString(filterMap);
             pstmt = conn.prepareStatement(sql);
-            ResultSet rs = pstmt.executeQuery();
-            while(rs.next()){
+            rs = pstmt.executeQuery();
+            while (rs.next()) {
                 count = rs.getInt("count");
                 break;
             }
         } catch (SQLException e) {
             logger.error("Exception ", e);
-        }finally {
-            try {
-                if(pstmt != null) {
-                    pstmt.close();
-                }
-                if(conn != null){
-                    conn.close();
-                }
-            } catch (SQLException e) {
-                logger.error("Exception ", e);
-            }
+        } finally {
+            closeResource(rs, pstmt, conn);
         }
         return count;
     }
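
Note: both query helpers in this file now delegate cleanup to a closeResource(rs, pstmt, conn) method that this diff adds elsewhere in the class but that is not shown in this excerpt. A hedged sketch of what such a helper presumably looks like (close in reverse order of acquisition: ResultSet, then Statement, then Connection):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class JdbcCloseSketch {

    // Close JDBC resources independently, logging instead of rethrowing.
    static void closeResource(ResultSet rs, PreparedStatement pstmt, Connection conn) {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                System.err.println("close ResultSet error: " + e.getMessage());
            }
        }
        if (pstmt != null) {
            try {
                pstmt.close();
            } catch (SQLException e) {
                System.err.println("close PreparedStatement error: " + e.getMessage());
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                System.err.println("close Connection error: " + e.getMessage());
            }
        }
    }
}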
/** /**
* query task record by filter map paging * query task record by filter map paging
*
* @param filterMap filterMap * @param filterMap filterMap
* @param table table * @param table table
* @return task record list * @return task record list
*/ */
public static List<TaskRecord> queryAllTaskRecord(Map<String,String> filterMap , String table) { public static List<TaskRecord> queryAllTaskRecord(Map<String, String> filterMap, String table) {
String sql = String.format("select * from %s", table); String sql = String.format("select * from %s", table);
sql += getWhereString(filterMap); sql += getWhereString(filterMap);
@ -194,9 +192,9 @@ public class TaskRecordDao {
sql += String.format(" order by STARTDATE desc limit %d,%d", offset, pageSize); sql += String.format(" order by STARTDATE desc limit %d,%d", offset, pageSize);
List<TaskRecord> recordList = new ArrayList<>(); List<TaskRecord> recordList = new ArrayList<>();
try{ try {
recordList = getQueryResult(sql); recordList = getQueryResult(sql);
}catch (Exception e){ } catch (Exception e) {
logger.error("Exception ", e); logger.error("Exception ", e);
} }
return recordList; return recordList;
@ -204,6 +202,7 @@ public class TaskRecordDao {
/** /**
* convert result set to task record * convert result set to task record
*
* @param resultSet resultSet * @param resultSet resultSet
* @return task record * @return task record
* @throws SQLException if error throws SQLException * @throws SQLException if error throws SQLException
@ -232,6 +231,7 @@ public class TaskRecordDao {
/** /**
* query task list by select sql * query task list by select sql
*
* @param selectSql select sql * @param selectSql select sql
* @return task record list * @return task record list
*/ */
@ -239,65 +239,81 @@ public class TaskRecordDao {
List<TaskRecord> recordList = new ArrayList<>(); List<TaskRecord> recordList = new ArrayList<>();
Connection conn = null; Connection conn = null;
PreparedStatement pstmt = null; PreparedStatement pstmt = null;
ResultSet rs = null;
try { try {
conn = getConn(); conn = getConn();
if(conn == null){ if (conn == null) {
return recordList; return recordList;
} }
pstmt = conn.prepareStatement(selectSql); pstmt = conn.prepareStatement(selectSql);
ResultSet rs = pstmt.executeQuery(); rs = pstmt.executeQuery();
while(rs.next()){ while (rs.next()) {
TaskRecord taskRecord = convertToTaskRecord(rs); TaskRecord taskRecord = convertToTaskRecord(rs);
recordList.add(taskRecord); recordList.add(taskRecord);
} }
} catch (SQLException e) { } catch (SQLException e) {
logger.error("Exception ", e); logger.error("Exception ", e);
}finally { } finally {
try { closeResource(rs, pstmt, conn);
if(pstmt != null) {
pstmt.close();
}
if(conn != null){
conn.close();
}
} catch (SQLException e) {
logger.error("Exception ", e);
}
} }
return recordList; return recordList;
} }
/** /**
* according to procname and procdate query task record * according to procname and procdate query task record
*
* @param procName procName * @param procName procName
* @param procDate procDate * @param procDate procDate
* @return task record status * @return task record status
*/ */
public static TaskRecordStatus getTaskRecordState(String procName,String procDate){ public static TaskRecordStatus getTaskRecordState(String procName, String procDate) {
String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'" String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'"
,procName,procDate + "%"); , procName, procDate + "%");
List<TaskRecord> taskRecordList = getQueryResult(sql); List<TaskRecord> taskRecordList = getQueryResult(sql);
// contains no record and sql exception // contains no record and sql exception
if (CollectionUtils.isEmpty(taskRecordList)){ if (CollectionUtils.isEmpty(taskRecordList)) {
// exception // exception
return TaskRecordStatus.EXCEPTION; return TaskRecordStatus.EXCEPTION;
}else if (taskRecordList.size() > 1){ } else if (taskRecordList.size() > 1) {
return TaskRecordStatus.EXCEPTION; return TaskRecordStatus.EXCEPTION;
}else { } else {
TaskRecord taskRecord = taskRecordList.get(0); TaskRecord taskRecord = taskRecordList.get(0);
if (taskRecord == null){ if (taskRecord == null) {
return TaskRecordStatus.EXCEPTION; return TaskRecordStatus.EXCEPTION;
} }
Long targetRowCount = taskRecord.getTargetRowCount(); Long targetRowCount = taskRecord.getTargetRowCount();
if (targetRowCount <= 0){ if (targetRowCount <= 0) {
return TaskRecordStatus.FAILURE; return TaskRecordStatus.FAILURE;
}else { } else {
return TaskRecordStatus.SUCCESS; return TaskRecordStatus.SUCCESS;
} }
} }
} }
private static void closeResource(ResultSet rs, PreparedStatement pstmt, Connection conn) {
if (rs != null) {
try {
rs.close();
} catch (SQLException e) {
logger.error("Exception ", e);
}
}
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
logger.error("Exception ", e);
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
logger.error("Exception ", e);
}
}
}
} }
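
The new closeResource(rs, pstmt, conn) helper above replaces the duplicated null-check-and-close blocks in countTaskRecord and getQueryResult. For reference, a minimal self-contained sketch of the same cleanup expressed with try-with-resources; the connection URL, credentials, class and table names below are illustrative placeholders, not code from this commit:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class CountExample {
    // Connection details are placeholders for illustration only.
    private static Connection getConn() throws SQLException {
        return DriverManager.getConnection("jdbc:mysql://localhost:3306/demo", "user", "password");
    }

    // Counts rows in the given table; resources are closed automatically in reverse
    // order, which is what the hand-written closeResource(rs, pstmt, conn) does manually.
    public static int countRows(String table) {
        String sql = "select count(1) as count from " + table;
        try (Connection conn = getConn();
             PreparedStatement pstmt = conn.prepareStatement(sql);
             ResultSet rs = pstmt.executeQuery()) {
            return rs.next() ? rs.getInt("count") : 0;
        } catch (SQLException e) {
            return 0;
        }
    }
}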

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java

@ -39,7 +39,7 @@ public class HiveDataSource extends BaseDataSource {
@Override @Override
public String getJdbcUrl() { public String getJdbcUrl() {
String jdbcUrl = getAddress(); String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) {
jdbcUrl += "/"; jdbcUrl += "/";
} }

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java

@ -39,7 +39,7 @@ public class MySQLDataSource extends BaseDataSource {
@Override @Override
public String getJdbcUrl() { public String getJdbcUrl() {
String address = getAddress(); String address = getAddress();
if (address.lastIndexOf("/") != (address.length() - 1)) { if (address.lastIndexOf('/') != (address.length() - 1)) {
address += "/"; address += "/";
} }
String jdbcUrl = address + getDatabase(); String jdbcUrl = address + getDatabase();

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java

@ -40,7 +40,7 @@ public class PostgreDataSource extends BaseDataSource {
@Override @Override
public String getJdbcUrl() { public String getJdbcUrl() {
String jdbcUrl = getAddress(); String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) {
jdbcUrl += "/"; jdbcUrl += "/";
} }
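
The three data source classes (Hive, MySQL, Postgre) apply the same trailing-slash check, now with the char overload of lastIndexOf. A standalone sketch of that check; the utility class and method names are illustrative, not part of this commit:

public final class JdbcUrlUtil {
    private JdbcUrlUtil() { }

    // Appends a trailing '/' when the address does not already end with one;
    // lastIndexOf('/') with a char argument behaves the same as the String overload here.
    public static String ensureTrailingSlash(String address) {
        if (address.lastIndexOf('/') != (address.length() - 1)) {
            return address + "/";
        }
        return address;
    }

    public static void main(String[] args) {
        System.out.println(ensureTrailingSlash("jdbc:hive2://host:10000"));   // adds '/'
        System.out.println(ensureTrailingSlash("jdbc:hive2://host:10000/"));  // unchanged
    }
}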

8
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java

@ -16,10 +16,10 @@
*/ */
package org.apache.dolphinscheduler.dao.entity; package org.apache.dolphinscheduler.dao.entity;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.Property;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableId;
@ -266,7 +266,7 @@ public class ProcessDefinition {
} }
public void setGlobalParams(String globalParams) { public void setGlobalParams(String globalParams) {
this.globalParamList = JSONObject.parseArray(globalParams, Property.class); this.globalParamList = JSON.parseArray(globalParams, Property.class);
this.globalParams = globalParams; this.globalParams = globalParams;
} }
@ -275,7 +275,7 @@ public class ProcessDefinition {
} }
public void setGlobalParamList(List<Property> globalParamList) { public void setGlobalParamList(List<Property> globalParamList) {
this.globalParams = JSONObject.toJSONString(globalParamList); this.globalParams = JSON.toJSONString(globalParamList);
this.globalParamList = globalParamList; this.globalParamList = globalParamList;
} }
@ -283,7 +283,7 @@ public class ProcessDefinition {
List<Property> propList; List<Property> propList;
if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) { if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) {
propList = JSONObject.parseArray(globalParams, Property.class); propList = JSON.parseArray(globalParams, Property.class);
globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
} }
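
The globalParams setters above switch from the JSONObject class to the JSON facade; in fastjson both expose the same static parseArray and toJSONString methods. A small self-contained round-trip sketch, where the Prop POJO is only a stand-in for the real Property class:

import com.alibaba.fastjson.JSON;
import java.util.Arrays;
import java.util.List;

public class GlobalParamsExample {
    // Minimal stand-in for org.apache.dolphinscheduler.common.process.Property.
    public static class Prop {
        private String prop;
        private String value;
        public Prop() { }
        public Prop(String prop, String value) { this.prop = prop; this.value = value; }
        public String getProp() { return prop; }
        public void setProp(String prop) { this.prop = prop; }
        public String getValue() { return value; }
        public void setValue(String value) { this.value = value; }
    }

    public static void main(String[] args) {
        List<Prop> params = Arrays.asList(new Prop("bizDate", "20200101"));
        String json = JSON.toJSONString(params);              // serialize list to a JSON array
        List<Prop> back = JSON.parseArray(json, Prop.class);  // parse back to a typed list
        System.out.println(json + " -> " + back.get(0).getProp());
    }
}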

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java

@ -506,7 +506,7 @@ public class ProcessInstance {
* check this process is start complement data * check this process is start complement data
* @return whether complement data * @return whether complement data
*/ */
public Boolean isComplementData(){ public boolean isComplementData(){
if(!StringUtils.isNotEmpty(this.historyCmd)){ if(!StringUtils.isNotEmpty(this.historyCmd)){
return false; return false;
} }

4
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java

@ -373,7 +373,7 @@ public class TaskInstance {
} }
public Boolean isSubProcess(){ public boolean isSubProcess(){
return TaskType.SUB_PROCESS.getDescp().equals(this.taskType); return TaskType.SUB_PROCESS.getDescp().equals(this.taskType);
} }
@ -442,7 +442,7 @@ public class TaskInstance {
this.executorName = executorName; this.executorName = executorName;
} }
public Boolean isTaskComplete() { public boolean isTaskComplete() {
return this.getState().typeIsPause() return this.getState().typeIsPause()
|| this.getState().typeIsSuccess() || this.getState().typeIsSuccess()

23
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java

@ -29,15 +29,7 @@ import java.sql.SQLException;
*/ */
public class MysqlUpgradeDao extends UpgradeDao { public class MysqlUpgradeDao extends UpgradeDao {
public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); public static final Logger logger = LoggerFactory.getLogger(MysqlUpgradeDao.class);
/**
* init
*/
@Override
protected void init() {
}
/** /**
* mysql upgrade dao holder * mysql upgrade dao holder
@ -69,12 +61,7 @@ public class MysqlUpgradeDao extends UpgradeDao {
try { try {
conn = dataSource.getConnection(); conn = dataSource.getConnection();
rs = conn.getMetaData().getTables(null, null, tableName, null); rs = conn.getMetaData().getTables(null, null, tableName, null);
if (rs.next()) { return rs.next();
return true;
} else {
return false;
}
} catch (SQLException e) { } catch (SQLException e) {
logger.error(e.getMessage(),e); logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e);
@ -96,11 +83,7 @@ public class MysqlUpgradeDao extends UpgradeDao {
try { try {
conn = dataSource.getConnection(); conn = dataSource.getConnection();
ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName); ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName);
if (rs.next()) { return rs.next();
return true;
} else {
return false;
}
} catch (SQLException e) { } catch (SQLException e) {
logger.error(e.getMessage(),e); logger.error(e.getMessage(),e);

39
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java

@ -30,16 +30,8 @@ import java.sql.SQLException;
*/ */
public class PostgresqlUpgradeDao extends UpgradeDao { public class PostgresqlUpgradeDao extends UpgradeDao {
public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); public static final Logger logger = LoggerFactory.getLogger(PostgresqlUpgradeDao.class);
private static final String schema = getSchema(); private static final String SCHEMA = getSchema();
/**
* init
*/
@Override
protected void init() {
}
/** /**
* postgresql upgrade dao holder * postgresql upgrade dao holder
@ -58,16 +50,6 @@ public class PostgresqlUpgradeDao extends UpgradeDao {
return PostgresqlUpgradeDaoHolder.INSTANCE; return PostgresqlUpgradeDaoHolder.INSTANCE;
} }
/**
* init schema
* @param initSqlPath initSqlPath
*/
@Override
public void initSchema(String initSqlPath) {
super.initSchema(initSqlPath);
}
/** /**
* getSchema * getSchema
* @return schema * @return schema
@ -107,13 +89,9 @@ public class PostgresqlUpgradeDao extends UpgradeDao {
try { try {
conn = dataSource.getConnection(); conn = dataSource.getConnection();
rs = conn.getMetaData().getTables(null, schema, tableName, null); rs = conn.getMetaData().getTables(null, SCHEMA, tableName, null);
if (rs.next()) {
return true;
} else {
return false;
}
return rs.next();
} catch (SQLException e) { } catch (SQLException e) {
logger.error(e.getMessage(),e); logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e);
@ -135,13 +113,8 @@ public class PostgresqlUpgradeDao extends UpgradeDao {
ResultSet rs = null; ResultSet rs = null;
try { try {
conn = dataSource.getConnection(); conn = dataSource.getConnection();
rs = conn.getMetaData().getColumns(null,schema,tableName,columnName); rs = conn.getMetaData().getColumns(null, SCHEMA,tableName,columnName);
if (rs.next()) { return rs.next();
return true;
} else {
return false;
}
} catch (SQLException e) { } catch (SQLException e) {
logger.error(e.getMessage(),e); logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e);
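
Both upgrade DAOs collapse the if/else around rs.next() into a direct return. A standalone sketch of that table-existence check, with connection handling simplified and names chosen for illustration only:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class SchemaChecks {
    private SchemaChecks() { }

    // Returns whether the named table exists: ResultSet.next() is already the
    // boolean we want, so returning it directly replaces the if/else branches.
    public static boolean tableExists(Connection conn, String tableName) throws SQLException {
        try (ResultSet rs = conn.getMetaData().getTables(null, null, tableName, null)) {
            return rs.next();
        }
    }
}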

3
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java

@ -27,7 +27,6 @@ import java.util.Date;
import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.dao.MonitorDBDao;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -37,7 +36,7 @@ import org.slf4j.LoggerFactory;
*/ */
public class MysqlPerformance extends BaseDBPerformance{ public class MysqlPerformance extends BaseDBPerformance{
private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class); private static Logger logger = LoggerFactory.getLogger(MysqlPerformance.class);
/** /**

3
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java

@ -24,7 +24,6 @@ import java.util.Date;
import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.dao.MonitorDBDao;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -34,7 +33,7 @@ import org.slf4j.LoggerFactory;
*/ */
public class PostgrePerformance extends BaseDBPerformance { public class PostgrePerformance extends BaseDBPerformance {
private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class); private static Logger logger = LoggerFactory.getLogger(PostgrePerformance.class);
/** /**
* get monitor record * get monitor record

6
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java

@ -77,7 +77,7 @@ public class ProcessDefinitionMapperTest {
//update //update
processDefinition.setUpdateTime(new Date()); processDefinition.setUpdateTime(new Date());
int update = processDefinitionMapper.updateById(processDefinition); int update = processDefinitionMapper.updateById(processDefinition);
Assert.assertEquals(update, 1); Assert.assertEquals(1, update);
processDefinitionMapper.deleteById(processDefinition.getId()); processDefinitionMapper.deleteById(processDefinition.getId());
} }
@ -88,7 +88,7 @@ public class ProcessDefinitionMapperTest {
public void testDelete(){ public void testDelete(){
ProcessDefinition processDefinition = insertOne(); ProcessDefinition processDefinition = insertOne();
int delete = processDefinitionMapper.deleteById(processDefinition.getId()); int delete = processDefinitionMapper.deleteById(processDefinition.getId());
Assert.assertEquals(delete, 1); Assert.assertEquals(1, delete);
} }
/** /**
@ -189,7 +189,7 @@ public class ProcessDefinitionMapperTest {
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryDefinitionListByIdList(array); List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryDefinitionListByIdList(array);
processDefinitionMapper.deleteById(processDefinition.getId()); processDefinitionMapper.deleteById(processDefinition.getId());
processDefinitionMapper.deleteById(processDefinition1.getId()); processDefinitionMapper.deleteById(processDefinition1.getId());
Assert.assertEquals(processDefinitions.size(), 2); Assert.assertEquals(2, processDefinitions.size());
} }

53
dolphinscheduler-dist/pom.xml

@ -220,10 +220,7 @@
${basedir}/../dolphinscheduler-alert/src/main/resources ${basedir}/../dolphinscheduler-alert/src/main/resources
</location> </location>
<includes> <includes>
<include>**/*.properties</include> <include>**/*.*</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>**/*.ftl</include>
</includes> </includes>
</source> </source>
@ -232,9 +229,7 @@
${basedir}/../dolphinscheduler-common/src/main/resources ${basedir}/../dolphinscheduler-common/src/main/resources
</location> </location>
<includes> <includes>
<include>**/*.properties</include> <include>**/*.*</include>
<include>**/*.xml</include>
<include>**/*.json</include>
</includes> </includes>
</source> </source>
@ -243,10 +238,7 @@
${basedir}/../dolphinscheduler-dao/src/main/resources ${basedir}/../dolphinscheduler-dao/src/main/resources
</location> </location>
<includes> <includes>
<include>**/*.properties</include> <include>**/*.*</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>**/*.yml</include>
</includes> </includes>
</source> </source>
@ -255,9 +247,7 @@
${basedir}/../dolphinscheduler-api/src/main/resources ${basedir}/../dolphinscheduler-api/src/main/resources
</location> </location>
<includes> <includes>
<include>**/*.properties</include> <include>**/*.*</include>
<include>**/*.xml</include>
<include>**/*.json</include>
</includes> </includes>
</source> </source>
@ -266,13 +256,19 @@
${basedir}/../dolphinscheduler-server/src/main/resources ${basedir}/../dolphinscheduler-server/src/main/resources
</location> </location>
<includes> <includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>config/*.*</include> <include>config/*.*</include>
</includes> </includes>
</source> </source>
<source>
<location>
${basedir}/../dolphinscheduler-service/src/main/resources
</location>
<includes>
<include>*.*</include>
</includes>
</source>
<source> <source>
<location> <location>
${basedir}/../script ${basedir}/../script
@ -342,14 +338,6 @@
</includes> </includes>
</source> </source>
<source>
<location>
${basedir}/../dolphinscheduler-ui
</location>
<includes>
<include>install-dolphinscheduler-ui.sh</include>
</includes>
</source>
<source> <source>
<location> <location>
${basedir}/release-docs ${basedir}/release-docs
@ -362,7 +350,7 @@
</sources> </sources>
</mapping> </mapping>
<mapping> <mapping>
<directory>/opt/soft/${project.build.finalName}/dist</directory> <directory>/opt/soft/${project.build.finalName}/ui</directory>
<filemode>755</filemode> <filemode>755</filemode>
<username>root</username> <username>root</username>
<groupname>root</groupname> <groupname>root</groupname>
@ -391,6 +379,14 @@
<include>**/*.*</include> <include>**/*.*</include>
</includes> </includes>
</source> </source>
<source>
<location>
${basedir}/../sql
</location>
<includes>
<include>soft_version</include>
</includes>
</source>
</sources> </sources>
</mapping> </mapping>
@ -405,7 +401,7 @@
${basedir}/../script ${basedir}/../script
</location> </location>
<includes> <includes>
<include>**/*.*</include> <include>*.sh</include>
</includes> </includes>
</source> </source>
@ -416,6 +412,9 @@
<preinstallScriptlet> <preinstallScriptlet>
<script>mkdir -p /opt/soft</script> <script>mkdir -p /opt/soft</script>
</preinstallScriptlet> </preinstallScriptlet>
<postinstallScriptlet>
<script>rm -rf /opt/soft/dolphinscheduler ; ln -s /opt/soft/apache-dolphinscheduler-incubating-${project.version} /opt/soft/dolphinscheduler</script>
</postinstallScriptlet>
<postremoveScriptlet> <postremoveScriptlet>
<script>rm -rf /opt/soft/apache-dolphinscheduler-incubating-${project.version}</script> <script>rm -rf /opt/soft/apache-dolphinscheduler-incubating-${project.version}</script>
</postremoveScriptlet> </postremoveScriptlet>

4
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java

@ -30,12 +30,12 @@ public class Ping implements Serializable {
/** /**
* ping body * ping body
*/ */
protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; protected static final ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
/** /**
* request command body * request command body
*/ */
private static byte[] EMPTY_BODY_ARRAY = new byte[0]; private static final byte[] EMPTY_BODY_ARRAY = new byte[0];
private static final ByteBuf PING_BUF; private static final ByteBuf PING_BUF;

4
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java

@ -30,12 +30,12 @@ public class Pong implements Serializable {
/** /**
* pong body * pong body
*/ */
protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; protected static final ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
/** /**
* pong command body * pong command body
*/ */
private static byte[] EMPTY_BODY_ARRAY = new byte[0]; private static final byte[] EMPTY_BODY_ARRAY = new byte[0];
/** /**
* ping byte buffer * ping byte buffer

3
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.remote.utils; package org.apache.dolphinscheduler.remote.utils;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
/** /**
@ -31,7 +32,7 @@ public class Constants {
/** /**
* charset * charset
*/ */
public static final Charset UTF8 = Charset.forName("UTF-8"); public static final Charset UTF8 = StandardCharsets.UTF_8;
/** /**
* cpus * cpus

21
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.server.log; package org.apache.dolphinscheduler.server.log;
import io.netty.channel.Channel; import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.utils.IOUtils;
import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.log.*; import org.apache.dolphinscheduler.remote.command.log.*;
@ -116,16 +117,8 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
}catch (IOException e){ }catch (IOException e){
logger.error("get file bytes error",e); logger.error("get file bytes error",e);
}finally { }finally {
if (bos != null){ IOUtils.closeQuietly(bos);
try { IOUtils.closeQuietly(in);
bos.close();
} catch (IOException ignore) {}
}
if (in != null){
try {
in.close();
} catch (IOException ignore) {}
}
} }
return new byte[0]; return new byte[0];
} }
@ -146,7 +139,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
} catch (IOException e) { } catch (IOException e) {
logger.error("read file error",e); logger.error("read file error",e);
} }
return Collections.EMPTY_LIST; return Collections.emptyList();
} }
/** /**
@ -168,11 +161,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
}catch (IOException e){ }catch (IOException e){
logger.error("read file error",e); logger.error("read file error",e);
}finally { }finally {
try { IOUtils.closeQuietly(br);
if (br != null){
br.close();
}
} catch (IOException ignore) {}
} }
return ""; return "";
} }
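
A closeQuietly helper of the kind used above typically swallows the IOException from close() so that cleanup in a finally block cannot hide the primary error. An illustrative sketch of such a helper, not the project's actual IOUtils implementation:

import java.io.Closeable;
import java.io.IOException;

public final class QuietClose {
    private QuietClose() { }

    // Closes the resource if non-null, ignoring any IOException raised by close().
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (IOException ignore) {
                // intentionally ignored: a failed close should not mask the original error
            }
        }
    }
}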

36
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java

@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread; import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread;
import org.apache.dolphinscheduler.server.worker.WorkerServer;
import org.apache.dolphinscheduler.server.zk.ZKMasterClient; import org.apache.dolphinscheduler.server.zk.ZKMasterClient;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.process.ProcessService;
@ -37,8 +38,10 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.WebApplicationType; import org.springframework.boot.WebApplicationType;
import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
@ -46,7 +49,9 @@ import java.util.concurrent.TimeUnit;
/** /**
* master server * master server
*/ */
@ComponentScan("org.apache.dolphinscheduler") @ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = {
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = {WorkerServer.class})
})
public class MasterServer implements IStoppable { public class MasterServer implements IStoppable {
/** /**
@ -112,7 +117,7 @@ public class MasterServer implements IStoppable {
masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread"); masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread");
heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.DEFAULT_MASTER_HEARTBEAT_THREAD_NUM); heartbeatMasterService = ThreadUtils.newThreadScheduledExecutor("Master-Main-Thread",Constants.DEFAULT_MASTER_HEARTBEAT_THREAD_NUM, false);
// heartbeat thread implement // heartbeat thread implement
Runnable heartBeatThread = heartBeatThread(); Runnable heartBeatThread = heartBeatThread();
@ -147,23 +152,17 @@ public class MasterServer implements IStoppable {
} }
logger.error("start Quartz failed", e); logger.error("start Quartz failed", e);
} }
/**
* register hooks, which are called before the process exits
*/
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
if (zkMasterClient.getActiveMasterNum() <= 1) {
zkMasterClient.getAlertDao().sendServerStopedAlert(
1, OSUtils.getHost(), "Master-Server");
}
stop("shutdownhook");
}
}));
} }
@PreDestroy
public void destroy() {
// master server exit alert
if (zkMasterClient.getActiveMasterNum() <= 1) {
zkMasterClient.getAlertDao().sendServerStopedAlert(
1, OSUtils.getHost(), "Master-Server");
}
stop("shutdownhook");
}
/** /**
* gracefully stop * gracefully stop
@ -244,7 +243,7 @@ public class MasterServer implements IStoppable {
*/ */
private Runnable heartBeatThread(){ private Runnable heartBeatThread(){
logger.info("start master heart beat thread..."); logger.info("start master heart beat thread...");
Runnable heartBeatThread = new Runnable() { return new Runnable() {
@Override @Override
public void run() { public void run() {
if(Stopper.isRunning()) { if(Stopper.isRunning()) {
@ -258,7 +257,6 @@ public class MasterServer implements IStoppable {
} }
} }
}; };
return heartBeatThread;
} }
} }
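
The MasterServer change above moves exit handling from a manually registered JVM shutdown hook to a @PreDestroy callback that Spring invokes when the application context closes. A minimal sketch of that lifecycle pattern; the class name and method bodies are illustrative only:

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.springframework.stereotype.Component;

@Component
public class LifecycleExample {

    @PostConstruct
    public void start() {
        // start background threads, register with ZooKeeper, etc.
    }

    @PreDestroy
    public void destroy() {
        // invoked by Spring during context shutdown, so an explicit
        // Runtime.getRuntime().addShutdownHook registration is no longer needed
    }
}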

67
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.master.runner; package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
@ -68,7 +68,7 @@ public class MasterExecThread implements Runnable {
/** /**
* runing TaskNode * runing TaskNode
*/ */
private final Map<MasterBaseTaskExecThread,Future<Boolean>> activeTaskNode = new ConcurrentHashMap<MasterBaseTaskExecThread,Future<Boolean>>(); private final Map<MasterBaseTaskExecThread,Future<Boolean>> activeTaskNode = new ConcurrentHashMap<>();
/** /**
* task exec service * task exec service
@ -78,7 +78,7 @@ public class MasterExecThread implements Runnable {
/** /**
* submit failure nodes * submit failure nodes
*/ */
private Boolean taskFailedSubmit = false; private boolean taskFailedSubmit = false;
/** /**
* recover node id list * recover node id list
@ -454,7 +454,7 @@ public class MasterExecThread implements Runnable {
// process instance id // process instance id
taskInstance.setProcessInstanceId(processInstance.getId()); taskInstance.setProcessInstanceId(processInstance.getId());
// task instance node json // task instance node json
taskInstance.setTaskJson(JSONObject.toJSONString(taskNode)); taskInstance.setTaskJson(JSON.toJSONString(taskNode));
// task instance type // task instance type
taskInstance.setTaskType(taskNode.getType()); taskInstance.setTaskType(taskNode.getType());
// task instance whether alert // task instance whether alert
@ -652,7 +652,7 @@ public class MasterExecThread implements Runnable {
continue; continue;
} }
if(task.getState().typeIsPause() || task.getState().typeIsCancel()){ if(task.getState().typeIsPause() || task.getState().typeIsCancel()){
logger.info("task {} stopped, the state is {}", task.getName(), task.getState().toString()); logger.info("task {} stopped, the state is {}", task.getName(), task.getState());
}else{ }else{
addTaskToStandByList(task); addTaskToStandByList(task);
} }
@ -685,11 +685,12 @@ public class MasterExecThread implements Runnable {
} }
ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState();
// conditions task would not return failed. // conditions task would not return failed.
if(depTaskState.typeIsFailure()){ if(depTaskState.typeIsFailure()
if(!haveConditionsAfterNode(depsNode) && !dag.getNode(depsNode).isConditionsTask()){ && !haveConditionsAfterNode(depsNode)
return DependResult.FAILED; && !dag.getNode(depsNode).isConditionsTask()){
} return DependResult.FAILED;
} }
if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){ if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){
return DependResult.WAITING; return DependResult.WAITING;
} }
@ -737,7 +738,7 @@ public class MasterExecThread implements Runnable {
* *
* @return Boolean whether has failed task * @return Boolean whether has failed task
*/ */
private Boolean hasFailedTask(){ private boolean hasFailedTask(){
if(this.taskFailedSubmit){ if(this.taskFailedSubmit){
return true; return true;
@ -753,7 +754,7 @@ public class MasterExecThread implements Runnable {
* *
* @return Boolean whether process instance failed * @return Boolean whether process instance failed
*/ */
private Boolean processFailed(){ private boolean processFailed(){
if(hasFailedTask()) { if(hasFailedTask()) {
if(processInstance.getFailureStrategy() == FailureStrategy.END){ if(processInstance.getFailureStrategy() == FailureStrategy.END){
return true; return true;
@ -769,9 +770,9 @@ public class MasterExecThread implements Runnable {
* whether task for waiting thread * whether task for waiting thread
* @return Boolean whether has waiting thread task * @return Boolean whether has waiting thread task
*/ */
private Boolean hasWaitingThreadTask(){ private boolean hasWaitingThreadTask(){
List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD);
return waitingList.size() > 0; return CollectionUtils.isNotEmpty(waitingList);
} }
/** /**
@ -787,7 +788,7 @@ public class MasterExecThread implements Runnable {
} }
List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE);
if(pauseList.size() > 0 if(CollectionUtils.isNotEmpty(pauseList)
|| !isComplementEnd() || !isComplementEnd()
|| readyToSubmitTaskList.size() > 0){ || readyToSubmitTaskList.size() > 0){
return ExecutionStatus.PAUSE; return ExecutionStatus.PAUSE;
@ -827,7 +828,8 @@ public class MasterExecThread implements Runnable {
if(state == ExecutionStatus.READY_STOP){ if(state == ExecutionStatus.READY_STOP){
List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP); List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP);
List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL); List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL);
if(stopList.size() > 0 || killList.size() > 0 || !isComplementEnd()){ if(CollectionUtils.isNotEmpty(stopList)
|| CollectionUtils.isNotEmpty(killList) || !isComplementEnd()){
return ExecutionStatus.STOP; return ExecutionStatus.STOP;
}else{ }else{
return ExecutionStatus.SUCCESS; return ExecutionStatus.SUCCESS;
@ -852,7 +854,7 @@ public class MasterExecThread implements Runnable {
* whether complement end * whether complement end
* @return Boolean whether is complement end * @return Boolean whether is complement end
*/ */
private Boolean isComplementEnd() { private boolean isComplementEnd() {
if(!processInstance.isComplementData()){ if(!processInstance.isComplementData()){
return true; return true;
} }
@ -877,8 +879,8 @@ public class MasterExecThread implements Runnable {
logger.info( logger.info(
"work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}",
processInstance.getId(), processInstance.getName(), processInstance.getId(), processInstance.getName(),
processInstance.getState().toString(), state.toString(), processInstance.getState(), state,
processInstance.getCommandType().toString()); processInstance.getCommandType());
processInstance.setState(state); processInstance.setState(state);
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
instance.setState(state); instance.setState(state);
@ -894,8 +896,7 @@ public class MasterExecThread implements Runnable {
* @return DependResult * @return DependResult
*/ */
private DependResult getDependResultForTask(TaskInstance taskInstance){ private DependResult getDependResultForTask(TaskInstance taskInstance){
DependResult inner = isTaskDepsComplete(taskInstance.getName()); return isTaskDepsComplete(taskInstance.getName());
return inner;
} }
/** /**
@ -920,7 +921,7 @@ public class MasterExecThread implements Runnable {
* has retry task in standby * has retry task in standby
* @return Boolean whether has retry task in standby * @return Boolean whether has retry task in standby
*/ */
private Boolean hasRetryTaskInStandBy(){ private boolean hasRetryTaskInStandBy(){
for (Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) { for (Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) {
if(entry.getValue().getState().typeIsFailure()){ if(entry.getValue().getState().typeIsFailure()){
return true; return true;
@ -958,7 +959,7 @@ public class MasterExecThread implements Runnable {
continue; continue;
} }
logger.info("task :{}, id:{} complete, state is {} ", logger.info("task :{}, id:{} complete, state is {} ",
task.getName(), task.getId(), task.getState().toString()); task.getName(), task.getId(), task.getState());
// node success , post node submit // node success , post node submit
if(task.getState() == ExecutionStatus.SUCCESS){ if(task.getState() == ExecutionStatus.SUCCESS){
completeTaskList.put(task.getName(), task); completeTaskList.put(task.getName(), task);
@ -990,7 +991,7 @@ public class MasterExecThread implements Runnable {
completeTaskList.put(task.getName(), task); completeTaskList.put(task.getName(), task);
} }
// send alert // send alert
if(this.recoverToleranceFaultTaskList.size() > 0){ if(CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)){
alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList);
this.recoverToleranceFaultTaskList.clear(); this.recoverToleranceFaultTaskList.clear();
} }
@ -1034,10 +1035,7 @@ public class MasterExecThread implements Runnable {
Date now = new Date(); Date now = new Date();
long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); long runningTime = DateUtils.diffMin(now, processInstance.getStartTime());
if(runningTime > processInstance.getTimeout()){ return runningTime > processInstance.getTimeout();
return true;
}
return false;
} }
/** /**
@ -1062,7 +1060,7 @@ public class MasterExecThread implements Runnable {
TaskInstance taskInstance = taskExecThread.getTaskInstance(); TaskInstance taskInstance = taskExecThread.getTaskInstance();
taskInstance = processService.findTaskInstanceById(taskInstance.getId()); taskInstance = processService.findTaskInstanceById(taskInstance.getId());
if(taskInstance.getState().typeIsFinished()){ if(taskInstance != null && taskInstance.getState().typeIsFinished()){
continue; continue;
} }
@ -1081,22 +1079,19 @@ public class MasterExecThread implements Runnable {
* @param taskInstance task instance * @param taskInstance task instance
* @return Boolean * @return Boolean
*/ */
private Boolean retryTaskIntervalOverTime(TaskInstance taskInstance){ private boolean retryTaskIntervalOverTime(TaskInstance taskInstance){
if(taskInstance.getState() != ExecutionStatus.FAILURE){ if(taskInstance.getState() != ExecutionStatus.FAILURE){
return Boolean.TRUE; return true;
} }
if(taskInstance.getId() == 0 || if(taskInstance.getId() == 0 ||
taskInstance.getMaxRetryTimes() ==0 || taskInstance.getMaxRetryTimes() ==0 ||
taskInstance.getRetryInterval() == 0 ){ taskInstance.getRetryInterval() == 0 ){
return Boolean.TRUE; return true;
} }
Date now = new Date(); Date now = new Date();
long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime());
// task retry does not over time, return false // task retry does not over time, return false
if(taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT >= failedTimeInterval){ return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval;
return Boolean.FALSE;
}
return Boolean.TRUE;
} }
/** /**
@ -1189,7 +1184,7 @@ public class MasterExecThread implements Runnable {
*/ */
private List<String> getRecoveryNodeNameList(){ private List<String> getRecoveryNodeNameList(){
List<String> recoveryNodeNameList = new ArrayList<>(); List<String> recoveryNodeNameList = new ArrayList<>();
if(recoverNodeIdList.size() > 0) { if(CollectionUtils.isNotEmpty(recoverNodeIdList)) {
for (TaskInstance task : recoverNodeIdList) { for (TaskInstance task : recoverNodeIdList) {
recoveryNodeNameList.add(task.getName()); recoveryNodeNameList.add(task.getName());
} }
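
Several of the MasterExecThread changes follow the same two patterns: return the boolean expression directly instead of an if/else that returns true/false, and use a null-safe emptiness check instead of list.size() > 0. A compact illustrative sketch of both, with method names invented for the example:

import java.util.Date;
import java.util.List;

public final class BooleanStyleExample {
    private BooleanStyleExample() { }

    // Returning the comparison directly replaces the if/else that returned true/false.
    public static boolean isTimedOut(Date start, long timeoutMinutes) {
        long runningMinutes = (System.currentTimeMillis() - start.getTime()) / (60 * 1000);
        return runningMinutes > timeoutMinutes;
    }

    // The CollectionUtils.isNotEmpty(...) calls introduced above encapsulate this
    // null-safe check, which list.size() > 0 does not provide.
    public static boolean hasWaiting(List<String> waitingList) {
        return waitingList != null && !waitingList.isEmpty();
    }
}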

11
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java

@ -16,6 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.master.runner; package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
@ -25,7 +26,6 @@ import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import com.alibaba.fastjson.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -64,7 +64,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
/** /**
* whether already Killed,default false * whether already Killed,default false
*/ */
private Boolean alreadyKilled = false; private boolean alreadyKilled = false;
/** /**
* submit task instance and wait complete * submit task instance and wait complete
@ -98,7 +98,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
logger.info("wait task: process id: {}, task id:{}, task name:{} complete", logger.info("wait task: process id: {}, task id:{}, task name:{} complete",
this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName()); this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName());
// task time out // task time out
Boolean checkTimeout = false; boolean checkTimeout = false;
TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter(); TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter();
if(taskTimeoutParameter.getEnable()){ if(taskTimeoutParameter.getEnable()){
TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy(); TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy();
@ -176,7 +176,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
*/ */
private TaskTimeoutParameter getTaskTimeoutParameter(){ private TaskTimeoutParameter getTaskTimeoutParameter(){
String taskJson = taskInstance.getTaskJson(); String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
return taskNode.getTaskTimeoutParameter(); return taskNode.getTaskTimeoutParameter();
} }
@ -189,7 +189,6 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
private long getRemaintime(long timeoutSeconds) { private long getRemaintime(long timeoutSeconds) {
Date startTime = taskInstance.getStartTime(); Date startTime = taskInstance.getStartTime();
long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000;
long remainTime = timeoutSeconds - usedTime; return timeoutSeconds - usedTime;
return remainTime;
} }
} }

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java

@ -30,7 +30,7 @@ import org.springframework.context.annotation.ComponentScan;
@ComponentScan("org.apache.dolphinscheduler") @ComponentScan("org.apache.dolphinscheduler")
public class MonitorServer implements CommandLineRunner { public class MonitorServer implements CommandLineRunner {
private static Integer ARGS_LENGTH = 4; private static final Integer ARGS_LENGTH = 4;
private static final Logger logger = LoggerFactory.getLogger(MonitorServer.class); private static final Logger logger = LoggerFactory.getLogger(MonitorServer.class);

16
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java

@ -17,12 +17,11 @@
package org.apache.dolphinscheduler.server.utils; package org.apache.dolphinscheduler.server.utils;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -32,12 +31,7 @@ import java.util.List;
* spark args utils * spark args utils
*/ */
public class FlinkArgsUtils { public class FlinkArgsUtils {
private static final String LOCAL_DEPLOY_MODE = "local";
/**
* logger of FlinkArgsUtils
*/
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(FlinkArgsUtils.class);
/** /**
* build args * build args
* @param param flink parameters * @param param flink parameters
@ -52,7 +46,7 @@ public class FlinkArgsUtils {
deployMode = tmpDeployMode; deployMode = tmpDeployMode;
} }
if (!"local".equals(deployMode)) { if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
args.add(Constants.FLINK_RUN_MODE); //-m args.add(Constants.FLINK_RUN_MODE); //-m
args.add(Constants.FLINK_YARN_CLUSTER); //yarn-cluster args.add(Constants.FLINK_YARN_CLUSTER); //yarn-cluster
@ -113,12 +107,12 @@ public class FlinkArgsUtils {
String queue = param.getQueue(); String queue = param.getQueue();
if (StringUtils.isNotEmpty(others)) { if (StringUtils.isNotEmpty(others)) {
if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals("local")) { if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) {
args.add(Constants.FLINK_QUEUE); args.add(Constants.FLINK_QUEUE);
args.add(param.getQueue()); args.add(param.getQueue());
} }
args.add(others); args.add(others);
} else if (StringUtils.isNotEmpty(queue) && !deployMode.equals("local")) { } else if (StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) {
args.add(Constants.FLINK_QUEUE); args.add(Constants.FLINK_QUEUE);
args.add(param.getQueue()); args.add(param.getQueue());
} }
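
FlinkArgsUtils now compares against a named LOCAL_DEPLOY_MODE constant instead of the repeated "local" literal. A self-contained sketch of that pattern; the flag strings below stand in for the Constants values used in the real class:

import java.util.ArrayList;
import java.util.List;

public final class DeployModeExample {
    // Named constant replaces the repeated "local" string literal.
    private static final String LOCAL_DEPLOY_MODE = "local";

    // Adds cluster-only arguments unless the job runs in local mode.
    public static List<String> buildArgs(String deployMode) {
        List<String> args = new ArrayList<>();
        if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
            args.add("-m");            // stand-in for Constants.FLINK_RUN_MODE
            args.add("yarn-cluster");  // stand-in for Constants.FLINK_YARN_CLUSTER
        }
        return args;
    }
}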

22
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.utils;
import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import java.util.Date; import java.util.Date;
@ -70,17 +71,16 @@ public class ParamUtils {
Map.Entry<String, Property> en = iter.next(); Map.Entry<String, Property> en = iter.next();
Property property = en.getValue(); Property property = en.getValue();
if (property.getValue() != null && property.getValue().length() > 0){ if (StringUtils.isNotEmpty(property.getValue())
if (property.getValue().startsWith("$")){ && property.getValue().startsWith("$")){
/** /**
* local parameter refers to global parameter with the same name * local parameter refers to global parameter with the same name
* note: the global parameters of the process instance here are solidified parameters, * note: the global parameters of the process instance here are solidified parameters,
* and there are no variables in them. * and there are no variables in them.
*/ */
String val = property.getValue(); String val = property.getValue();
val = ParameterUtils.convertParameterPlaceholders(val, timeParams); val = ParameterUtils.convertParameterPlaceholders(val, timeParams);
property.setValue(val); property.setValue(val);
}
} }
} }

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java

@ -16,6 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.utils; package org.apache.dolphinscheduler.server.utils;
import java.nio.charset.StandardCharsets;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils;
@ -29,7 +30,6 @@ import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
@ -297,7 +297,7 @@ public class ProcessUtils {
File f = new File(commandFile); File f = new File(commandFile);
if (!f.exists()) { if (!f.exists()) {
FileUtils.writeStringToFile(new File(commandFile), sb.toString(), Charset.forName("UTF-8")); FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8);
} }
String runCmd = "sh " + commandFile; String runCmd = "sh " + commandFile;

45
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java

@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.master.MasterServer;
import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.runner.FetchTaskThread; import org.apache.dolphinscheduler.server.worker.runner.FetchTaskThread;
@ -43,10 +44,13 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.WebApplicationType; import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
@ -56,7 +60,10 @@ import java.util.concurrent.TimeUnit;
/** /**
* worker server * worker server
*/ */
@ComponentScan("org.apache.dolphinscheduler") @SpringBootApplication
@ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = {
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = {MasterServer.class})
})
public class WorkerServer implements IStoppable { public class WorkerServer implements IStoppable {
/** /**
@ -104,11 +111,6 @@ public class WorkerServer implements IStoppable {
*/ */
private ExecutorService fetchTaskExecutorService; private ExecutorService fetchTaskExecutorService;
/**
* CountDownLatch latch
*/
private CountDownLatch latch;
@Value("${server.is-combined-server:false}") @Value("${server.is-combined-server:false}")
private Boolean isCombinedServer; private Boolean isCombinedServer;
@ -149,7 +151,7 @@ public class WorkerServer implements IStoppable {
this.fetchTaskExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor"); this.fetchTaskExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor");
heartbeatWorkerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.DEFAUL_WORKER_HEARTBEAT_THREAD_NUM); heartbeatWorkerService = ThreadUtils.newThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.DEFAUL_WORKER_HEARTBEAT_THREAD_NUM, false);
// heartbeat thread implement // heartbeat thread implement
Runnable heartBeatThread = heartBeatThread(); Runnable heartBeatThread = heartBeatThread();
@ -171,29 +173,15 @@ public class WorkerServer implements IStoppable {
// submit fetch task thread // submit fetch task thread
fetchTaskExecutorService.execute(fetchTaskThread); fetchTaskExecutorService.execute(fetchTaskThread);
}
/** @PreDestroy
* register hooks, which are called before the process exits public void destroy() {
*/ // worker server exit alert
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { if (zkWorkerClient.getActiveMasterNum() <= 1) {
@Override alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server");
public void run() {
// worker server exit alert
if (zkWorkerClient.getActiveMasterNum() <= 1) {
alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server");
}
stop("shutdownhook");
}
}));
//let the main thread await
latch = new CountDownLatch(1);
if (!isCombinedServer) {
try {
latch.await();
} catch (InterruptedException ignore) {
}
} }
stop("shutdownhook");
} }
@Override @Override
@ -251,7 +239,6 @@ public class WorkerServer implements IStoppable {
}catch (Exception e){ }catch (Exception e){
logger.warn("zookeeper service stopped exception:{}",e.getMessage()); logger.warn("zookeeper service stopped exception:{}",e.getMessage());
} }
latch.countDown();
logger.info("zookeeper service stopped"); logger.info("zookeeper service stopped");
} catch (Exception e) { } catch (Exception e) {
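
The worker (and, symmetrically, the master) now scans the shared org.apache.dolphinscheduler package while excluding the other server class by type, so starting one server no longer instantiates the other's beans. A minimal sketch of that excludeFilters usage; PeerServer is a placeholder for the class being excluded, not a real project type:

import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.stereotype.Component;

public class ScanExample {

    // Stand-in for the peer server type that must not be picked up by this scan
    // (MasterServer from the worker's point of view, WorkerServer from the master's).
    @Component
    public static class PeerServer { }

    // Scans the shared package but filters out the peer server by assignable type.
    @ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = {
            @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = {PeerServer.class})
    })
    public static class WorkerScanConfig { }
}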

10
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java

@ -19,7 +19,7 @@ package org.apache.dolphinscheduler.server.worker.runner;
import ch.qos.logback.classic.LoggerContext; import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender; import ch.qos.logback.classic.sift.SiftingAppender;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
@ -93,7 +93,7 @@ public class TaskScheduleThread implements Runnable {
logger.info("script path : {}", taskInstance.getExecutePath()); logger.info("script path : {}", taskInstance.getExecutePath());
// task node // task node
TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class); TaskNode taskNode = JSON.parseObject(taskInstance.getTaskJson(), TaskNode.class);
// get resource files // get resource files
List<String> resourceFiles = createProjectResFiles(taskNode); List<String> resourceFiles = createProjectResFiles(taskNode);
@ -176,7 +176,7 @@ public class TaskScheduleThread implements Runnable {
String globalParamsStr = taskInstance.getProcessInstance().getGlobalParams(); String globalParamsStr = taskInstance.getProcessInstance().getGlobalParams();
if (globalParamsStr != null) { if (globalParamsStr != null) {
List<Property> globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class); List<Property> globalParamsList = JSON.parseArray(globalParamsStr, Property.class);
globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue))); globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)));
} }
return globalParamsMap; return globalParamsMap;
@ -296,9 +296,7 @@ public class TaskScheduleThread implements Runnable {
if (baseParam != null) { if (baseParam != null) {
List<String> projectResourceFiles = baseParam.getResourceFilesList(); List<String> projectResourceFiles = baseParam.getResourceFilesList();
-            if (projectResourceFiles != null) {
-                projectFiles.addAll(projectResourceFiles);
-            }
+            projectFiles.addAll(projectResourceFiles);
} }
return new ArrayList<>(projectFiles); return new ArrayList<>(projectFiles);
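Several files in this commit swap com.alibaba.fastjson.JSONObject for com.alibaba.fastjson.JSON when calling the static parse/serialize helpers; JSONObject only inherits them, so going through JSON names the intent directly. A small standalone sketch of those calls (the JSON payload here is made up for illustration):

    import com.alibaba.fastjson.JSON;
    import com.alibaba.fastjson.JSONObject;

    public class FastjsonSketch {
        public static void main(String[] args) {
            String json = "{\"prop\":\"queue\",\"value\":\"default\"}";
            // JSON.parseObject / JSON.parseArray replace the former JSONObject.parse* calls
            JSONObject obj = JSON.parseObject(json);
            String prop = obj.getString("prop");
            // serialization likewise goes through JSON.toJSONString
            String roundTrip = JSON.toJSONString(obj);
            System.out.println(prop + " -> " + roundTrip);
        }
    }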

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java

@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.server.worker.task.datax;
import java.io.File; import java.io.File;
import java.nio.charset.Charset; import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.StandardOpenOption; import java.nio.file.StandardOpenOption;
@ -209,7 +209,7 @@ public class DataxTask extends AbstractTask {
logger.debug("datax job json : {}", root.toString()); logger.debug("datax job json : {}", root.toString());
// create datax json file // create datax json file
FileUtils.writeStringToFile(new File(fileName), root.toString(), Charset.forName("UTF-8")); FileUtils.writeStringToFile(new File(fileName), root.toString(), StandardCharsets.UTF_8);
return fileName; return fileName;
} }
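Replacing Charset.forName("UTF-8") with StandardCharsets.UTF_8 removes a string lookup that can only fail at runtime; the constant is guaranteed to exist on every JVM. A sketch of the commons-io overload used above (the file path and payload are hypothetical):

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.FileUtils;

    public class WriteJobFileSketch {
        public static void main(String[] args) throws IOException {
            File target = new File("/tmp/datax_job.json");   // illustrative path only
            // FileUtils.writeStringToFile(File, String, Charset) writes the JSON as UTF-8
            FileUtils.writeStringToFile(target, "{\"job\":{}}", StandardCharsets.UTF_8);
        }
    }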

29
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.server.worker.task.http; package org.apache.dolphinscheduler.server.worker.task.http;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.Charsets; import org.apache.commons.io.Charsets;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
@ -26,6 +27,7 @@ import org.apache.dolphinscheduler.common.process.HttpProperty;
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.http.HttpParameters; import org.apache.dolphinscheduler.common.task.http.HttpParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.StringUtils;
@ -51,6 +53,7 @@ import org.slf4j.Logger;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -98,7 +101,7 @@ public class HttpTask extends AbstractTask {
@Override @Override
public void init() { public void init() {
logger.info("http task params {}", taskProps.getTaskParams()); logger.info("http task params {}", taskProps.getTaskParams());
this.httpParameters = JSONObject.parseObject(taskProps.getTaskParams(), HttpParameters.class); this.httpParameters = JSON.parseObject(taskProps.getTaskParams(), HttpParameters.class);
if (!httpParameters.checkParameters()) { if (!httpParameters.checkParameters()) {
throw new RuntimeException("http task params is not valid"); throw new RuntimeException("http task params is not valid");
@ -146,12 +149,12 @@ public class HttpTask extends AbstractTask {
processInstance.getCmdTypeIfComplement(), processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime()); processInstance.getScheduleTime());
List<HttpProperty> httpPropertyList = new ArrayList<>(); List<HttpProperty> httpPropertyList = new ArrayList<>();
if(httpParameters.getHttpParams() != null && httpParameters.getHttpParams().size() > 0){ if(CollectionUtils.isNotEmpty(httpParameters.getHttpParams() )){
for (HttpProperty httpProperty: httpParameters.getHttpParams()) { for (HttpProperty httpProperty: httpParameters.getHttpParams()) {
String jsonObject = JSONObject.toJSONString(httpProperty); String jsonObject = JSON.toJSONString(httpProperty);
String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap)); String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap));
logger.info("http request params:{}",params); logger.info("http request params:{}",params);
httpPropertyList.add(JSONObject.parseObject(params,HttpProperty.class)); httpPropertyList.add(JSON.parseObject(params,HttpProperty.class));
} }
} }
addRequestParams(builder,httpPropertyList); addRequestParams(builder,httpPropertyList);
@ -176,8 +179,7 @@ public class HttpTask extends AbstractTask {
if (entity == null) { if (entity == null) {
return null; return null;
} }
String webPage = EntityUtils.toString(entity, StandardCharsets.UTF_8.name()); return EntityUtils.toString(entity, StandardCharsets.UTF_8.name());
return webPage;
} }
/** /**
@ -186,8 +188,7 @@ public class HttpTask extends AbstractTask {
* @return status code * @return status code
*/ */
protected int getStatusCode(CloseableHttpResponse httpResponse) { protected int getStatusCode(CloseableHttpResponse httpResponse) {
int status = httpResponse.getStatusLine().getStatusCode(); return httpResponse.getStatusLine().getStatusCode();
return status;
} }
/** /**
@ -252,7 +253,7 @@ public class HttpTask extends AbstractTask {
* @param httpPropertyList http property list * @param httpPropertyList http property list
*/ */
protected void addRequestParams(RequestBuilder builder,List<HttpProperty> httpPropertyList) { protected void addRequestParams(RequestBuilder builder,List<HttpProperty> httpPropertyList) {
if(httpPropertyList != null && httpPropertyList.size() > 0){ if(CollectionUtils.isNotEmpty(httpPropertyList)){
JSONObject jsonParam = new JSONObject(); JSONObject jsonParam = new JSONObject();
for (HttpProperty property: httpPropertyList){ for (HttpProperty property: httpPropertyList){
if(property.getHttpParametersType() != null){ if(property.getHttpParametersType() != null){
@ -276,12 +277,10 @@ public class HttpTask extends AbstractTask {
* @param httpPropertyList http property list * @param httpPropertyList http property list
*/ */
protected void setHeaders(HttpUriRequest request,List<HttpProperty> httpPropertyList) { protected void setHeaders(HttpUriRequest request,List<HttpProperty> httpPropertyList) {
-        if(httpPropertyList != null && httpPropertyList.size() > 0){
-            for (HttpProperty property: httpPropertyList){
-                if(property.getHttpParametersType() != null) {
-                    if (property.getHttpParametersType().equals(HttpParametersType.HEADERS)) {
-                        request.addHeader(property.getProp(), property.getValue());
-                    }
-                }
+        if(CollectionUtils.isNotEmpty(httpPropertyList)){
+            for (HttpProperty property: httpPropertyList) {
+                if (HttpParametersType.HEADERS.equals(property.getHttpParametersType())) {
+                    request.addHeader(property.getProp(), property.getValue());
+                }
} }
} }
} }
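The "list != null && list.size() > 0" guards above collapse into a single null-safe CollectionUtils.isNotEmpty call. The project imports its own org.apache.dolphinscheduler.common.utils.CollectionUtils wrapper; the sketch below uses commons-collections4, which offers the same semantics:

    import java.util.Collections;
    import java.util.List;
    import org.apache.commons.collections4.CollectionUtils;

    public class NotEmptyGuardSketch {
        public static void main(String[] args) {
            List<String> httpPropertyList = Collections.emptyList();
            // one call covers both the null check and the size() > 0 check
            if (CollectionUtils.isNotEmpty(httpPropertyList)) {
                httpPropertyList.forEach(System.out::println);
            }
        }
    }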

17
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java

@ -122,22 +122,19 @@ public class MapReduceTask extends AbstractYarnTask {
} }
// main class // main class
-        if(mapreduceParameters.getProgramType() !=null ){
-            if(mapreduceParameters.getProgramType()!= ProgramType.PYTHON){
-                if(StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){
-                    result.add(mapreduceParameters.getMainClass());
-                }
-            }
+        if(!ProgramType.PYTHON.equals(mapreduceParameters.getProgramType())
+                && StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){
+            result.add(mapreduceParameters.getMainClass());
} }
// others // others
if (StringUtils.isNotEmpty(mapreduceParameters.getOthers())) { if (StringUtils.isNotEmpty(mapreduceParameters.getOthers())) {
String others = mapreduceParameters.getOthers(); String others = mapreduceParameters.getOthers();
-            if(!others.contains(Constants.MR_QUEUE)){
-                if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) {
-                    result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue()));
-                }
+            if (!others.contains(Constants.MR_QUEUE)
+                    && StringUtils.isNotEmpty(mapreduceParameters.getQueue())) {
+                result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue()));
} }
result.add(mapreduceParameters.getOthers()); result.add(mapreduceParameters.getOthers());
}else if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { }else if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) {
result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue()));

31
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.worker.task.processdure; package org.apache.dolphinscheduler.server.worker.task.processdure;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import com.cronutils.utils.StringUtils; import com.cronutils.utils.StringUtils;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.DataType;
@ -60,11 +60,6 @@ public class ProcedureTask extends AbstractTask {
*/ */
private ProcessService processService; private ProcessService processService;
/**
* base datasource
*/
private BaseDataSource baseDataSource;
/** /**
* constructor * constructor
* @param taskProps task props * @param taskProps task props
@ -75,7 +70,7 @@ public class ProcedureTask extends AbstractTask {
logger.info("procedure task params {}", taskProps.getTaskParams()); logger.info("procedure task params {}", taskProps.getTaskParams());
this.procedureParameters = JSONObject.parseObject(taskProps.getTaskParams(), ProcedureParameters.class); this.procedureParameters = JSON.parseObject(taskProps.getTaskParams(), ProcedureParameters.class);
// check parameters // check parameters
if (!procedureParameters.checkParameters()) { if (!procedureParameters.checkParameters()) {
@ -117,7 +112,7 @@ public class ProcedureTask extends AbstractTask {
// load class // load class
DataSourceFactory.loadClass(dataSource.getType()); DataSourceFactory.loadClass(dataSource.getType());
// get datasource // get datasource
baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(),
dataSource.getConnectionParams()); dataSource.getConnectionParams());
// get jdbc connection // get jdbc connection
@ -163,7 +158,7 @@ public class ProcedureTask extends AbstractTask {
stmt.setQueryTimeout(taskProps.getTaskTimeout()); stmt.setQueryTimeout(taskProps.getTaskTimeout());
} }
Map<Integer,Property> outParameterMap = new HashMap<>(); Map<Integer,Property> outParameterMap = new HashMap<>();
if (userDefParamsList != null && userDefParamsList.size() > 0){ if (CollectionUtils.isNotEmpty(userDefParamsList)){
int index = 1; int index = 1;
for (Property property : userDefParamsList){ for (Property property : userDefParamsList){
logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}" logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}"
@ -237,31 +232,31 @@ public class ProcedureTask extends AbstractTask {
private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException { private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException {
switch (dataType){ switch (dataType){
case VARCHAR: case VARCHAR:
logger.info("out prameter key : {} , value : {}",prop,stmt.getString(index)); logger.info("out prameter varchar key : {} , value : {}",prop,stmt.getString(index));
break; break;
case INTEGER: case INTEGER:
logger.info("out prameter key : {} , value : {}", prop, stmt.getInt(index)); logger.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index));
break; break;
case LONG: case LONG:
logger.info("out prameter key : {} , value : {}",prop,stmt.getLong(index)); logger.info("out prameter long key : {} , value : {}",prop,stmt.getLong(index));
break; break;
case FLOAT: case FLOAT:
logger.info("out prameter key : {} , value : {}",prop,stmt.getFloat(index)); logger.info("out prameter float key : {} , value : {}",prop,stmt.getFloat(index));
break; break;
case DOUBLE: case DOUBLE:
logger.info("out prameter key : {} , value : {}",prop,stmt.getDouble(index)); logger.info("out prameter double key : {} , value : {}",prop,stmt.getDouble(index));
break; break;
case DATE: case DATE:
logger.info("out prameter key : {} , value : {}",prop,stmt.getDate(index)); logger.info("out prameter date key : {} , value : {}",prop,stmt.getDate(index));
break; break;
case TIME: case TIME:
logger.info("out prameter key : {} , value : {}",prop,stmt.getTime(index)); logger.info("out prameter time key : {} , value : {}",prop,stmt.getTime(index));
break; break;
case TIMESTAMP: case TIMESTAMP:
logger.info("out prameter key : {} , value : {}",prop,stmt.getTimestamp(index)); logger.info("out prameter timestamp key : {} , value : {}",prop,stmt.getTimestamp(index));
break; break;
case BOOLEAN: case BOOLEAN:
logger.info("out prameter key : {} , value : {}",prop, stmt.getBoolean(index)); logger.info("out prameter boolean key : {} , value : {}",prop, stmt.getBoolean(index));
break; break;
default: default:
break; break;

9
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java

@ -16,6 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.worker.task.sql; package org.apache.dolphinscheduler.server.worker.task.sql;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature; import com.alibaba.fastjson.serializer.SerializerFeature;
@ -92,7 +93,7 @@ public class SqlTask extends AbstractTask {
super(taskProps, logger); super(taskProps, logger);
logger.info("sql task params {}", taskProps.getTaskParams()); logger.info("sql task params {}", taskProps.getTaskParams());
this.sqlParameters = JSONObject.parseObject(taskProps.getTaskParams(), SqlParameters.class); this.sqlParameters = JSON.parseObject(taskProps.getTaskParams(), SqlParameters.class);
if (!sqlParameters.checkParameters()) { if (!sqlParameters.checkParameters()) {
throw new RuntimeException("sql task params is not valid"); throw new RuntimeException("sql task params is not valid");
@ -308,16 +309,16 @@ public class SqlTask extends AbstractTask {
} }
resultJSONArray.add(mapOfColValues); resultJSONArray.add(mapOfColValues);
} }
logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); logger.debug("execute sql : {}", JSON.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
// if there is a result set // if there is a result set
if ( !resultJSONArray.isEmpty() ) { if ( !resultJSONArray.isEmpty() ) {
if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { if (StringUtils.isNotEmpty(sqlParameters.getTitle())) {
sendAttachment(sqlParameters.getTitle(), sendAttachment(sqlParameters.getTitle(),
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); JSON.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
}else{ }else{
sendAttachment(taskProps.getNodeName() + " query resultsets ", sendAttachment(taskProps.getNodeName() + " query resultsets ",
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); JSON.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
} }
} }

19
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java

@ -69,17 +69,16 @@ public class MysqlSourceGenerator implements ISourceGenerator {
result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns()); result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns());
} }
-        }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()){
-            if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){
-
-                String srcQuery = sourceMysqlParameter.getSrcQuerySql();
-                if(srcQuery.toLowerCase().contains("where")){
-                    srcQuery += " AND "+"$CONDITIONS";
-                }else{
-                    srcQuery += " WHERE $CONDITIONS";
-                }
-                result.append(" --query \'"+srcQuery+"\'");
-            }
+        }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()
+                && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){
+            String srcQuery = sourceMysqlParameter.getSrcQuerySql();
+            if(srcQuery.toLowerCase().contains("where")){
+                srcQuery += " AND "+"$CONDITIONS";
+            }else{
+                srcQuery += " WHERE $CONDITIONS";
+            }
+            result.append(" --query \'"+srcQuery+"\'");
} }
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive(); List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
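Context for the query handling above: Sqoop's free-form --query import requires the literal $CONDITIONS placeholder in the WHERE clause so it can inject split boundaries. A self-contained sketch of the same appending rule:

    public class ConditionsTokenSketch {

        static String withConditions(String srcQuery) {
            // mirror of the generator's rule: append with AND if a WHERE already exists
            if (srcQuery.toLowerCase().contains("where")) {
                return srcQuery + " AND $CONDITIONS";
            }
            return srcQuery + " WHERE $CONDITIONS";
        }

        public static void main(String[] args) {
            System.out.println(withConditions("SELECT * FROM person_2"));
            System.out.println(withConditions("SELECT * FROM person_2 WHERE age > 18"));
        }
    }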

11
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java

@ -75,12 +75,11 @@ public class MysqlTargetGenerator implements ITargetGenerator {
result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'"); result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'");
} }
-            if(targetMysqlParameter.isUpdate()){
-                if(StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())&&
-                        StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){
-                    result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())
-                            .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode());
-                }
+            if(targetMysqlParameter.isUpdate()
+                    && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())
+                    && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){
+                result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())
+                        .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode());
} }
} }
} }

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.master; package org.apache.dolphinscheduler.server.master;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.DateUtils;
@ -85,7 +85,7 @@ public class MasterExecThreadTest {
Map<String, String> cmdParam = new HashMap<>(); Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, "2020-01-01 00:00:00"); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, "2020-01-01 00:00:00");
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, "2020-01-31 23:00:00"); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, "2020-01-31 23:00:00");
Mockito.when(processInstance.getCommandParam()).thenReturn(JSONObject.toJSONString(cmdParam)); Mockito.when(processInstance.getCommandParam()).thenReturn(JSON.toJSONString(cmdParam));
ProcessDefinition processDefinition = new ProcessDefinition(); ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setGlobalParamMap(Collections.EMPTY_MAP); processDefinition.setGlobalParamMap(Collections.EMPTY_MAP);
processDefinition.setGlobalParamList(Collections.EMPTY_LIST); processDefinition.setGlobalParamList(Collections.EMPTY_LIST);

24
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java

@ -87,35 +87,35 @@ public class FlinkArgsUtilsTest {
} }
//Expected values and order //Expected values and order
assertEquals(result.size(),20); assertEquals(20, result.size());
assertEquals(result.get(0),"-m"); assertEquals("-m", result.get(0));
assertEquals(result.get(1),"yarn-cluster"); assertEquals("yarn-cluster", result.get(1));
assertEquals(result.get(2),"-ys"); assertEquals("-ys", result.get(2));
assertSame(Integer.valueOf(result.get(3)),slot); assertSame(Integer.valueOf(result.get(3)),slot);
assertEquals(result.get(4),"-ynm"); assertEquals("-ynm",result.get(4));
assertEquals(result.get(5),appName); assertEquals(result.get(5),appName);
assertEquals(result.get(6),"-yn"); assertEquals("-yn", result.get(6));
assertSame(Integer.valueOf(result.get(7)),taskManager); assertSame(Integer.valueOf(result.get(7)),taskManager);
assertEquals(result.get(8),"-yjm"); assertEquals("-yjm", result.get(8));
assertEquals(result.get(9),jobManagerMemory); assertEquals(result.get(9),jobManagerMemory);
assertEquals(result.get(10),"-ytm"); assertEquals("-ytm", result.get(10));
assertEquals(result.get(11),taskManagerMemory); assertEquals(result.get(11),taskManagerMemory);
assertEquals(result.get(12),"-d"); assertEquals("-d", result.get(12));
assertEquals(result.get(13),"-c"); assertEquals("-c", result.get(13));
assertEquals(result.get(14),mainClass); assertEquals(result.get(14),mainClass);
assertEquals(result.get(15),mainJar.getRes()); assertEquals(result.get(15),mainJar.getRes());
assertEquals(result.get(16),mainArgs); assertEquals(result.get(16),mainArgs);
assertEquals(result.get(17),"--qu"); assertEquals("--qu", result.get(17));
assertEquals(result.get(18),queue); assertEquals(result.get(18),queue);
assertEquals(result.get(19),others); assertEquals(result.get(19),others);
@ -125,7 +125,7 @@ public class FlinkArgsUtilsTest {
param1.setQueue(queue); param1.setQueue(queue);
param1.setDeployMode(mode); param1.setDeployMode(mode);
result = FlinkArgsUtils.buildArgs(param1); result = FlinkArgsUtils.buildArgs(param1);
assertEquals(result.size(),5); assertEquals(5, result.size());
} }
} }
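The test changes above mostly swap the argument order of assertEquals; JUnit treats the first argument as the expected value, so failure messages only read correctly when the literal comes first. A minimal illustration (JUnit 4, hypothetical test class):

    import static org.junit.Assert.assertEquals;

    import java.util.Arrays;
    import java.util.List;
    import org.junit.Test;

    public class ArgumentOrderSketch {
        @Test
        public void expectedValueComesFirst() {
            List<String> result = Arrays.asList("-m", "yarn-cluster");
            // on failure this prints "expected:<2> but was:<...>", not the other way round
            assertEquals(2, result.size());
            assertEquals("-m", result.get(0));
        }
    }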

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.worker.shell; package org.apache.dolphinscheduler.server.worker.shell;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNode;
@ -68,7 +68,7 @@ public class ShellCommandExecutorTest {
TaskInstance taskInstance = processService.findTaskInstanceById(7657); TaskInstance taskInstance = processService.findTaskInstanceById(7657);
String taskJson = taskInstance.getTaskJson(); String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
taskProps.setTaskParams(taskNode.getParams()); taskProps.setTaskParams(taskNode.getParams());

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.worker.sql; package org.apache.dolphinscheduler.server.worker.sql;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
@ -112,7 +112,7 @@ public class SqlExecutorTest {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
String taskJson = taskInstance.getTaskJson(); String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
taskProps.setTaskParams(taskNode.getParams()); taskProps.setTaskParams(taskNode.getParams());

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java

@ -27,6 +27,7 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.After; import org.junit.After;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@ -172,7 +173,7 @@ public class ShellTaskTest {
@Test @Test
public void testHandleForWindows() throws Exception { public void testHandleForWindows() throws Exception {
try { try {
PowerMockito.when(OSUtils.isWindows()).thenReturn(true); Assume.assumeTrue(OSUtils.isWindows());
shellTask.handle(); shellTask.handle();
Assert.assertTrue(true); Assert.assertTrue(true);
} catch (Error | Exception e) { } catch (Error | Exception e) {
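Instead of forcing OSUtils.isWindows() to true with PowerMockito, the test now skips itself on non-Windows machines via Assume. A sketch of that idiom (the OS check here uses os.name directly rather than the project's OSUtils):

    import org.junit.Assume;
    import org.junit.Test;

    public class WindowsOnlySketch {
        @Test
        public void runsOnlyOnWindows() {
            // when the assumption fails, JUnit reports the test as skipped, not failed
            Assume.assumeTrue(System.getProperty("os.name").toLowerCase().startsWith("windows"));
            // ... exercise the Windows-specific code path here
        }
    }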

10
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java

@ -16,7 +16,7 @@
*/ */
package org.apache.dolphinscheduler.server.worker.task.sqoop; package org.apache.dolphinscheduler.server.worker.task.sqoop;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.DataSource;
@ -74,7 +74,7 @@ public class SqoopTaskTest {
@Test @Test
public void testGenerator(){ public void testGenerator(){
String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters1 = JSONObject.parseObject(data1,SqoopParameters.class); SqoopParameters sqoopParameters1 = JSON.parseObject(data1,SqoopParameters.class);
SqoopJobGenerator generator = new SqoopJobGenerator(); SqoopJobGenerator generator = new SqoopJobGenerator();
String script = generator.generateSqoopJob(sqoopParameters1); String script = generator.generateSqoopJob(sqoopParameters1);
@ -82,21 +82,21 @@ public class SqoopTaskTest {
Assert.assertEquals(expected, script); Assert.assertEquals(expected, script);
String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters2 = JSONObject.parseObject(data2,SqoopParameters.class); SqoopParameters sqoopParameters2 = JSON.parseObject(data2,SqoopParameters.class);
String script2 = generator.generateSqoopJob(sqoopParameters2); String script2 = generator.generateSqoopJob(sqoopParameters2);
String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
Assert.assertEquals(expected2, script2); Assert.assertEquals(expected2, script2);
String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters3 = JSONObject.parseObject(data3,SqoopParameters.class); SqoopParameters sqoopParameters3 = JSON.parseObject(data3,SqoopParameters.class);
String script3 = generator.generateSqoopJob(sqoopParameters3); String script3 = generator.generateSqoopJob(sqoopParameters3);
String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'"; String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
Assert.assertEquals(expected3, script3); Assert.assertEquals(expected3, script3);
String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters4 = JSONObject.parseObject(data4,SqoopParameters.class); SqoopParameters sqoopParameters4 = JSON.parseObject(data4,SqoopParameters.class);
String script4 = generator.generateSqoopJob(sqoopParameters4); String script4 = generator.generateSqoopJob(sqoopParameters4);
String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";

8
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java

@ -40,7 +40,7 @@ public class LogClientService {
/** /**
* request time out * request time out
*/ */
private final long logRequestTimeout = 10 * 1000; private static final long LOG_REQUEST_TIMEOUT = 10 * 1000L;
/** /**
* construct client * construct client
@ -75,7 +75,7 @@ public class LogClientService {
final Address address = new Address(host, port); final Address address = new Address(host, port);
try { try {
Command command = request.convert2Command(); Command command = request.convert2Command();
Command response = this.client.sendSync(address, command, logRequestTimeout); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
if(response != null){ if(response != null){
RollViewLogResponseCommand rollReviewLog = FastJsonSerializer.deserialize( RollViewLogResponseCommand rollReviewLog = FastJsonSerializer.deserialize(
response.getBody(), RollViewLogResponseCommand.class); response.getBody(), RollViewLogResponseCommand.class);
@ -103,7 +103,7 @@ public class LogClientService {
final Address address = new Address(host, port); final Address address = new Address(host, port);
try { try {
Command command = request.convert2Command(); Command command = request.convert2Command();
Command response = this.client.sendSync(address, command, logRequestTimeout); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
if(response != null){ if(response != null){
ViewLogResponseCommand viewLog = FastJsonSerializer.deserialize( ViewLogResponseCommand viewLog = FastJsonSerializer.deserialize(
response.getBody(), ViewLogResponseCommand.class); response.getBody(), ViewLogResponseCommand.class);
@ -131,7 +131,7 @@ public class LogClientService {
final Address address = new Address(host, port); final Address address = new Address(host, port);
try { try {
Command command = request.convert2Command(); Command command = request.convert2Command();
Command response = this.client.sendSync(address, command, logRequestTimeout); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
if(response != null){ if(response != null){
GetLogBytesResponseCommand getLog = FastJsonSerializer.deserialize( GetLogBytesResponseCommand getLog = FastJsonSerializer.deserialize(
response.getBody(), GetLogBytesResponseCommand.class); response.getBody(), GetLogBytesResponseCommand.class);
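Promoting the timeout to a static final constant keeps one shared value per class, and the explicit 1000L keeps the multiplication in long arithmetic, which matters once the factors grow large enough to overflow int. A brief sketch of the convention (names here are illustrative):

    public class TimeoutConstantSketch {
        // UPPER_SNAKE_CASE static final field, shared by every request in the class
        private static final long LOG_REQUEST_TIMEOUT = 10 * 1000L;

        public static void main(String[] args) {
            System.out.println("log request timeout (ms): " + LOG_REQUEST_TIMEOUT);
        }
    }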

2
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java

@ -147,7 +147,7 @@ public class PermissionCheck<T> {
List<T> unauthorizedList = processService.listUnauthorized(userId,needChecks,authorizationType); List<T> unauthorizedList = processService.listUnauthorized(userId,needChecks,authorizationType);
// if exist unauthorized resource // if exist unauthorized resource
if(CollectionUtils.isNotEmpty(unauthorizedList)){ if(CollectionUtils.isNotEmpty(unauthorizedList)){
logger.error("user {} didn't has permission of {}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList.toString()); logger.error("user {} didn't has permission of {}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList);
throw new RuntimeException(String.format("user %s didn't has permission of %s %s", user.getUserName(), authorizationType.getDescp(), unauthorizedList.get(0))); throw new RuntimeException(String.format("user %s didn't has permission of %s %s", user.getUserName(), authorizationType.getDescp(), unauthorizedList.get(0)));
} }
} }
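Dropping the explicit .toString() works because SLF4J formats {} placeholders itself and only renders the arguments when the log level is enabled. A small standalone example:

    import java.util.Arrays;
    import java.util.List;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingSketch {
        private static final Logger logger = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

        public static void main(String[] args) {
            List<Integer> unauthorizedList = Arrays.asList(7, 9);
            // the list is passed as-is; SLF4J calls toString() lazily during formatting
            logger.error("user {} has no permission of {}: {}", "alice", "RESOURCE_FILE_ID", unauthorizedList);
        }
    }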

7
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java

@ -16,6 +16,7 @@
*/ */
package org.apache.dolphinscheduler.service.process; package org.apache.dolphinscheduler.service.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.cronutils.model.Cron; import com.cronutils.model.Cron;
import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.ArrayUtils;
@ -207,7 +208,7 @@ public class ProcessService {
CommandType commandType = command.getCommandType(); CommandType commandType = command.getCommandType();
if(cmdTypeMap.containsKey(commandType)){ if(cmdTypeMap.containsKey(commandType)){
JSONObject cmdParamObj = (JSONObject) JSONObject.parse(command.getCommandParam()); JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam());
JSONObject tempObj; JSONObject tempObj;
int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING); int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING);
@ -215,7 +216,7 @@ public class ProcessService {
// for all commands // for all commands
for (Command tmpCommand:commands){ for (Command tmpCommand:commands){
if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){ if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){
tempObj = (JSONObject) JSONObject.parse(tmpCommand.getCommandParam()); tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam());
if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){ if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){
isNeedCreate = false; isNeedCreate = false;
break; break;
@ -309,7 +310,7 @@ public class ProcessService {
for (TaskNode taskNode : taskNodeList){ for (TaskNode taskNode : taskNodeList){
String parameter = taskNode.getParams(); String parameter = taskNode.getParams();
if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){ if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){
SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class); SubProcessParameters subProcessParam = JSON.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId()); ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids);
} }

55
dolphinscheduler-ui/pom.xml

@ -89,6 +89,61 @@
</build> </build>
</profile> </profile>
<profile>
<id>rpmbuild</id>
<build>
<plugins>
<plugin>
<groupId>com.github.eirslett</groupId>
<artifactId>frontend-maven-plugin</artifactId>
<version>${frontend-maven-plugin.version}</version>
<executions>
<execution>
<id>install node and npm</id>
<goals>
<goal>install-node-and-npm</goal>
</goals>
<configuration>
<nodeVersion>${node.version}</nodeVersion>
<npmVersion>${npm.version}</npmVersion>
</configuration>
</execution>
<execution>
<id>npm install node-sass --unsafe-perm</id>
<goals>
<goal>npm</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<arguments>install node-sass --unsafe-perm</arguments>
</configuration>
</execution>
<execution>
<id>npm install</id>
<goals>
<goal>npm</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<arguments>install</arguments>
</configuration>
</execution>
<execution>
<id>npm run build:release</id>
<goals>
<goal>npm</goal>
</goals>
<configuration>
<arguments>run build:release</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile> <profile>
<id>nginx</id> <id>nginx</id>
<build> <build>

6
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js

@ -26,7 +26,7 @@ import Permissions from '@/module/permissions'
* @desc tooltip * @desc tooltip
*/ */
const toolOper = (dagThis) => { const toolOper = (dagThis) => {
let disabled =!dagThis.$store.state.dag.isDetails// Permissions.getAuth() === false ? false : !dagThis.$store.state.dag.isDetails let disabled =!!dagThis.$store.state.dag.isDetails// Permissions.getAuth() === false ? false : !dagThis.$store.state.dag.isDetails
return [ return [
{ {
code: 'pointer', code: 'pointer',
@ -49,13 +49,13 @@ const toolOper = (dagThis) => {
{ {
code: 'download', code: 'download',
icon: 'ans-icon-download', icon: 'ans-icon-download',
disable: !!dagThis.type, disable: !dagThis.type,
desc: `${i18n.$t('Download')}` desc: `${i18n.$t('Download')}`
}, },
{ {
code: 'screen', code: 'screen',
icon: 'ans-icon-max', icon: 'ans-icon-max',
disable: disabled, disable: false,
desc: `${i18n.$t('Full Screen')}` desc: `${i18n.$t('Full Screen')}`
} }
] ]

4
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue

@ -25,7 +25,7 @@
:key="v" :key="v"
v-for="(item,v) in tasksTypeList" v-for="(item,v) in tasksTypeList"
@mousedown="_getDagId(v)"> @mousedown="_getDagId(v)">
<div data-toggle="tooltip" :title="item.description"> <div data-toggle="tooltip" :title="item.desc">
<div class="icos" :class="'icos-' + v" ></div> <div class="icos" :class="'icos-' + v" ></div>
</div> </div>
</div> </div>
@ -293,7 +293,7 @@
let is = true let is = true
let code = '' let code = ''
if (!item.disable) { if (item.disable) {
return return
} }

12
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue

@ -252,6 +252,7 @@
v-if="taskType === 'CONDITIONS'" v-if="taskType === 'CONDITIONS'"
ref="CONDITIONS" ref="CONDITIONS"
@on-dependent="_onDependent" @on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
:backfill-item="backfillItem" :backfill-item="backfillItem"
:pre-node="preNode"> :pre-node="preNode">
</m-conditions> </m-conditions>
@ -438,6 +439,8 @@
}, },
_cacheItem () { _cacheItem () {
this.conditionResult.successNode[0] = this.successBranch
this.conditionResult.failedNode[0] = this.failedBranch
this.$emit('cacheTaskInfo', { this.$emit('cacheTaskInfo', {
item: { item: {
type: this.taskType, type: this.taskType,
@ -446,12 +449,15 @@
params: this.params, params: this.params,
description: this.description, description: this.description,
runFlag: this.runFlag, runFlag: this.runFlag,
conditionResult: this.conditionResult,
dependence: this.cacheDependence, dependence: this.cacheDependence,
maxRetryTimes: this.maxRetryTimes, maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval, retryInterval: this.retryInterval,
timeout: this.timeout, timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority, taskInstancePriority: this.taskInstancePriority,
workerGroupId: this.workerGroupId workerGroupId: this.workerGroupId,
status: this.status,
branch: this.branch
}, },
fromThis: this fromThis: this
}) })
@ -657,7 +663,9 @@
retryInterval: this.retryInterval, retryInterval: this.retryInterval,
timeout: this.timeout, timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority, taskInstancePriority: this.taskInstancePriority,
workerGroupId: this.workerGroupId workerGroupId: this.workerGroupId,
successBranch: this.successBranch,
failedBranch: this.failedBranch
} }
} }
}, },

10
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue

@ -122,8 +122,11 @@
}, },
// Watch the cacheParams // Watch the cacheParams
watch: { watch: {
cacheParams (val) { datasource (val) {
this.$emit('on-dsData', val); this.$emit('on-dsData', {
type: this.type,
datasource: val
});
} }
}, },
created () { created () {
@ -150,7 +153,8 @@
}) })
} }
this.$emit('on-dsData', { this.$emit('on-dsData', {
type: this.type type: this.type,
datasource: this.datasource
}) })
}) })
}, },

21
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue

@ -19,7 +19,7 @@
<m-list-box> <m-list-box>
<div slot="text">{{$t('Custom Parameters')}}</div> <div slot="text">{{$t('Custom Parameters')}}</div>
<div slot="content"> <div slot="content">
<div class="dep-opt"> <div class="dep-opt">
<a href="javascript:" <a href="javascript:"
@click="!isDetails && _addDep()" @click="!isDetails && _addDep()"
class="add-dep"> class="add-dep">
@ -133,6 +133,9 @@
setTimeout(() => { setTimeout(() => {
this.isLoading = false this.isLoading = false
}, 600) }, 600)
},
cacheDependence (val) {
this.$emit('on-cache-dependent', val)
} }
}, },
beforeCreate () { beforeCreate () {
@ -153,7 +156,19 @@
}, },
destroyed () { destroyed () {
}, },
computed: {}, computed: {
cacheDependence () {
return {
relation: this.relation,
dependTaskList: _.map(this.dependTaskList, v => {
return {
relation: v.relation,
dependItemList: _.map(v.dependItemList, v1 => _.omit(v1, ['depTasksList', 'state', 'dateValueList']))
}
})
}
}
},
components: { mListBox, mNodeStatus } components: { mListBox, mNodeStatus }
} }
</script> </script>
@ -257,4 +272,4 @@
} }
} }
} }
</style> </style>

71
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue

@ -65,7 +65,7 @@
<m-datasource <m-datasource
ref="refSourceDs" ref="refSourceDs"
@on-dsData="_onSourceDsData" @on-dsData="_onSourceDsData"
:data="{ type:'MYSQL',datasource:srcDatasource }" :data="{ type:sourceMysqlParams.srcType,datasource:sourceMysqlParams.srcDatasource }"
> >
</m-datasource> </m-datasource>
</div> </div>
@ -186,8 +186,8 @@
<div slot="content"> <div slot="content">
<div class="from-mirror"> <div class="from-mirror">
<textarea <textarea
id="code-sql-mirror" id="code-sqoop-mirror"
name="code-sql-mirror" name="code-sqoop-mirror"
style="opacity: 0;"> style="opacity: 0;">
</textarea> </textarea>
</div> </div>
@ -385,7 +385,7 @@
<m-datasource <m-datasource
ref="refTargetDs" ref="refTargetDs"
@on-dsData="_onTargetDsData" @on-dsData="_onTargetDsData"
:data="{ type:type,datasource:targetDatasource }" :data="{ type:targetMysqlParams.targetType,datasource:targetMysqlParams.targetDatasource }"
> >
</m-datasource> </m-datasource>
</div> </div>
@ -556,7 +556,8 @@
targetType:"HDFS", targetType:"HDFS",
sourceMysqlParams:{ sourceMysqlParams:{
srcDatasource:-1, srcType:"MYSQL",
srcDatasource:"",
srcTable:"", srcTable:"",
srcQueryType:"1", srcQueryType:"1",
srcQuerySql:'', srcQuerySql:'',
@ -588,7 +589,8 @@
}, },
targetMysqlParams:{ targetMysqlParams:{
targetDatasource:-1, targetType:"MYSQL",
targetDatasource:"",
targetTable:"", targetTable:"",
targetColumns:"", targetColumns:"",
fieldsTerminated:"", fieldsTerminated:"",
@ -680,6 +682,7 @@
* return data source * return data source
*/ */
_onSourceDsData (o) { _onSourceDsData (o) {
this.sourceMysqlParams.srcType = o.type
this.sourceMysqlParams.srcDatasource = o.datasource this.sourceMysqlParams.srcDatasource = o.datasource
}, },
@ -687,6 +690,7 @@
* return data source * return data source
*/ */
_onTargetDsData (o) { _onTargetDsData (o) {
this.targetMysqlParams.targetType = o.type
this.targetMysqlParams.targetDatasource = o.datasource this.targetMysqlParams.targetDatasource = o.datasource
}, },
@ -697,7 +701,7 @@
var params = null var params = null
switch(this.sourceType){ switch(this.sourceType){
case "MYSQL": case "MYSQL":
this.sourceMysqlParams.srcQuerySql = editor.getValue() this.sourceMysqlParams.srcQuerySql = editor ? editor.getValue() : this.sourceMysqlParams.srcQuerySql
params = JSON.stringify(this.sourceMysqlParams) params = JSON.stringify(this.sourceMysqlParams)
break; break;
case "ORACLE": case "ORACLE":
@ -879,7 +883,9 @@
* Processing code highlighting * Processing code highlighting
*/ */
_handlerEditor () { _handlerEditor () {
editor = codemirror('code-sql-mirror', { this._destroyEditor()
editor = codemirror('code-sqoop-mirror', {
mode: 'sql', mode: 'sql',
readOnly: this.isDetails readOnly: this.isDetails
}) })
@ -892,9 +898,15 @@
} }
} }
this.changes = () => {
this._cacheParams()
}
// Monitor keyboard // Monitor keyboard
editor.on('keypress', this.keypress) editor.on('keypress', this.keypress)
editor.on('changes', this.changes)
editor.setValue(this.sourceMysqlParams.srcQuerySql) editor.setValue(this.sourceMysqlParams.srcQuerySql)
return editor return editor
@ -906,6 +918,27 @@
_onLocalParams (a) { _onLocalParams (a) {
this.localParams = a this.localParams = a
}, },
_cacheParams () {
this.$emit('on-cache-params', {
concurrency:this.concurrency,
modelType:this.modelType,
sourceType:this.sourceType,
targetType:this.targetType,
sourceParams:this._handleSourceParams(),
targetParams:this._handleTargetParams(),
localParams:this.localParams
});
},
_destroyEditor () {
if (editor) {
editor.toTextArea() // Uninstall
editor.off($('.code-sqoop-mirror'), 'keypress', this.keypress)
editor.off($('.code-sqoop-mirror'), 'changes', this.changes)
editor = null
}
},
}, },
watch: { watch: {
// Listening to sqlType // Listening to sqlType
@ -927,11 +960,12 @@
}, },
//Watch the cacheParams //Watch the cacheParams
cacheParams (val) { cacheParams (val) {
this.$emit('on-cache-params', val); this._cacheParams()
} }
}, },
created () { created () {
this._destroyEditor()
let o = this.backfillItem let o = this.backfillItem
// Non-null objects represent backfill // Non-null objects represent backfill
@ -963,11 +997,28 @@
*/ */
if (editor) { if (editor) {
editor.toTextArea() // Uninstall editor.toTextArea() // Uninstall
editor.off($('.code-sql-mirror'), 'keypress', this.keypress) editor.off($('.code-sqoop-mirror'), 'keypress', this.keypress)
editor.off($('.code-sqoop-mirror'), 'changes', this.changes)
editor = null
} }
}, },
computed: { computed: {
cacheParams () {
return {
concurrency:this.concurrency,
modelType:this.modelType,
sourceType:this.sourceType,
targetType:this.targetType,
localParams:this.localParams,
sourceMysqlParams:this.sourceMysqlParams,
sourceHdfsParams:this.sourceHdfsParams,
sourceHiveParams:this.sourceHiveParams,
targetHdfsParams:this.targetHdfsParams,
targetMysqlParams:this.targetMysqlParams,
targetHiveParams:this.targetHiveParams
}
}
}, },
components: { mListBox, mDatasource, mLocalParams} components: { mListBox, mDatasource, mLocalParams}
} }

1
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@ -567,7 +567,6 @@ export default {
'Data Target': 'Data Target', 'Data Target': 'Data Target',
'All Columns': 'All Columns', 'All Columns': 'All Columns',
'Some Columns': 'Some Columns', 'Some Columns': 'Some Columns',
'Modify User': 'Modify User',
'Branch flow': 'Branch flow', 'Branch flow': 'Branch flow',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow' 'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow'
} }

1
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js

@ -567,7 +567,6 @@ export default {
'Data Target': '数据目的', 'Data Target': '数据目的',
'All Columns': '全表导入', 'All Columns': '全表导入',
'Some Columns': '选择列', 'Some Columns': '选择列',
'Modify User': '修改用户',
'Branch flow': '分支流转', 'Branch flow': '分支流转',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点' 'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点'
} }

8
e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.base;
import org.apache.dolphinscheduler.constant.TestConstant; import org.apache.dolphinscheduler.constant.TestConstant;
import org.apache.dolphinscheduler.util.PropertiesReader; import org.apache.dolphinscheduler.util.PropertiesReader;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.chrome.ChromeDriver;
@ -35,7 +36,7 @@ public class BaseDriver {
/** /**
* driver * driver
*/ */
private WebDriver driver; private static WebDriver driver;
/** /**
* chrome driver path * chrome driver path
@ -88,6 +89,7 @@ public class BaseDriver {
chromeOptions.setPageLoadStrategy(PageLoadStrategy.NONE); chromeOptions.setPageLoadStrategy(PageLoadStrategy.NONE);
chromeOptions.addArguments("--no-sandbox"); chromeOptions.addArguments("--no-sandbox");
chromeOptions.addArguments("--disable-dev-shm-usage"); chromeOptions.addArguments("--disable-dev-shm-usage");
// to run the browser with a visible window, comment out the --headless argument below
chromeOptions.addArguments("--headless"); chromeOptions.addArguments("--headless");
chromeOptions.addArguments("--disable-gpu"); chromeOptions.addArguments("--disable-gpu");
chromeOptions.addArguments("--whitelisted-ips"); chromeOptions.addArguments("--whitelisted-ips");
@ -120,7 +122,7 @@ public class BaseDriver {
* *
* @return driver * @return driver
*/ */
public WebDriver getDriver() { public static WebDriver getDriver() {
return driver; return driver;
} }
@ -141,7 +143,7 @@ public class BaseDriver {
public void closeBrowser() throws InterruptedException { public void closeBrowser() throws InterruptedException {
// JS Show a pop-up box to indicate the end of the test // JS Show a pop-up box to indicate the end of the test
Thread.sleep(TestConstant.ONE_THOUSANG); Thread.sleep(TestConstant.ONE_THOUSANG);
((JavascriptExecutor) driver).executeScript("alert('Test completed, browser closes after 3s')"); // ((JavascriptExecutor) driver).executeScript("alert('Test completed, browser closes after 3s')");
Thread.sleep(TestConstant.THREE_THOUSANG); Thread.sleep(TestConstant.THREE_THOUSANG);
if (driver != null) { if (driver != null) {
driver.quit(); driver.quit();
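
With driver and getDriver() now static, every page object and test in the suite shares one browser session and can fetch it without holding a BaseDriver reference. A minimal sketch of that access pattern (startBrowser() and the UI address below are assumptions, not taken from this diff):

  import org.apache.dolphinscheduler.base.BaseDriver;
  import org.openqa.selenium.WebDriver;

  public class DriverAccessSketch {
      public static void main(String[] args) throws Exception {
          // Start the shared browser once; startBrowser() is assumed to exist on BaseDriver.
          BaseDriver baseDriver = new BaseDriver();
          baseDriver.startBrowser();

          // Because the field and the getter are now static, any page object or test
          // can reach the same session without a BaseDriver reference.
          WebDriver driver = BaseDriver.getDriver();
          driver.get("http://localhost:8888/dolphinscheduler/ui/#/home"); // hypothetical UI address
          System.out.println("Current page title: " + driver.getTitle());

          baseDriver.closeBrowser(); // still an instance method, per the diff above
      }
  }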

17
e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java

@@ -17,7 +17,6 @@
  package org.apache.dolphinscheduler.base;
- import org.apache.dolphinscheduler.page.LoginPage;
  import org.apache.dolphinscheduler.util.PropertiesReader;
  import org.openqa.selenium.WebDriver;
  import org.testng.annotations.*;
@@ -28,6 +27,7 @@ import java.util.Properties;
  /**
   * base test class
   */
+ @Test(groups={"functionTests"})
  public class BaseTest {
  /**
   * properties
@@ -43,7 +43,7 @@ public class BaseTest {
  /**
   * driver
   */
- public WebDriver driver;
+ public static WebDriver driver;
  /**
   * Executed before executing a test suite
@@ -54,7 +54,7 @@ public class BaseTest {
   */
  @BeforeSuite(alwaysRun = true)
  @Parameters({"propertiesPath"})
- public void beforeSuite(@Optional("src/test/resources/config/config.properties") String propertiesPath) throws IOException {
+ public void beforeSuite(@Optional("src/test/resources/config/config.properties") String propertiesPath) throws Exception {
  // read properties
  properties = PropertiesReader.readProperties(propertiesPath);
  }
@@ -70,14 +70,13 @@ public class BaseTest {
  driver = baseDriver.getDriver();
  }
  /**
   * Executed before executing a class method in a test case
   */
  @BeforeClass(alwaysRun = true)
- public void setUp() throws IOException, InterruptedException {
+ public void setUp() throws Exception {
- LoginPage loginPage = new LoginPage(driver);
- loginPage.jumpPage();
- loginPage.login();
  }
@@ -85,7 +84,7 @@ public class BaseTest {
   * Execute after executing a class method in a test case
   */
  @AfterClass(alwaysRun = true)
- public void afterClass() {
+ public void afterClass() throws InterruptedException {
  // logout
  }
@@ -102,6 +101,6 @@ public class BaseTest {
   * Execute after executing a testsuite
   */
  @AfterSuite(alwaysRun = true)
- public void afterSuite() {
+ public void afterSuite() throws InterruptedException {
  }
  }
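
Since the LoginPage import and the login calls were removed from BaseTest.setUp(), login now has to happen in a concrete test that runs early in the functionTests group. A rough sketch under that assumption, reusing only the LoginPage calls visible in the deleted lines above (the LoginTest class name and the extra "login" group are illustrative, not part of this commit):

  import org.apache.dolphinscheduler.base.BaseTest;
  import org.apache.dolphinscheduler.page.LoginPage;
  import org.testng.annotations.Test;

  public class LoginTest extends BaseTest {

      // Intended to run before the other functionTests so that later tests
      // reuse the authenticated session held by the shared static driver.
      @Test(groups = {"functionTests", "login"})
      public void testLogin() throws Exception {
          LoginPage loginPage = new LoginPage(driver); // static driver inherited from BaseTest
          loginPage.jumpPage();                        // open the login page (method from the deleted lines)
          loginPage.login();                           // submit the credentials read from LoginData
      }
  }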

2
e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java

@@ -39,5 +39,5 @@ public class LoginData {
   */
  public static final String PASSWORD = PropertiesReader.getKey("PASSWORD");
- public static final String TENANT = "Tenant Manage - DolphinScheduler";
+ public static final String TENANT = "租户管理 - DolphinScheduler";
  }
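
TENANT switches from the English page title to the Chinese one ("租户管理" means "Tenant Manage"), which suggests the e2e run now asserts against the zh_CN UI. A hedged illustration of the usual title check such a constant backs (this helper class is hypothetical; the real assertion lives in the page or test classes, not in this file):

  import org.apache.dolphinscheduler.base.BaseDriver;
  import org.apache.dolphinscheduler.data.LoginData;
  import org.testng.Assert;

  public class TenantTitleCheck {

      // Hypothetical helper: confirm the shared browser is on the tenant-management page,
      // whose title is now expected in Chinese.
      public static void verifyTenantPageTitle() {
          Assert.assertEquals(BaseDriver.getDriver().getTitle(), LoginData.TENANT);
      }
  }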

2
e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java

@@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.data.project;
  public class CreatWorkflowData {
  //input shell task name
- public static final String SHELL_TASK_NAME = "shell task description test";
+ public static final String SHELL_TASK_NAME = "shell task description test1";
  //input shell task description
  public static final String SHELL_TASK_DESCRIPTION = "shell task description test";

Some files were not shown because too many files have changed in this diff.