break60 committed 4 years ago, commit b90461fae9
Changed files (the number in parentheses is the count of changed lines):
  1. .github/workflows/ci_ut.yml (10)
  2. README.md (2)
  3. ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py (2)
  4. dockerfile/Dockerfile (6)
  5. dockerfile/hooks/check (2)
  6. dockerfile/startup.sh (8)
  7. dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java (2)
  8. dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java (13)
  9. dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java (54)
  10. dolphinscheduler-alert/src/main/resources/alert.properties (1)
  11. dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/FuncUtilsTest.java (2)
  12. dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java (27)
  13. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java (4)
  14. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java (11)
  15. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (25)
  16. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java (5)
  17. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java (2)
  18. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java (23)
  19. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (5)
  20. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java (4)
  21. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java (2)
  22. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java (14)
  23. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java (14)
  24. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java (17)
  25. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java (6)
  26. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (254)
  27. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java (12)
  28. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java (19)
  29. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java (5)
  30. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java (25)
  31. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java (2)
  32. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java (16)
  33. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java (4)
  34. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java (10)
  35. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java (6)
  36. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java (10)
  37. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java (57)
  38. dolphinscheduler-common/src/main/resources/common.properties (2)
  39. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java (55)
  40. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java (17)
  41. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java (2)
  42. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java (6)
  43. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java (22)
  44. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java (8)
  45. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java (2)
  46. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java (134)
  47. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java (2)
  48. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java (2)
  49. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java (2)
  50. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java (8)
  51. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java (2)
  52. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java (4)
  53. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java (23)
  54. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java (39)
  55. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java (3)
  56. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java (3)
  57. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java (6)
  58. dolphinscheduler-dist/pom.xml (53)
  59. dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java (4)
  60. dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java (4)
  61. dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java (3)
  62. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java (21)
  63. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java (36)
  64. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java (67)
  65. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java (11)
  66. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java (2)
  67. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java (16)
  68. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java (22)
  69. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java (4)
  70. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java (45)
  71. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java (10)
  72. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java (4)
  73. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java (29)
  74. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java (17)
  75. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java (31)
  76. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java (9)
  77. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java (19)
  78. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java (11)
  79. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java (4)
  80. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java (24)
  81. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java (4)
  82. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java (4)
  83. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java (3)
  84. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java (10)
  85. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java (8)
  86. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java (2)
  87. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java (7)
  88. dolphinscheduler-ui/pom.xml (55)
  89. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js (6)
  90. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue (4)
  91. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue (12)
  92. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue (10)
  93. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue (21)
  94. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue (71)
  95. dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js (1)
  96. dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js (1)
  97. e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java (8)
  98. e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java (17)
  99. e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java (2)
  100. e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java (2)

Some files were not shown because too many files have changed in this diff.

.github/workflows/ci_ut.yml (10 changed lines)

@ -15,7 +15,7 @@
# limitations under the License.
#
on: ["pull_request"]
on: ["pull_request", "push"]
env:
DOCKER_DIR: ./docker
LOG_DIR: /tmp/dolphinscheduler
@ -52,7 +52,15 @@ jobs:
run: |
export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx3g'
mvn test -B -Dmaven.test.skip=false
- name: Upload coverage report to codecov
if: github.event_name == 'pull_request'
run: |
CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash)
- name: Git fetch unshallow
run: |
git fetch --unshallow
git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
git fetch origin
- name: Run SonarCloud Analysis
run: >
mvn verify --batch-mode

README.md (2 changed lines)

@ -17,7 +17,7 @@ Dolphin Scheduler Official Website
### Design features:
A distributed and easy-to-expand visual DAG workflow scheduling system. Dedicated to solving the complex dependencies in data processing, making the scheduling system `out of the box` for data processing.
A distributed and easy-to-extend visual DAG workflow scheduling system. Dedicated to solving the complex dependencies in data processing, making the scheduling system `out of the box` for data processing.
Its main objectives are as follows:
- Associate the Tasks according to the dependencies of the tasks in a DAG graph, which can visualize the running state of task in real time.

ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py (2 changed lines)

@ -30,7 +30,7 @@ sys.setdefaultencoding('utf-8')
config = Script.get_config()
# conf_dir = "/etc/"
dolphin_home = "/opt/soft/apache-dolphinscheduler-incubating-1.2.1"
dolphin_home = "/opt/soft/dolphinscheduler"
dolphin_conf_dir = dolphin_home + "/conf"
dolphin_log_dir = dolphin_home + "/logs"
dolphin_bin_dir = dolphin_home + "/bin"

dockerfile/Dockerfile (6 changed lines)

@ -23,11 +23,11 @@ ENV TZ Asia/Shanghai
ENV LANG C.UTF-8
ENV DEBIAN_FRONTEND noninteractive
#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip kazoo.
#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip tini kazoo.
#If install slowly, you can replcae alpine's mirror with aliyun's mirror, Example:
#RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
RUN apk update && \
apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip && \
apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip tini && \
apk add --update procps && \
openrc boot && \
pip install kazoo
@ -92,4 +92,4 @@ RUN rm -rf /var/cache/apk/*
#9. expose port
EXPOSE 2181 2888 3888 5432 12345 50051 8888
ENTRYPOINT ["/root/startup.sh"]
ENTRYPOINT ["/sbin/tini", "--", "/root/startup.sh"]

dockerfile/hooks/check (2 changed lines)

@ -17,7 +17,7 @@
#
echo "------ dolphinscheduler check - server - status -------"
sleep 20
server_num=$(docker top `docker container list | grep startup | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
server_num=$(docker top `docker container list | grep '/sbin/tini' | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
if [ $server_num -eq 5 ]
then
echo "Server all start successfully"

dockerfile/startup.sh (8 changed lines)

@ -164,6 +164,7 @@ case "$1" in
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-worker.log
;;
(api-server)
initZK
initPostgreSQL
initApiServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-api-server.log
@ -187,6 +188,9 @@ case "$1" in
;;
esac
echo "tee begin"
exec tee ${LOGFILE}
# init directories and log files
mkdir -p ${DOLPHINSCHEDULER_LOGS} && mkdir -p /var/log/nginx/ && cat /dev/null >> ${LOGFILE}
echo "tail begin"
exec bash -c "tail -n 1 -f ${LOGFILE}"

dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java (2 changed lines)

@ -20,7 +20,7 @@ import org.apache.dolphinscheduler.common.utils.StringUtils;
public class FuncUtils {
static public String mkString(Iterable<String> list, String split) {
public static String mkString(Iterable<String> list, String split) {
if (null == list || StringUtils.isEmpty(split)){
return null;

dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java (13 changed lines)

@ -16,12 +16,13 @@
*/
package org.apache.dolphinscheduler.alert.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
/**
@ -38,7 +39,7 @@ public class JSONUtils {
*/
public static String toJsonString(Object object) {
try{
return JSONObject.toJSONString(object,false);
return JSON.toJSONString(object,false);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
}
@ -50,19 +51,19 @@ public class JSONUtils {
* @param json the json
* @param clazz c
* @param <T> the generic clazz
* @return the result list
* @return the result list or empty list
*/
public static <T> List<T> toList(String json, Class<T> clazz) {
if (StringUtils.isEmpty(json)) {
return null;
return Collections.emptyList();
}
try {
return JSONArray.parseArray(json, clazz);
return JSON.parseArray(json, clazz);
} catch (Exception e) {
logger.error("JSONArray.parseArray exception!",e);
}
return null;
return Collections.emptyList();
}
}
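The change above replaces null returns from toList with an empty list. As a rough illustration of that contract, here is a standalone sketch (JsonListParser is a made-up name, not the project's class, and it assumes fastjson on the classpath); the updated test later in this diff asserts isEmpty() for the same blank and broken-input cases.

import com.alibaba.fastjson.JSON;
import java.util.Collections;
import java.util.List;

public class JsonListParser {

    /**
     * Parse a JSON array string into a typed list.
     * Blank input or a parse failure yields an empty list rather than null,
     * so callers can iterate the result without a null check.
     */
    public static <T> List<T> toList(String json, Class<T> clazz) {
        if (json == null || json.trim().isEmpty()) {
            return Collections.emptyList();
        }
        try {
            return JSON.parseArray(json, clazz);
        } catch (Exception e) {
            // parse error: log it in real code, then fall back to the empty list
            return Collections.emptyList();
        }
    }
}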

dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java (54 changed lines)

@ -39,29 +39,29 @@ public class MailUtils {
public static final Logger logger = LoggerFactory.getLogger(MailUtils.class);
public static final String mailProtocol = PropertyUtils.getString(Constants.MAIL_PROTOCOL);
public static final String MAIL_PROTOCOL = PropertyUtils.getString(Constants.MAIL_PROTOCOL);
public static final String mailServerHost = PropertyUtils.getString(Constants.MAIL_SERVER_HOST);
public static final String MAIL_SERVER_HOST = PropertyUtils.getString(Constants.MAIL_SERVER_HOST);
public static final Integer mailServerPort = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT);
public static final Integer MAIL_SERVER_PORT = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT);
public static final String mailSender = PropertyUtils.getString(Constants.MAIL_SENDER);
public static final String MAIL_SENDER = PropertyUtils.getString(Constants.MAIL_SENDER);
public static final String mailUser = PropertyUtils.getString(Constants.MAIL_USER);
public static final String MAIL_USER = PropertyUtils.getString(Constants.MAIL_USER);
public static final String mailPasswd = PropertyUtils.getString(Constants.MAIL_PASSWD);
public static final String MAIL_PASSWD = PropertyUtils.getString(Constants.MAIL_PASSWD);
public static final Boolean mailUseStartTLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE);
public static final Boolean MAIL_USE_START_TLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE);
public static final Boolean mailUseSSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
public static final Boolean MAIL_USE_SSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
public static final String xlsFilePath = PropertyUtils.getString(Constants.XLS_FILE_PATH);
public static final String XLS_FILE_PATH = PropertyUtils.getString(Constants.XLS_FILE_PATH);
public static final String starttlsEnable = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
public static final String STARTTLS_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
public static final String sslEnable = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);
public static final String SSL_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);
public static final String sslTrust = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);
public static final String SSL_TRUST = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);
public static final AlertTemplate alertTemplate = AlertTemplateFactory.getMessageTemplate();
@ -105,7 +105,7 @@ public class MailUtils {
try {
Session session = getSession();
email.setMailSession(session);
email.setFrom(mailSender);
email.setFrom(MAIL_SENDER);
email.setCharset(Constants.UTF_8);
if (CollectionUtils.isNotEmpty(receivers)){
// receivers mail
@ -199,10 +199,10 @@ public class MailUtils {
// 2. creating mail: Creating a MimeMessage
MimeMessage msg = new MimeMessage(session);
// 3. set sender
msg.setFrom(new InternetAddress(mailSender));
msg.setFrom(new InternetAddress(MAIL_SENDER));
// 4. set receivers
for (String receiver : receivers) {
msg.addRecipients(MimeMessage.RecipientType.TO, InternetAddress.parse(receiver));
msg.addRecipients(Message.RecipientType.TO, InternetAddress.parse(receiver));
}
return msg;
}
@ -213,19 +213,19 @@ public class MailUtils {
*/
private static Session getSession() {
Properties props = new Properties();
props.setProperty(Constants.MAIL_HOST, mailServerHost);
props.setProperty(Constants.MAIL_PORT, String.valueOf(mailServerPort));
props.setProperty(Constants.MAIL_HOST, MAIL_SERVER_HOST);
props.setProperty(Constants.MAIL_PORT, String.valueOf(MAIL_SERVER_PORT));
props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE);
props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, mailProtocol);
props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, starttlsEnable);
props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, sslEnable);
props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, sslTrust);
props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, MAIL_PROTOCOL);
props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, STARTTLS_ENABLE);
props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, SSL_ENABLE);
props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, SSL_TRUST);
Authenticator auth = new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
// mail username and password
return new PasswordAuthentication(mailUser, mailPasswd);
return new PasswordAuthentication(MAIL_USER, MAIL_PASSWD);
}
};
@ -248,12 +248,10 @@ public class MailUtils {
*/
if(CollectionUtils.isNotEmpty(receiversCc)){
for (String receiverCc : receiversCc){
msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(receiverCc));
msg.addRecipients(Message.RecipientType.CC, InternetAddress.parse(receiverCc));
}
}
// set receivers type to cc
// msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(propMap.get("${CC}")));
// set subject
msg.setSubject(title);
MimeMultipart partList = new MimeMultipart();
@ -263,8 +261,8 @@ public class MailUtils {
// set attach file
MimeBodyPart part2 = new MimeBodyPart();
// make excel file
ExcelUtils.genExcelFile(content,title,xlsFilePath);
File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
ExcelUtils.genExcelFile(content,title, XLS_FILE_PATH);
File file = new File(XLS_FILE_PATH + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
part2.attachFile(file);
part2.setFileName(MimeUtility.encodeText(title + Constants.EXCEL_SUFFIX_XLS,Constants.UTF_8,"B"));
// add components to collection
@ -334,7 +332,7 @@ public class MailUtils {
* @param e the exception
*/
private static void handleException(Collection<String> receivers, Map<String, Object> retMap, Exception e) {
logger.error("Send email to {} failed {}", receivers, e);
logger.error("Send email to {} failed", receivers, e);
retMap.put(Constants.MESSAGE, "Send email to {" + String.join(",", receivers) + "} failed," + e.toString());
}
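One small but easy-to-miss fix above is the logging call: in recent SLF4J versions a trailing Throwable argument is treated as the exception to attach, not as placeholder data. A minimal sketch of the difference (class name and message are illustrative, not the project's code):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class AlertLoggingSketch {

    private static final Logger logger = LoggerFactory.getLogger(AlertLoggingSketch.class);

    public void report(String receivers, Exception e) {
        // Before: the second "{}" has no argument to consume, because the
        // trailing exception is reserved for the stack trace, so the literal
        // braces end up in the log line.
        logger.error("Send email to {} failed {}", receivers, e);

        // After: one placeholder, one argument, and the exception's stack
        // trace is appended by the logging backend.
        logger.error("Send email to {} failed", receivers, e);
    }
}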

dolphinscheduler-alert/src/main/resources/alert.properties (1 changed line)

@ -28,7 +28,6 @@ mail.server.port=25
mail.sender=xxx@xxx.com
mail.user=xxx@xxx.com
mail.passwd=111111
# TLS
mail.smtp.starttls.enable=true
# SSL

dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/FuncUtilsTest.java (2 changed lines)

@ -46,7 +46,7 @@ public class FuncUtilsTest {
logger.info(result);
//Expected result string
assertEquals(result, "user1|user2|user3");
assertEquals("user1|user2|user3", result);
//Null list expected return null
result = FuncUtils.mkString(null, split);

dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java (27 changed lines)

@ -26,8 +26,7 @@ import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.*;
public class JSONUtilsTest {
@ -73,7 +72,7 @@ public class JSONUtilsTest {
result = JSONUtils.toJsonString(null);
logger.info(result);
assertEquals(result,"null");
assertEquals("null", result);
}
@ -86,25 +85,27 @@ public class JSONUtilsTest {
//Invoke toList
List<LinkedHashMap> result = JSONUtils.toList(expected ,LinkedHashMap.class);
//Equal list size=1
assertEquals(result.size(),1);
assertEquals(1,result.size());
//Transform entity to LinkedHashMap<String, Object>
LinkedHashMap<String, Object> entity = result.get(0);
//Equal expected values
assertEquals(entity.get("mysql service name"),"mysql200");
assertEquals(entity.get("mysql address"),"192.168.xx.xx");
assertEquals(entity.get("port"),"3306");
assertEquals(entity.get("no index of number"),"80");
assertEquals(entity.get("database client connections"),"190");
assertEquals("mysql200",entity.get("mysql service name"));
assertEquals("192.168.xx.xx", entity.get("mysql address"));
assertEquals("3306", entity.get("port"));
assertEquals("80", entity.get("no index of number"));
assertEquals("190", entity.get("database client connections"));
//If param is null, then return null
//If param is null, then return empty list
result = JSONUtils.toList(null ,LinkedHashMap.class);
assertNull(result);
assertNotNull(result);
assertTrue(result.isEmpty());
//If param is incorrect, then return null and log error message
//If param is incorrect, then return empty list and log error message
result = JSONUtils.toList("}{" ,LinkedHashMap.class);
assertNull(result);
assertNotNull(result);
assertTrue(result.isEmpty());
}
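The reordered assertions follow JUnit's assertEquals(expected, actual) convention, which matters mainly for the failure message, and the null checks become empty-list checks. A small self-contained test sketching both ideas (names and values here are illustrative):

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Collections;
import java.util.List;
import org.junit.Test;

public class AssertConventionTest {

    @Test
    public void expectedValueComesFirst() {
        String result = String.join("|", "user1", "user2", "user3");
        // assertEquals(expected, actual): on failure JUnit reports
        // "expected:<user1|user2|user3> but was:<...>", which only reads
        // correctly when the literal is the first argument.
        assertEquals("user1|user2|user3", result);
    }

    @Test
    public void emptyListInsteadOfNull() {
        List<String> result = Collections.emptyList();
        // with the empty-list contract, callers assert on isEmpty() rather than on null
        assertTrue(result.isEmpty());
    }
}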

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java (4 changed lines)

@ -25,7 +25,9 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2;
@SpringBootApplication
@ServletComponentScan
@ComponentScan("org.apache.dolphinscheduler")
@ComponentScan({"org.apache.dolphinscheduler.api",
"org.apache.dolphinscheduler.dao",
"org.apache.dolphinscheduler.service"})
public class ApiApplicationServer extends SpringBootServletInitializer {
public static void main(String[] args) {
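Narrowing @ComponentScan from the whole org.apache.dolphinscheduler root to the api, dao and service packages keeps beans from unrelated modules out of the API application context. A minimal Spring Boot sketch of the same layout (DemoApiServer is a made-up name, not the real class):

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;

@SpringBootApplication
@ComponentScan({"org.apache.dolphinscheduler.api",
        "org.apache.dolphinscheduler.dao",
        "org.apache.dolphinscheduler.service"})
public class DemoApiServer {

    public static void main(String[] args) {
        // only beans under the three listed packages are scanned into this context
        SpringApplication.run(DemoApiServer.class, args);
    }
}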

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java (11 changed lines)

@ -27,7 +27,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@ -90,14 +89,4 @@ public class LoginHandlerInterceptor implements HandlerInterceptor {
return true;
}
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
}
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
}
}
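The empty postHandle and afterCompletion overrides can be deleted because, since Spring Framework 5, HandlerInterceptor declares them as default methods. A stripped-down interceptor in that style (the class name and header check are hypothetical):

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.servlet.HandlerInterceptor;

public class SessionCheckInterceptor implements HandlerInterceptor {

    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
        // reject the request unless a session token header is present;
        // postHandle/afterCompletion fall back to the interface defaults
        return request.getHeader("sessionId") != null;
    }
}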

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (25 changed lines)

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
@ -303,7 +304,7 @@ public class DataSourceService extends BaseService{
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
JSONObject object = JSONObject.parseObject(connectionParams);
JSONObject object = JSON.parseObject(connectionParams);
object.put(Constants.PASSWORD, Constants.XXXXXX);
dataSource.setConnectionParams(JSONUtils.toJson(object));
@ -367,11 +368,11 @@ public class DataSourceService extends BaseService{
try {
switch (dbType) {
case POSTGRESQL:
datasource = JSONObject.parseObject(parameter, PostgreDataSource.class);
datasource = JSON.parseObject(parameter, PostgreDataSource.class);
Class.forName(Constants.ORG_POSTGRESQL_DRIVER);
break;
case MYSQL:
datasource = JSONObject.parseObject(parameter, MySQLDataSource.class);
datasource = JSON.parseObject(parameter, MySQLDataSource.class);
Class.forName(Constants.COM_MYSQL_JDBC_DRIVER);
break;
case HIVE:
@ -386,26 +387,26 @@ public class DataSourceService extends BaseService{
getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
}
if (dbType == DbType.HIVE){
datasource = JSONObject.parseObject(parameter, HiveDataSource.class);
datasource = JSON.parseObject(parameter, HiveDataSource.class);
}else if (dbType == DbType.SPARK){
datasource = JSONObject.parseObject(parameter, SparkDataSource.class);
datasource = JSON.parseObject(parameter, SparkDataSource.class);
}
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case CLICKHOUSE:
datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class);
datasource = JSON.parseObject(parameter, ClickHouseDataSource.class);
Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
break;
case ORACLE:
datasource = JSONObject.parseObject(parameter, OracleDataSource.class);
datasource = JSON.parseObject(parameter, OracleDataSource.class);
Class.forName(Constants.COM_ORACLE_JDBC_DRIVER);
break;
case SQLSERVER:
datasource = JSONObject.parseObject(parameter, SQLServerDataSource.class);
datasource = JSON.parseObject(parameter, SQLServerDataSource.class);
Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER);
break;
case DB2:
datasource = JSONObject.parseObject(parameter, DB2ServerDataSource.class);
datasource = JSON.parseObject(parameter, DB2ServerDataSource.class);
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
default:
@ -507,7 +508,7 @@ public class DataSourceService extends BaseService{
parameterMap.put(Constants.PRINCIPAL,principal);
}
if (other != null && !"".equals(other)) {
LinkedHashMap<String, String> map = JSONObject.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
LinkedHashMap<String, String> map = JSON.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
});
if (map.size() > 0) {
StringBuilder otherSb = new StringBuilder();
@ -523,9 +524,9 @@ public class DataSourceService extends BaseService{
}
if(logger.isDebugEnabled()){
logger.info("parameters map-----" + JSONObject.toJSONString(parameterMap));
logger.info("parameters map-----" + JSON.toJSONString(parameterMap));
}
return JSONObject.toJSONString(parameterMap);
return JSON.toJSONString(parameterMap);
}
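The edits above consistently call the static parse methods through the JSON facade instead of the JSONObject subclass; the behaviour is the same, the intent is clearer. A small standalone sketch of the two fastjson calls used here (the sample JSON strings are made up):

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import java.util.LinkedHashMap;

public class ConnectionParamsSketch {

    public static void main(String[] args) {
        String connectionParams = "{\"address\":\"jdbc:mysql://192.168.0.1:3306\",\"password\":\"secret\"}";

        // generic parse into a JSONObject, then mask the password before display
        JSONObject object = JSON.parseObject(connectionParams);
        object.put("password", "xxxxxx");
        System.out.println(JSON.toJSONString(object));

        // TypeReference preserves the generic map type for the "other" properties
        String other = "{\"useSSL\":\"false\",\"characterEncoding\":\"utf8\"}";
        LinkedHashMap<String, String> map =
                JSON.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {});
        System.out.println(map);
    }
}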

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java (5 changed lines)

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.nio.charset.StandardCharsets;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
@ -49,7 +50,6 @@ import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.util.*;
import java.util.stream.Collectors;
@ -273,7 +273,8 @@ public class ProcessInstanceService extends BaseDAGService {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(Charset.forName("utf8"))), Charset.forName("utf8")));
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
if(line.contains(DEPENDENT_SPLIT)){
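Swapping Charset.forName("utf8") for StandardCharsets.UTF_8 removes the string-based charset lookup; the constant is always available and cannot fail at runtime on a mistyped name. A compact sketch of the same read loop over an in-memory log (sample content only):

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class LogReaderSketch {

    public static void main(String[] args) throws IOException {
        String log = "task dependency: A -> B\nstatus: SUCCESS";
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new ByteArrayInputStream(log.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);  // the real code filters lines containing DEPENDENT_SPLIT
            }
        }
    }
}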

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java (2 changed lines)

@ -121,7 +121,7 @@ public class ZooKeeperState {
private class SendThread extends Thread {
private String cmd;
public String ret = "";
private String ret = "";
public SendThread(String cmd) {
this.cmd = cmd;

dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java (23 changed lines)

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.api.controller;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ResourceType;
@ -54,7 +55,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -78,7 +79,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -281,7 +282,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -303,7 +304,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -324,7 +325,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -344,7 +345,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -365,7 +366,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -386,7 +387,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -406,7 +407,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -427,7 +428,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -446,7 +447,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (5 changed lines)

@ -813,6 +813,11 @@ public final class Constants {
*/
public static final String KERBEROS = "kerberos";
/**
* kerberos expire time
*/
public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time";
/**
* java.security.krb5.conf
*/

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java (4 changed lines)

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.common.model;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
@ -23,7 +24,6 @@ import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
@ -294,7 +294,7 @@ public class TaskNode {
if(StringUtils.isNotEmpty(this.getTimeout())){
String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name());
String timeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name());
return JSONObject.parseObject(timeout,TaskTimeoutParameter.class);
return JSON.parseObject(timeout,TaskTimeoutParameter.class);
}
return new TaskTimeoutParameter(false);
}

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java (2 changed lines)

@ -335,7 +335,7 @@ public abstract class AbstractShell {
try{
entry.getValue().destroy();
} catch (Exception e) {
e.printStackTrace();
logger.error("Destroy All Processes error", e);
}
}

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java (14 changed lines)

@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@ -207,12 +208,15 @@ public class FlinkParameters extends AbstractParameters {
@Override
public List<String> getResourceFilesList() {
if(resourceList !=null ) {
this.resourceList.add(mainJar);
return resourceList.stream()
.map(p -> p.getRes()).collect(Collectors.toList());
if(resourceList != null ) {
List<String> resourceFiles = resourceList.stream()
.map(ResourceInfo::getRes).collect(Collectors.toList());
if(mainJar != null) {
resourceFiles.add(mainJar.getRes());
}
return resourceFiles;
}
return null;
return Collections.emptyList();
}
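The rewritten getResourceFilesList no longer mutates resourceList by adding mainJar to it on every call, and it returns an empty list instead of null. A self-contained sketch of that pattern (ResourceInfo here is a simplified stand-in for the real class):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class ResourceListSketch {

    // simplified stand-in for org.apache.dolphinscheduler.common.process.ResourceInfo
    public static class ResourceInfo {
        private final String res;
        public ResourceInfo(String res) { this.res = res; }
        public String getRes() { return res; }
    }

    private List<ResourceInfo> resourceList = new ArrayList<>();
    private ResourceInfo mainJar;

    /**
     * Collect resource file names without mutating the underlying list:
     * map into a fresh list, append the main jar if present, and return an
     * empty list rather than null so callers need no null checks.
     */
    public List<String> getResourceFilesList() {
        if (resourceList != null) {
            List<String> resourceFiles = resourceList.stream()
                    .map(ResourceInfo::getRes)
                    .collect(Collectors.toList());
            if (mainJar != null) {
                resourceFiles.add(mainJar.getRes());
            }
            return resourceFiles;
        }
        return Collections.emptyList();
    }
}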

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java (14 changed lines)

@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@ -125,12 +126,15 @@ public class MapreduceParameters extends AbstractParameters {
@Override
public List<String> getResourceFilesList() {
if (resourceList != null) {
this.resourceList.add(mainJar);
return resourceList.stream()
.map(p -> p.getRes()).collect(Collectors.toList());
if(resourceList != null ) {
List<String> resourceFiles = resourceList.stream()
.map(ResourceInfo::getRes).collect(Collectors.toList());
if(mainJar != null) {
resourceFiles.add(mainJar.getRes());
}
return resourceFiles;
}
return null;
return Collections.emptyList();
}
@Override

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java (17 changed lines)

@ -120,12 +120,24 @@ public class ThreadUtils {
/**
* Wrapper over ScheduledThreadPoolExecutor
* @param threadName
* @param corePoolSize
* @return
*/
public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName,int corePoolSize) {
public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName, int corePoolSize) {
return newThreadScheduledExecutor(threadName, corePoolSize, true);
}
/**
* Wrapper over ScheduledThreadPoolExecutor
* @param threadName
* @param corePoolSize
* @param isDaemon
* @return
*/
public static ScheduledExecutorService newThreadScheduledExecutor(String threadName, int corePoolSize, boolean isDaemon) {
ThreadFactory threadFactory = new ThreadFactoryBuilder()
.setDaemon(true)
.setDaemon(isDaemon)
.setNameFormat(threadName)
.build();
ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory);
@ -135,7 +147,6 @@ public class ThreadUtils {
return executor;
}
public static ThreadInfo getThreadInfo(Thread t) {
long tid = t.getId();
return threadBean.getThreadInfo(tid, STACK_DEPTH);
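Extracting newThreadScheduledExecutor with an explicit isDaemon flag lets callers choose between daemon and user threads while reusing the same ThreadFactoryBuilder setup (assumed here to be Guava's). A minimal version of that factory, as a standalone sketch rather than the project's class:

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;

public class SchedulerFactorySketch {

    public static ScheduledExecutorService newThreadScheduledExecutor(String threadName,
                                                                      int corePoolSize,
                                                                      boolean isDaemon) {
        ThreadFactory threadFactory = new ThreadFactoryBuilder()
                .setDaemon(isDaemon)        // daemon threads do not block JVM shutdown
                .setNameFormat(threadName)  // e.g. "HeartBeatExecutor-%d"
                .build();
        ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory);
        // optional hardening: drop cancelled tasks from the work queue
        executor.setRemoveOnCancelPolicy(true);
        return executor;
    }
}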

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java (6 changed lines)

@ -44,7 +44,7 @@ public class FileUtils {
String fileSuffix = "";
if (StringUtils.isNotEmpty(filename)) {
int lastIndex = filename.lastIndexOf(".");
int lastIndex = filename.lastIndexOf('.');
if (lastIndex > 0) {
fileSuffix = filename.substring(lastIndex + 1);
}
@ -325,10 +325,8 @@ public class FileUtils {
}
} else {
File parent = file.getParentFile();
if (parent != null) {
if (!parent.mkdirs() && !parent.isDirectory()) {
if (parent != null && !parent.mkdirs() && !parent.isDirectory()) {
throw new IOException("Directory '" + parent + "' could not be created");
}
}
}
return new FileOutputStream(file, append);

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (254 changed lines)

@ -16,6 +16,9 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
@ -32,9 +35,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -46,32 +52,37 @@ public class HadoopUtils implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
private static volatile HadoopUtils instance = new HadoopUtils();
private static volatile Configuration configuration;
private static FileSystem fs;
private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
.newBuilder()
.expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 7), TimeUnit.DAYS)
.build(new CacheLoader<String, HadoopUtils>() {
@Override
public HadoopUtils load(String key) throws Exception {
return new HadoopUtils();
}
});
private HadoopUtils(){
if(StringUtils.isEmpty(hdfsUser)){
hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
}
private Configuration configuration;
private FileSystem fs;
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
private HadoopUtils() {
init();
initHdfsPath();
}
public static HadoopUtils getInstance(){
// if kerberos startup , renew HadoopUtils
if (CommonUtils.getKerberosStartupState()){
return new HadoopUtils();
}
return instance;
public static HadoopUtils getInstance() {
return cache.getUnchecked(HADOOP_UTILS_KEY);
}
/**
* init dolphinscheduler root path in hdfs
*/
private void initHdfsPath(){
private void initHdfsPath() {
String hdfsPath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH);
Path path = new Path(hdfsPath);
@ -80,7 +91,7 @@ public class HadoopUtils implements Closeable {
fs.mkdirs(path);
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
logger.error(e.getMessage(), e);
}
}
@ -89,82 +100,74 @@ public class HadoopUtils implements Closeable {
* init hadoop configuration
*/
private void init() {
if (configuration == null) {
synchronized (HadoopUtils.class) {
if (configuration == null) {
try {
configuration = new Configuration();
String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
if (resUploadType == ResUploadType.HDFS){
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)){
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos");
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
try {
configuration = new Configuration();
String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
if (resUploadType == ResUploadType.HDFS) {
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
//first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
// the default is the local file system
if(defaultFS.startsWith("file")){
String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
if(StringUtils.isNotBlank(defaultFSProp)){
Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
configuration.set(Constants.FS_DEFAULTFS,defaultFSProp);
fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
}else{
logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS );
throw new RuntimeException(
String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
);
}
}else{
logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
}
String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
//first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
// the default is the local file system
if (defaultFS.startsWith("file")) {
String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
if (StringUtils.isNotBlank(defaultFSProp)) {
Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
} else {
logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
throw new RuntimeException(
String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
);
}
} else {
logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
}
if (fs == null) {
if(StringUtils.isNotEmpty(hdfsUser)){
//UserGroupInformation ugi = UserGroupInformation.createProxyUser(hdfsUser,UserGroupInformation.getLoginUser());
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
fs = FileSystem.get(configuration);
return true;
}
});
}else{
logger.warn("hdfs.root.user is not set value!");
fs = FileSystem.get(configuration);
}
if (fs == null) {
if (StringUtils.isNotEmpty(hdfsUser)) {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
fs = FileSystem.get(configuration);
return true;
}
}else if (resUploadType == ResUploadType.S3){
configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
if (!StringUtils.isEmpty(rmHaIds)) {
appAddress = getAppAddress(appAddress, rmHaIds);
logger.info("appAddress : {}", appAddress);
}
configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress);
} catch (Exception e) {
logger.error(e.getMessage(), e);
});
} else {
logger.warn("hdfs.root.user is not set value!");
fs = FileSystem.get(configuration);
}
}
} else if (resUploadType == ResUploadType.S3) {
configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
if (!StringUtils.isEmpty(rmHaIds)) {
appAddress = getAppAddress(appAddress, rmHaIds);
logger.info("appAddress : {}", appAddress);
}
configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
@ -188,15 +191,15 @@ public class HadoopUtils implements Closeable {
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @param hdfsFilePath hdfs file path
* @return byte[] byte array
* @throws IOException errors
*/
public byte[] catFile(String hdfsFilePath) throws IOException {
if(StringUtils.isBlank(hdfsFilePath)){
logger.error("hdfs file path:{} is blank",hdfsFilePath);
return null;
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return new byte[0];
}
FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
@ -204,29 +207,28 @@ public class HadoopUtils implements Closeable {
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @param skipLineNums skip line numbers
* @param limit read how many lines
* @param hdfsFilePath hdfs file path
* @param skipLineNums skip line numbers
* @param limit read how many lines
* @return content of file
* @throws IOException errors
*/
public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)){
logger.error("hdfs file path:{} is blank",hdfsFilePath);
return null;
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return Collections.emptyList();
}
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))){
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
BufferedReader br = new BufferedReader(new InputStreamReader(in));
Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
return stream.collect(Collectors.toList());
}
}
/**
@ -259,17 +261,17 @@ public class HadoopUtils implements Closeable {
/**
* the src file is on the local disk. Add it to FS at
* the given dst name.
* @param srcFile local file
* @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
*
* @param srcFile local file
* @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcFile);
Path dstPath= new Path(dstHdfsPath);
Path dstPath = new Path(dstHdfsPath);
fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);
@ -279,10 +281,10 @@ public class HadoopUtils implements Closeable {
/**
* copy hdfs file to local
*
* @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file
* @param deleteSource delete source
* @param overwrite overwrite
* @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file
* @param deleteSource delete source
* @param overwrite overwrite
* @return result of copy hdfs file to local
* @throws IOException errors
*/
@ -293,14 +295,14 @@ public class HadoopUtils implements Closeable {
if (dstPath.exists()) {
if (dstPath.isFile()) {
if (overwrite) {
dstPath.delete();
Files.delete(dstPath.toPath());
}
} else {
logger.error("destination file must be a file");
}
}
if(!dstPath.getParentFile().exists()){
if (!dstPath.getParentFile().exists()) {
dstPath.getParentFile().mkdirs();
}
@ -308,14 +310,13 @@ public class HadoopUtils implements Closeable {
}
/**
*
* delete a file
*
* @param hdfsFilePath the path to delete.
* @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false.
* @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false.
* @throws IOException errors
*/
public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
@ -340,7 +341,7 @@ public class HadoopUtils implements Closeable {
* @return {@link FileStatus} file status
* @throws Exception errors
*/
public FileStatus[] listFileStatus(String filePath)throws Exception{
public FileStatus[] listFileStatus(String filePath) throws Exception {
try {
return fs.listStatus(new Path(filePath));
} catch (IOException e) {
@ -352,10 +353,11 @@ public class HadoopUtils implements Closeable {
/**
* Renames Path src to Path dst. Can take place on local fs
* or remote DFS.
*
* @param src path to be renamed
* @param dst new path after rename
* @throws IOException on failure
* @return true if rename is successful
* @throws IOException on failure
*/
public boolean rename(String src, String dst) throws IOException {
return fs.rename(new Path(src), new Path(dst));
@ -378,7 +380,7 @@ public class HadoopUtils implements Closeable {
String responseContent = HttpUtils.get(applicationUrl);
JSONObject jsonObject = JSONObject.parseObject(responseContent);
JSONObject jsonObject = JSON.parseObject(responseContent);
String result = jsonObject.getJSONObject("app").getString("finalStatus");
switch (result) {
@ -401,7 +403,6 @@ public class HadoopUtils implements Closeable {
}
/**
*
* @return data hdfs path
*/
public static String getHdfsDataBasePath() {
@ -428,11 +429,11 @@ public class HadoopUtils implements Closeable {
* hdfs user dir
*
* @param tenantCode tenant code
* @param userId user id
* @param userId user id
* @return hdfs resource dir
*/
public static String getHdfsUserDir(String tenantCode,int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode),userId);
public static String getHdfsUserDir(String tenantCode, int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
}
/**
@ -480,7 +481,7 @@ public class HadoopUtils implements Closeable {
* getAppAddress
*
* @param appAddress app address
* @param rmHa resource manager ha
* @param rmHa resource manager ha
* @return app address
*/
public static String getAppAddress(String appAddress, String rmHa) {
@ -525,8 +526,6 @@ public class HadoopUtils implements Closeable {
*/
private static final class YarnHAAdminUtils extends RMAdminCLI {
private static final Logger logger = LoggerFactory.getLogger(YarnHAAdminUtils.class);
/**
* get active resourcemanager
*
@ -585,8 +584,7 @@ public class HadoopUtils implements Closeable {
JSONObject jsonObject = JSON.parseObject(retStr);
//get ResourceManager state
String state = jsonObject.getJSONObject("clusterInfo").getString("haState");
return state;
return jsonObject.getJSONObject("clusterInfo").getString("haState");
}
}
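As the catFile hunk above shows, the method now returns an empty list for a blank path and relies on try-with-resources to close the stream. A minimal sketch of the same pattern in isolation, assuming a FileSystem handle named fs that is configured elsewhere; everything here is illustrative, not part of the commit:

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class HdfsCatExample {
    // fs is assumed to be created elsewhere, e.g. FileSystem.get(new Configuration())
    private final FileSystem fs;

    public HdfsCatExample(FileSystem fs) {
        this.fs = fs;
    }

    public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
        if (hdfsFilePath == null || hdfsFilePath.trim().isEmpty()) {
            // callers can iterate the result without a null check
            return Collections.emptyList();
        }
        // both the stream and the reader are closed automatically, even on exception
        try (FSDataInputStream in = fs.open(new Path(hdfsFilePath));
             BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
            return br.lines().skip(skipLineNums).limit(limit).collect(Collectors.toList());
        }
    }
}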

12
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java

@ -81,17 +81,15 @@ public class HttpUtils {
logger.error(e.getMessage(),e);
}
if (httpget != null && !httpget.isAborted()) {
if (!httpget.isAborted()) {
httpget.releaseConnection();
httpget.abort();
}
if (httpclient != null) {
try {
httpclient.close();
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
try {
httpclient.close();
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
}
return responseContent;
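The hunk above drops the redundant null checks before releasing httpget and closing httpclient. A non-authoritative alternative for HttpClient 4.3+, where both the client and the response are Closeable, is try-with-resources; the method name and URL handling below are purely illustrative:

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import java.io.IOException;

public class HttpGetExample {
    public static String get(String url) {
        HttpGet httpGet = new HttpGet(url);
        // client and response are closed automatically in reverse order
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(httpGet)) {
            if (response.getStatusLine().getStatusCode() == 200) {
                return EntityUtils.toString(response.getEntity(), "UTF-8");
            }
            return null;
        } catch (IOException e) {
            // callers treat null as "request failed"
            return null;
        } finally {
            httpGet.releaseConnection();
        }
    }
}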

19
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java

@ -19,26 +19,17 @@
package org.apache.dolphinscheduler.common.utils;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
public class IOUtils {
public static void closeQuietly(InputStream fis){
if(fis != null){
public static void closeQuietly(Closeable closeable){
if(closeable != null){
try {
fis.close();
} catch (IOException ignore) {
}
}
}
public static void closeQuietly(InputStreamReader reader){
if(reader != null){
try {
reader.close();
closeable.close();
} catch (IOException ignore) {
// nothing to do
}
}
}
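With the two overloads collapsed into closeQuietly(Closeable), streams, readers and any other Closeable now go through the same call. A short illustrative usage, with a hypothetical file path:

import org.apache.dolphinscheduler.common.utils.IOUtils;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;

public class CloseQuietlyExample {
    public static void main(String[] args) throws IOException {
        FileInputStream fis = null;
        InputStreamReader reader = null;
        try {
            fis = new FileInputStream("/tmp/example.txt"); // illustrative path
            reader = new InputStreamReader(fis);
            System.out.println((char) reader.read());
        } finally {
            // one overload covers every Closeable; null arguments are simply ignored
            IOUtils.closeQuietly(reader);
            IOUtils.closeQuietly(fis);
        }
    }
}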

5
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java

@ -17,16 +17,11 @@
package org.apache.dolphinscheduler.common.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ip utils
*/
public class IpUtils {
private static final Logger logger = LoggerFactory.getLogger(IpUtils.class);
public static final String DOT = ".";
/**

25
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
@ -41,12 +42,6 @@ public class JSONUtils {
*/
private static final ObjectMapper objectMapper = new ObjectMapper();
/**
* init
*/
private static final JSONUtils instance = new JSONUtils();
private JSONUtils() {
//Feature that determines whether encountering of unknown properties, false means not analyzer unknown properties
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault());
@ -59,7 +54,7 @@ public class JSONUtils {
*/
public static String toJson(Object object) {
try{
return JSONObject.toJSONString(object,false);
return JSON.toJSONString(object,false);
} catch (Exception e) {
logger.error("object to json exception!",e);
}
@ -89,7 +84,7 @@ public class JSONUtils {
}
try {
return JSONObject.parseObject(json, clazz);
return JSON.parseObject(json, clazz);
} catch (Exception e) {
logger.error("parse object exception!",e);
}
@ -178,7 +173,7 @@ public class JSONUtils {
}
try {
return JSONObject.parseObject(json, new TypeReference<HashMap<String, String>>(){});
return JSON.parseObject(json, new TypeReference<HashMap<String, String>>(){});
} catch (Exception e) {
logger.error("json to map exception!",e);
}
@ -203,7 +198,7 @@ public class JSONUtils {
}
try {
return JSONObject.parseObject(json, new TypeReference<HashMap<K, V>>() {});
return JSON.parseObject(json, new TypeReference<HashMap<K, V>>() {});
} catch (Exception e) {
logger.error("json to map exception!",e);
}
@ -218,23 +213,23 @@ public class JSONUtils {
*/
public static String toJsonString(Object object) {
try{
return JSONObject.toJSONString(object,false);
return JSON.toJSONString(object,false);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
throw new RuntimeException("Object json deserialization exception.", e);
}
}
public static JSONObject parseObject(String text) {
try{
return JSONObject.parseObject(text);
return JSON.parseObject(text);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
throw new RuntimeException("String json deserialization exception.", e);
}
}
public static JSONArray parseArray(String text) {
try{
return JSONObject.parseArray(text);
return JSON.parseArray(text);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
}
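These hunks route all fastjson calls through the JSON facade instead of the JSONObject/JSONArray statics. A self-contained sketch of the three calls touched here; the User POJO is hypothetical:

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;

import java.util.HashMap;
import java.util.Map;

public class FastjsonFacadeExample {
    // hypothetical POJO used only for this sketch
    public static class User {
        private String id;
        private String name;
        public String getId() { return id; }
        public void setId(String id) { this.id = id; }
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        // serialize: JSON.toJSONString replaces JSONObject.toJSONString
        User user = new User();
        user.setId("1001");
        user.setName("Jobs");
        String json = JSON.toJSONString(user, false);

        // deserialize to a typed object and to a generic map
        User back = JSON.parseObject(json, User.class);
        Map<String, String> map = JSON.parseObject(json, new TypeReference<HashMap<String, String>>() {});
        System.out.println(back.getName() + " / " + map.get("id"));
    }
}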

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java

@ -79,7 +79,7 @@ public class LoggerUtils {
*/
public static List<String> getAppIds(String log, Logger logger) {
List<String> appIds = new ArrayList<String>();
List<String> appIds = new ArrayList<>();
Matcher matcher = APPLICATION_REGEX.matcher(log);

16
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java

@ -400,8 +400,7 @@ public class OSUtils {
* @return true if mac
*/
public static boolean isMacOS() {
String os = System.getProperty("os.name");
return os.startsWith("Mac");
return getOSName().startsWith("Mac");
}
@ -409,9 +408,16 @@ public class OSUtils {
* whether is windows
* @return true if windows
*/
public static boolean isWindows() {
String os = System.getProperty("os.name");
return os.startsWith("Windows");
public static boolean isWindows() {
return getOSName().startsWith("Windows");
}
/**
* get current OS name
* @return current OS name
*/
public static String getOSName() {
return System.getProperty("os.name");
}
/**

4
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
@ -23,7 +24,6 @@ import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.slf4j.Logger;
@ -157,7 +157,7 @@ public class ParameterUtils {
property.setValue(val);
}
}
return JSONObject.toJSONString(globalParamList);
return JSON.toJSONString(globalParamList);
}

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java

@ -43,13 +43,11 @@ public class PropertyUtils {
private static final Properties properties = new Properties();
private static final PropertyUtils propertyUtils = new PropertyUtils();
private PropertyUtils(){
init();
private PropertyUtils() {
throw new IllegalStateException("PropertyUtils class");
}
private void init(){
static {
String[] propertyFiles = new String[]{COMMON_PROPERTIES_PATH};
for (String fileName : propertyFiles) {
InputStream fis = null;
@ -125,7 +123,7 @@ public class PropertyUtils {
* @param key property name
* @return property value
*/
public static Boolean getBoolean(String key) {
public static boolean getBoolean(String key) {
String value = properties.getProperty(key.trim());
if(null != value){
return Boolean.parseBoolean(value);
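The PropertyUtils hunk moves one-time loading into a static initializer and makes the private constructor throw, the usual guard for a static utility class. A generic sketch of that pattern, with the class and properties file name purely illustrative:

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public final class ConfigUtils {
    private static final Properties PROPERTIES = new Properties();

    // load once, when the class is first touched
    static {
        try (InputStream in = ConfigUtils.class.getResourceAsStream("/common.properties")) {
            if (in != null) {
                PROPERTIES.load(in);
            }
        } catch (IOException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    // utility class: constructing an instance is a programming error
    private ConfigUtils() {
        throw new IllegalStateException("ConfigUtils class");
    }

    public static boolean getBoolean(String key) {
        return Boolean.parseBoolean(PROPERTIES.getProperty(key, "false"));
    }
}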

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java

@ -31,12 +31,12 @@ public class PlaceholderUtils {
/**
* Prefix of the position to be replaced
*/
public static final String placeholderPrefix = "${";
public static final String PLACEHOLDER_PREFIX = "${";
/**
* The suffix of the position to be replaced
*/
public static final String placeholderSuffix = "}";
public static final String PLACEHOLDER_SUFFIX = "}";
/**
@ -68,7 +68,7 @@ public class PlaceholderUtils {
*/
public static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) {
return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders);
return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders);
}
/**

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java

@ -426,6 +426,7 @@ public class ProcessBuilderForWin32 {
static final ProcessBuilderForWin32.NullInputStream INSTANCE = new ProcessBuilderForWin32.NullInputStream();
private NullInputStream() {}
public int read() { return -1; }
@Override
public int available() { return 0; }
}
@ -462,7 +463,7 @@ public class ProcessBuilderForWin32 {
*
* @since 1.7
*/
public static abstract class Redirect {
public abstract static class Redirect {
/**
* The type of a {@link ProcessBuilderForWin32.Redirect}.
*/
@ -494,7 +495,7 @@ public class ProcessBuilderForWin32 {
* {@link ProcessBuilderForWin32.Redirect#appendTo Redirect.appendTo(File)}.
*/
APPEND
};
}
/**
* Returns the type of this {@code Redirect}.
@ -568,6 +569,7 @@ public class ProcessBuilderForWin32 {
throw new NullPointerException();
return new ProcessBuilderForWin32.Redirect() {
public Type type() { return Type.READ; }
@Override
public File file() { return file; }
public String toString() {
return "redirect to read from file \"" + file + "\"";
@ -595,10 +597,12 @@ public class ProcessBuilderForWin32 {
throw new NullPointerException();
return new ProcessBuilderForWin32.Redirect() {
public Type type() { return Type.WRITE; }
@Override
public File file() { return file; }
public String toString() {
return "redirect to write to file \"" + file + "\"";
}
@Override
boolean append() { return false; }
};
}
@ -626,10 +630,12 @@ public class ProcessBuilderForWin32 {
throw new NullPointerException();
return new ProcessBuilderForWin32.Redirect() {
public Type type() { return Type.APPEND; }
@Override
public File file() { return file; }
public String toString() {
return "redirect to append to file \"" + file + "\"";
}
@Override
boolean append() { return true; }
};
}

57
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java

@ -19,6 +19,8 @@ package org.apache.dolphinscheduler.common.utils.process;
import com.sun.jna.Pointer;
import com.sun.jna.platform.win32.*;
import com.sun.jna.ptr.IntByReference;
import java.lang.reflect.Field;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import sun.security.action.GetPropertyAction;
import java.io.*;
@ -31,10 +33,25 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.sun.jna.platform.win32.WinBase.STILL_ACTIVE;
import static java.util.Objects.requireNonNull;
public class ProcessImplForWin32 extends Process {
private static final sun.misc.JavaIOFileDescriptorAccess fdAccess
= sun.misc.SharedSecrets.getJavaIOFileDescriptorAccess();
private static final Field FD_HANDLE;
static {
if (!OSUtils.isWindows()) {
throw new RuntimeException("ProcessImplForWin32 can be only initialized in " +
"Windows environment, but current OS is " + OSUtils.getOSName());
}
try {
FD_HANDLE = requireNonNull(FileDescriptor.class.getDeclaredField("handle"));
FD_HANDLE.setAccessible(true);
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
private static final int PIPE_SIZE = 4096 + 24;
@ -46,6 +63,22 @@ public class ProcessImplForWin32 extends Process {
private static final WinNT.HANDLE JAVA_INVALID_HANDLE_VALUE = new WinNT.HANDLE(Pointer.createConstant(-1));
private static void setHandle(FileDescriptor obj, long handle) {
try {
FD_HANDLE.set(obj, handle);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
private static long getHandle(FileDescriptor obj) {
try {
return (Long) FD_HANDLE.get(obj);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
/**
* Open a file for writing. If {@code append} is {@code true} then the file
* is opened for atomic append directly and a FileOutputStream constructed
@ -63,7 +96,7 @@ public class ProcessImplForWin32 extends Process {
sm.checkWrite(path);
long handle = openForAtomicAppend(path);
final FileDescriptor fd = new FileDescriptor();
fdAccess.setHandle(fd, handle);
setHandle(fd, handle);
return AccessController.doPrivileged(
new PrivilegedAction<FileOutputStream>() {
public FileOutputStream run() {
@ -102,30 +135,30 @@ public class ProcessImplForWin32 extends Process {
if (redirects[0] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[0] = -1L;
else if (redirects[0] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[0] = fdAccess.getHandle(FileDescriptor.in);
stdHandles[0] = getHandle(FileDescriptor.in);
else {
f0 = new FileInputStream(redirects[0].file());
stdHandles[0] = fdAccess.getHandle(f0.getFD());
stdHandles[0] = getHandle(f0.getFD());
}
if (redirects[1] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[1] = -1L;
else if (redirects[1] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[1] = fdAccess.getHandle(FileDescriptor.out);
stdHandles[1] = getHandle(FileDescriptor.out);
else {
f1 = newFileOutputStream(redirects[1].file(),
redirects[1].append());
stdHandles[1] = fdAccess.getHandle(f1.getFD());
stdHandles[1] = getHandle(f1.getFD());
}
if (redirects[2] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[2] = -1L;
else if (redirects[2] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[2] = fdAccess.getHandle(FileDescriptor.err);
stdHandles[2] = getHandle(FileDescriptor.err);
else {
f2 = newFileOutputStream(redirects[2].file(),
redirects[2].append());
stdHandles[2] = fdAccess.getHandle(f2.getFD());
stdHandles[2] = getHandle(f2.getFD());
}
}
@ -442,7 +475,7 @@ public class ProcessImplForWin32 extends Process {
stdin_stream = ProcessBuilderForWin32.NullOutputStream.INSTANCE;
else {
FileDescriptor stdin_fd = new FileDescriptor();
fdAccess.setHandle(stdin_fd, stdHandles[0]);
setHandle(stdin_fd, stdHandles[0]);
stdin_stream = new BufferedOutputStream(
new FileOutputStream(stdin_fd));
}
@ -451,7 +484,7 @@ public class ProcessImplForWin32 extends Process {
stdout_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE;
else {
FileDescriptor stdout_fd = new FileDescriptor();
fdAccess.setHandle(stdout_fd, stdHandles[1]);
setHandle(stdout_fd, stdHandles[1]);
stdout_stream = new BufferedInputStream(
new FileInputStream(stdout_fd));
}
@ -460,7 +493,7 @@ public class ProcessImplForWin32 extends Process {
stderr_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE;
else {
FileDescriptor stderr_fd = new FileDescriptor();
fdAccess.setHandle(stderr_fd, stdHandles[2]);
setHandle(stderr_fd, stdHandles[2]);
stderr_stream = new FileInputStream(stderr_fd);
}
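These hunks replace the sun.misc.SharedSecrets accessor with reflection on FileDescriptor's private handle field, guarded by an OS check. The core get/set-via-Field pattern is sketched below on a hypothetical Holder class rather than the real FileDescriptor:

import java.lang.reflect.Field;

public class ReflectiveHandleExample {
    // hypothetical class standing in for java.io.FileDescriptor
    static class Holder {
        private long handle = -1L;
    }

    private static final Field HANDLE_FIELD;

    static {
        try {
            HANDLE_FIELD = Holder.class.getDeclaredField("handle");
            HANDLE_FIELD.setAccessible(true); // required: the field is private
        } catch (NoSuchFieldException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    static void setHandle(Holder obj, long handle) {
        try {
            HANDLE_FIELD.set(obj, handle);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }

    static long getHandle(Holder obj) {
        try {
            return (Long) HANDLE_FIELD.get(obj);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        Holder holder = new Holder();
        setHandle(holder, 42L);
        System.out.println(getHandle(holder)); // prints 42
    }
}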

2
dolphinscheduler-common/src/main/resources/common.properties

@ -91,4 +91,4 @@ yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
# If it is a single resourcemanager, you only need to configure one host name. If it is resourcemanager HA, the default configuration is fine
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
kerberos.expire.time=7

55
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java

@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.task;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.junit.Assert;
import org.junit.Test;
import java.util.LinkedList;
import java.util.List;
public class FlinkParametersTest {
@Test
public void getResourceFilesList() {
FlinkParameters flinkParameters = new FlinkParameters();
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertTrue(flinkParameters.getResourceFilesList().isEmpty());
ResourceInfo mainResource = new ResourceInfo();
mainResource.setRes("testFlinkMain-1.0.0-SNAPSHOT.jar");
flinkParameters.setMainJar(mainResource);
List<ResourceInfo> resourceInfos = new LinkedList<>();
ResourceInfo resourceInfo1 = new ResourceInfo();
resourceInfo1.setRes("testFlinkParameters1.jar");
resourceInfos.add(resourceInfo1);
flinkParameters.setResourceList(resourceInfos);
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertEquals(2, flinkParameters.getResourceFilesList().size());
ResourceInfo resourceInfo2 = new ResourceInfo();
resourceInfo2.setRes("testFlinkParameters2.jar");
resourceInfos.add(resourceInfo2);
flinkParameters.setResourceList(resourceInfos);
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertEquals(3, flinkParameters.getResourceFilesList().size());
}
}

17
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java

@ -30,29 +30,32 @@ public class FileUtilsTest {
@Test
public void suffix() {
Assert.assertEquals(FileUtils.suffix("ninfor.java"),"java");
Assert.assertEquals("java", FileUtils.suffix("ninfor.java"));
Assert.assertEquals("", FileUtils.suffix(null));
Assert.assertEquals("", FileUtils.suffix(""));
Assert.assertEquals("", FileUtils.suffix("ninfor-java"));
}
@Test
public void testGetDownloadFilename() {
PowerMockito.mockStatic(DateUtils.class);
PowerMockito.when(DateUtils.getCurrentTime(YYYYMMDDHHMMSS)).thenReturn("20190101101059");
Assert.assertEquals(FileUtils.getDownloadFilename("test"),
"/tmp/dolphinscheduler/download/20190101101059/test");
Assert.assertEquals("/tmp/dolphinscheduler/download/20190101101059/test",
FileUtils.getDownloadFilename("test"));
}
@Test
public void testGetUploadFilename() {
Assert.assertEquals(FileUtils.getUploadFilename("aaa","bbb"),
"/tmp/dolphinscheduler/aaa/resources/bbb");
Assert.assertEquals("/tmp/dolphinscheduler/aaa/resources/bbb",
FileUtils.getUploadFilename("aaa","bbb"));
}
@Test
public void testGetProcessExecDir() {
String dir = FileUtils.getProcessExecDir(1,2,3, 4);
Assert.assertEquals(dir, "/tmp/dolphinscheduler/exec/process/1/2/3/4");
Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3/4", dir);
dir = FileUtils.getProcessExecDir(1,2,3);
Assert.assertEquals(dir, "/tmp/dolphinscheduler/exec/process/1/2/3");
Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3", dir);
}
@Test
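This and the following test hunks flip Assert.assertEquals to the expected-first argument order that JUnit assumes when building failure messages; a small illustration of why the order matters:

import org.junit.Assert;
import org.junit.Test;

public class AssertOrderExample {
    @Test
    public void expectedComesFirst() {
        String actual = "java";
        // on failure JUnit reports "expected:<java> but was:<...>",
        // so swapping the arguments produces a misleading message
        Assert.assertEquals("java", actual);
    }
}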

2
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java

@ -38,7 +38,7 @@ public class HttpUtilsTest {
String result = HttpUtils.get("https://github.com/manifest.json");
Assert.assertNotNull(result);
JSONObject jsonObject = JSON.parseObject(result);
Assert.assertEquals(jsonObject.getString("name"), "GitHub");
Assert.assertEquals("GitHub", jsonObject.getString("name"));
result = HttpUtils.get("https://123.333.111.33/ccc");
Assert.assertNull(result);

6
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java

@ -29,13 +29,13 @@ public class IpUtilsTest {
long longNumber = IpUtils.ipToLong(ip);
long longNumber2 = IpUtils.ipToLong(ip2);
System.out.println(longNumber);
Assert.assertEquals(longNumber, 3232263681L);
Assert.assertEquals(longNumber2, 0L);
Assert.assertEquals(3232263681L, longNumber);
Assert.assertEquals(0L, longNumber2);
String ip3 = "255.255.255.255";
long longNumber3 = IpUtils.ipToLong(ip3);
System.out.println(longNumber3);
Assert.assertEquals(longNumber3, 4294967295L);
Assert.assertEquals(4294967295L, longNumber3);
}

22
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java

@ -16,10 +16,10 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.Direct;
import org.apache.dolphinscheduler.common.process.Property;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import org.junit.Assert;
@ -40,8 +40,8 @@ public class JSONUtilsTest {
String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}";
Map<String,String> models = JSONUtils.toMap(jsonStr);
Assert.assertEquals(models.get("id"), "1001");
Assert.assertEquals(models.get("name"), "Jobs");
Assert.assertEquals("1001", models.get("id"));
Assert.assertEquals("Jobs", models.get("name"));
}
@ -53,9 +53,9 @@ public class JSONUtilsTest {
property.setType(DataType.VARCHAR);
property.setValue("sssssss");
String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}";
Property property1 = JSONObject.parseObject(str, Property.class);
Property property1 = JSON.parseObject(str, Property.class);
Direct direct = property1.getDirect();
Assert.assertEquals(direct , Direct.IN);
Assert.assertEquals(Direct.IN, direct);
}
@ -66,12 +66,12 @@ public class JSONUtilsTest {
List<LinkedHashMap> maps = JSONUtils.toList(str,
LinkedHashMap.class);
Assert.assertEquals(maps.size(), 1);
Assert.assertEquals(maps.get(0).get("mysql service name"), "mysql200");
Assert.assertEquals(maps.get(0).get("mysql address"), "192.168.xx.xx");
Assert.assertEquals(maps.get(0).get("port"), "3306");
Assert.assertEquals(maps.get(0).get("no index of number"), "80");
Assert.assertEquals(maps.get(0).get("database client connections"), "190");
Assert.assertEquals(1, maps.size());
Assert.assertEquals("mysql200", maps.get(0).get("mysql service name"));
Assert.assertEquals("192.168.xx.xx", maps.get(0).get("mysql address"));
Assert.assertEquals("3306", maps.get(0).get("port"));
Assert.assertEquals("80", maps.get(0).get("no index of number"));
Assert.assertEquals("190", maps.get(0).get("database client connections"));
}
public String list2String(){

8
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import org.apache.commons.lang.time.DateUtils;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
@ -91,13 +91,13 @@ public class ParameterUtilsTest {
globalParamList.add(property);
String result2 = ParameterUtils.curingGlobalParams(null,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime);
Assert.assertEquals(result2, JSONObject.toJSONString(globalParamList));
Assert.assertEquals(result2, JSON.toJSONString(globalParamList));
String result3 = ParameterUtils.curingGlobalParams(globalParamMap,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,null);
Assert.assertEquals(result3, JSONObject.toJSONString(globalParamList));
Assert.assertEquals(result3, JSON.toJSONString(globalParamList));
String result4 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime);
Assert.assertEquals(result4, JSONObject.toJSONString(globalParamList));
Assert.assertEquals(result4, JSON.toJSONString(globalParamList));
//test var $ startsWith
globalParamMap.put("bizDate","${system.biz.date}");

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java

@ -61,7 +61,7 @@ public class MonitorDBDao {
return new PostgrePerformance().getMonitorRecord(conn);
}
}catch (Exception e) {
logger.error("SQLException " + e);
logger.error("SQLException: {}", e.getMessage(), e);
}finally {
try {
if (conn != null) {

134
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java

@ -50,25 +50,28 @@ public class TaskRecordDao {
static {
try {
conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES);
}catch (ConfigurationException e){
logger.error("load configuration exception",e);
} catch (ConfigurationException e) {
logger.error("load configuration exception", e);
System.exit(1);
}
}
/**
* get task record flag
* get task record flag
*
* @return whether startup taskrecord
*/
public static boolean getTaskRecordFlag(){
return conf.getBoolean(Constants.TASK_RECORD_FLAG);
public static boolean getTaskRecordFlag() {
return conf.getBoolean(Constants.TASK_RECORD_FLAG);
}
/**
* create connection
*
* @return connection
*/
private static Connection getConn() {
if(!getTaskRecordFlag()){
if (!getTaskRecordFlag()) {
return null;
}
String driver = "com.mysql.jdbc.Driver";
@ -90,101 +93,96 @@ public class TaskRecordDao {
/**
* generate where sql string
*
* @param filterMap filterMap
* @return sql string
*/
private static String getWhereString(Map<String, String> filterMap) {
if(filterMap.size() ==0){
if (filterMap.size() == 0) {
return "";
}
String result = " where 1=1 ";
Object taskName = filterMap.get("taskName");
if(taskName != null && StringUtils.isNotEmpty(taskName.toString())){
if (taskName != null && StringUtils.isNotEmpty(taskName.toString())) {
result += " and PROC_NAME like concat('%', '" + taskName.toString() + "', '%') ";
}
Object taskDate = filterMap.get("taskDate");
if(taskDate != null && StringUtils.isNotEmpty(taskDate.toString())){
if (taskDate != null && StringUtils.isNotEmpty(taskDate.toString())) {
result += " and PROC_DATE='" + taskDate.toString() + "'";
}
Object state = filterMap.get("state");
if(state != null && StringUtils.isNotEmpty(state.toString())){
if (state != null && StringUtils.isNotEmpty(state.toString())) {
result += " and NOTE='" + state.toString() + "'";
}
Object sourceTable = filterMap.get("sourceTable");
if(sourceTable!= null && StringUtils.isNotEmpty(sourceTable.toString())){
result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString()+ "', '%')";
if (sourceTable != null && StringUtils.isNotEmpty(sourceTable.toString())) {
result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString() + "', '%')";
}
Object targetTable = filterMap.get("targetTable");
if(sourceTable!= null && StringUtils.isNotEmpty(targetTable.toString())){
result += " and TARGET_TAB like concat('%', '"+ targetTable.toString()+"', '%') " ;
if (sourceTable != null && StringUtils.isNotEmpty(targetTable.toString())) {
result += " and TARGET_TAB like concat('%', '" + targetTable.toString() + "', '%') ";
}
Object start = filterMap.get("startTime");
if(start != null && StringUtils.isNotEmpty(start.toString())){
if (start != null && StringUtils.isNotEmpty(start.toString())) {
result += " and STARTDATE>='" + start.toString() + "'";
}
Object end = filterMap.get("endTime");
if(end != null && StringUtils.isNotEmpty(end.toString())){
result += " and ENDDATE>='" + end.toString()+ "'";
if (end != null && StringUtils.isNotEmpty(end.toString())) {
result += " and ENDDATE>='" + end.toString() + "'";
}
return result;
}
/**
* count task record
*
* @param filterMap filterMap
* @param table table
* @param table table
* @return task record count
*/
public static int countTaskRecord(Map<String, String> filterMap, String table){
public static int countTaskRecord(Map<String, String> filterMap, String table) {
int count = 0;
Connection conn = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
conn = getConn();
if(conn == null){
if (conn == null) {
return count;
}
String sql = String.format("select count(1) as count from %s", table);
sql += getWhereString(filterMap);
pstmt = conn.prepareStatement(sql);
ResultSet rs = pstmt.executeQuery();
while(rs.next()){
rs = pstmt.executeQuery();
while (rs.next()) {
count = rs.getInt("count");
break;
}
} catch (SQLException e) {
logger.error("Exception ", e);
}finally {
try {
if(pstmt != null) {
pstmt.close();
}
if(conn != null){
conn.close();
}
} catch (SQLException e) {
logger.error("Exception ", e);
}
} finally {
closeResource(rs, pstmt, conn);
}
return count;
}
/**
* query task record by filter map paging
*
* @param filterMap filterMap
* @param table table
* @param table table
* @return task record list
*/
public static List<TaskRecord> queryAllTaskRecord(Map<String,String> filterMap , String table) {
public static List<TaskRecord> queryAllTaskRecord(Map<String, String> filterMap, String table) {
String sql = String.format("select * from %s", table);
sql += getWhereString(filterMap);
@ -194,9 +192,9 @@ public class TaskRecordDao {
sql += String.format(" order by STARTDATE desc limit %d,%d", offset, pageSize);
List<TaskRecord> recordList = new ArrayList<>();
try{
try {
recordList = getQueryResult(sql);
}catch (Exception e){
} catch (Exception e) {
logger.error("Exception ", e);
}
return recordList;
@ -204,6 +202,7 @@ public class TaskRecordDao {
/**
* convert result set to task record
*
* @param resultSet resultSet
* @return task record
* @throws SQLException if error throws SQLException
@ -232,6 +231,7 @@ public class TaskRecordDao {
/**
* query task list by select sql
*
* @param selectSql select sql
* @return task record list
*/
@ -239,65 +239,81 @@ public class TaskRecordDao {
List<TaskRecord> recordList = new ArrayList<>();
Connection conn = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
try {
conn = getConn();
if(conn == null){
if (conn == null) {
return recordList;
}
pstmt = conn.prepareStatement(selectSql);
ResultSet rs = pstmt.executeQuery();
rs = pstmt.executeQuery();
while(rs.next()){
while (rs.next()) {
TaskRecord taskRecord = convertToTaskRecord(rs);
recordList.add(taskRecord);
}
} catch (SQLException e) {
logger.error("Exception ", e);
}finally {
try {
if(pstmt != null) {
pstmt.close();
}
if(conn != null){
conn.close();
}
} catch (SQLException e) {
logger.error("Exception ", e);
}
} finally {
closeResource(rs, pstmt, conn);
}
return recordList;
}
/**
* according to procname and procdate query task record
*
* @param procName procName
* @param procDate procDate
* @return task record status
*/
public static TaskRecordStatus getTaskRecordState(String procName,String procDate){
public static TaskRecordStatus getTaskRecordState(String procName, String procDate) {
String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'"
,procName,procDate + "%");
, procName, procDate + "%");
List<TaskRecord> taskRecordList = getQueryResult(sql);
// contains no record and sql exception
if (CollectionUtils.isEmpty(taskRecordList)){
if (CollectionUtils.isEmpty(taskRecordList)) {
// exception
return TaskRecordStatus.EXCEPTION;
}else if (taskRecordList.size() > 1){
} else if (taskRecordList.size() > 1) {
return TaskRecordStatus.EXCEPTION;
}else {
} else {
TaskRecord taskRecord = taskRecordList.get(0);
if (taskRecord == null){
if (taskRecord == null) {
return TaskRecordStatus.EXCEPTION;
}
Long targetRowCount = taskRecord.getTargetRowCount();
if (targetRowCount <= 0){
if (targetRowCount <= 0) {
return TaskRecordStatus.FAILURE;
}else {
} else {
return TaskRecordStatus.SUCCESS;
}
}
}
private static void closeResource(ResultSet rs, PreparedStatement pstmt, Connection conn) {
if (rs != null) {
try {
rs.close();
} catch (SQLException e) {
logger.error("Exception ", e);
}
}
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
logger.error("Exception ", e);
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
logger.error("Exception ", e);
}
}
}
}
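closeResource above centralizes the ResultSet/PreparedStatement/Connection cleanup that each finally block used to repeat. For new code the same guarantee can come from JDBC try-with-resources, which closes the three resources in reverse order automatically; a minimal sketch with an illustrative query (the table name is assumed trusted):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class CountExample {
    public static int countRows(String jdbcUrl, String user, String password, String table) {
        String sql = "select count(1) as count from " + table; // table name assumed trusted
        try (Connection conn = DriverManager.getConnection(jdbcUrl, user, password);
             PreparedStatement pstmt = conn.prepareStatement(sql);
             ResultSet rs = pstmt.executeQuery()) {
            return rs.next() ? rs.getInt("count") : 0;
        } catch (SQLException e) {
            // mirrors the DAO's behaviour of logging and returning a default
            return 0;
        }
    }
}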

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java

@ -39,7 +39,7 @@ public class HiveDataSource extends BaseDataSource {
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) {
if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) {
jdbcUrl += "/";
}

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java

@ -39,7 +39,7 @@ public class MySQLDataSource extends BaseDataSource {
@Override
public String getJdbcUrl() {
String address = getAddress();
if (address.lastIndexOf("/") != (address.length() - 1)) {
if (address.lastIndexOf('/') != (address.length() - 1)) {
address += "/";
}
String jdbcUrl = address + getDatabase();

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java

@ -40,7 +40,7 @@ public class PostgreDataSource extends BaseDataSource {
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) {
if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) {
jdbcUrl += "/";
}

8
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java

@ -16,10 +16,10 @@
*/
package org.apache.dolphinscheduler.dao.entity;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.process.Property;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
@ -266,7 +266,7 @@ public class ProcessDefinition {
}
public void setGlobalParams(String globalParams) {
this.globalParamList = JSONObject.parseArray(globalParams, Property.class);
this.globalParamList = JSON.parseArray(globalParams, Property.class);
this.globalParams = globalParams;
}
@ -275,7 +275,7 @@ public class ProcessDefinition {
}
public void setGlobalParamList(List<Property> globalParamList) {
this.globalParams = JSONObject.toJSONString(globalParamList);
this.globalParams = JSON.toJSONString(globalParamList);
this.globalParamList = globalParamList;
}
@ -283,7 +283,7 @@ public class ProcessDefinition {
List<Property> propList;
if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) {
propList = JSONObject.parseArray(globalParams, Property.class);
propList = JSON.parseArray(globalParams, Property.class);
globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
}

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java

@ -506,7 +506,7 @@ public class ProcessInstance {
* check this process is start complement data
* @return whether complement data
*/
public Boolean isComplementData(){
public boolean isComplementData(){
if(!StringUtils.isNotEmpty(this.historyCmd)){
return false;
}

4
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java

@ -373,7 +373,7 @@ public class TaskInstance {
}
public Boolean isSubProcess(){
public boolean isSubProcess(){
return TaskType.SUB_PROCESS.getDescp().equals(this.taskType);
}
@ -442,7 +442,7 @@ public class TaskInstance {
this.executorName = executorName;
}
public Boolean isTaskComplete() {
public boolean isTaskComplete() {
return this.getState().typeIsPause()
|| this.getState().typeIsSuccess()

23
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java

@ -29,15 +29,7 @@ import java.sql.SQLException;
*/
public class MysqlUpgradeDao extends UpgradeDao {
public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class);
/**
* init
*/
@Override
protected void init() {
}
public static final Logger logger = LoggerFactory.getLogger(MysqlUpgradeDao.class);
/**
* mysql upgrade dao holder
@ -69,12 +61,7 @@ public class MysqlUpgradeDao extends UpgradeDao {
try {
conn = dataSource.getConnection();
rs = conn.getMetaData().getTables(null, null, tableName, null);
if (rs.next()) {
return true;
} else {
return false;
}
return rs.next();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
@ -96,11 +83,7 @@ public class MysqlUpgradeDao extends UpgradeDao {
try {
conn = dataSource.getConnection();
ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName);
if (rs.next()) {
return true;
} else {
return false;
}
return rs.next();
} catch (SQLException e) {
logger.error(e.getMessage(),e);

39
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java

@ -30,16 +30,8 @@ import java.sql.SQLException;
*/
public class PostgresqlUpgradeDao extends UpgradeDao {
public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class);
private static final String schema = getSchema();
/**
* init
*/
@Override
protected void init() {
}
public static final Logger logger = LoggerFactory.getLogger(PostgresqlUpgradeDao.class);
private static final String SCHEMA = getSchema();
/**
* postgresql upgrade dao holder
@ -58,16 +50,6 @@ public class PostgresqlUpgradeDao extends UpgradeDao {
return PostgresqlUpgradeDaoHolder.INSTANCE;
}
/**
* init schema
* @param initSqlPath initSqlPath
*/
@Override
public void initSchema(String initSqlPath) {
super.initSchema(initSqlPath);
}
/**
* getSchema
* @return schema
@ -107,13 +89,9 @@ public class PostgresqlUpgradeDao extends UpgradeDao {
try {
conn = dataSource.getConnection();
rs = conn.getMetaData().getTables(null, schema, tableName, null);
if (rs.next()) {
return true;
} else {
return false;
}
rs = conn.getMetaData().getTables(null, SCHEMA, tableName, null);
return rs.next();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
@ -135,13 +113,8 @@ public class PostgresqlUpgradeDao extends UpgradeDao {
ResultSet rs = null;
try {
conn = dataSource.getConnection();
rs = conn.getMetaData().getColumns(null,schema,tableName,columnName);
if (rs.next()) {
return true;
} else {
return false;
}
rs = conn.getMetaData().getColumns(null, SCHEMA,tableName,columnName);
return rs.next();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);

3
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java

@ -27,7 +27,6 @@ import java.util.Date;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.dao.MonitorDBDao;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -37,7 +36,7 @@ import org.slf4j.LoggerFactory;
*/
public class MysqlPerformance extends BaseDBPerformance{
private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class);
private static Logger logger = LoggerFactory.getLogger(MysqlPerformance.class);
/**

3
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java

@ -24,7 +24,6 @@ import java.util.Date;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.dao.MonitorDBDao;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -34,7 +33,7 @@ import org.slf4j.LoggerFactory;
*/
public class PostgrePerformance extends BaseDBPerformance {
private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class);
private static Logger logger = LoggerFactory.getLogger(PostgrePerformance.class);
/**
* get monitor record

6
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java

@ -77,7 +77,7 @@ public class ProcessDefinitionMapperTest {
//update
processDefinition.setUpdateTime(new Date());
int update = processDefinitionMapper.updateById(processDefinition);
Assert.assertEquals(update, 1);
Assert.assertEquals(1, update);
processDefinitionMapper.deleteById(processDefinition.getId());
}
@ -88,7 +88,7 @@ public class ProcessDefinitionMapperTest {
public void testDelete(){
ProcessDefinition processDefinition = insertOne();
int delete = processDefinitionMapper.deleteById(processDefinition.getId());
Assert.assertEquals(delete, 1);
Assert.assertEquals(1, delete);
}
/**
@ -189,7 +189,7 @@ public class ProcessDefinitionMapperTest {
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryDefinitionListByIdList(array);
processDefinitionMapper.deleteById(processDefinition.getId());
processDefinitionMapper.deleteById(processDefinition1.getId());
Assert.assertEquals(processDefinitions.size(), 2);
Assert.assertEquals(2, processDefinitions.size());
}

53
dolphinscheduler-dist/pom.xml

@ -220,10 +220,7 @@
${basedir}/../dolphinscheduler-alert/src/main/resources
</location>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>**/*.ftl</include>
<include>**/*.*</include>
</includes>
</source>
@ -232,9 +229,7 @@
${basedir}/../dolphinscheduler-common/src/main/resources
</location>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>**/*.*</include>
</includes>
</source>
@ -243,10 +238,7 @@
${basedir}/../dolphinscheduler-dao/src/main/resources
</location>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>**/*.yml</include>
<include>**/*.*</include>
</includes>
</source>
@ -255,9 +247,7 @@
${basedir}/../dolphinscheduler-api/src/main/resources
</location>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>**/*.*</include>
</includes>
</source>
@ -266,13 +256,19 @@
${basedir}/../dolphinscheduler-server/src/main/resources
</location>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.json</include>
<include>config/*.*</include>
</includes>
</source>
<source>
<location>
${basedir}/../dolphinscheduler-service/src/main/resources
</location>
<includes>
<include>*.*</include>
</includes>
</source>
<source>
<location>
${basedir}/../script
@ -342,14 +338,6 @@
</includes>
</source>
<source>
<location>
${basedir}/../dolphinscheduler-ui
</location>
<includes>
<include>install-dolphinscheduler-ui.sh</include>
</includes>
</source>
<source>
<location>
${basedir}/release-docs
@ -362,7 +350,7 @@
</sources>
</mapping>
<mapping>
<directory>/opt/soft/${project.build.finalName}/dist</directory>
<directory>/opt/soft/${project.build.finalName}/ui</directory>
<filemode>755</filemode>
<username>root</username>
<groupname>root</groupname>
@ -391,6 +379,14 @@
<include>**/*.*</include>
</includes>
</source>
<source>
<location>
${basedir}/../sql
</location>
<includes>
<include>soft_version</include>
</includes>
</source>
</sources>
</mapping>
@ -405,7 +401,7 @@
${basedir}/../script
</location>
<includes>
<include>**/*.*</include>
<include>*.sh</include>
</includes>
</source>
@ -416,6 +412,9 @@
<preinstallScriptlet>
<script>mkdir -p /opt/soft</script>
</preinstallScriptlet>
<postinstallScriptlet>
<script>rm -rf /opt/soft/dolphinscheduler ; ln -s /opt/soft/apache-dolphinscheduler-incubating-${project.version} /opt/soft/dolphinscheduler</script>
</postinstallScriptlet>
<postremoveScriptlet>
<script>rm -rf /opt/soft/apache-dolphinscheduler-incubating-${project.version}</script>
</postremoveScriptlet>

4
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java

@ -30,12 +30,12 @@ public class Ping implements Serializable {
/**
* ping body
*/
protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
protected static final ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
/**
* request command body
*/
private static byte[] EMPTY_BODY_ARRAY = new byte[0];
private static final byte[] EMPTY_BODY_ARRAY = new byte[0];
private static final ByteBuf PING_BUF;

4
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java

@ -30,12 +30,12 @@ public class Pong implements Serializable {
/**
* pong body
*/
protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
protected static final ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
/**
* pong command body
*/
private static byte[] EMPTY_BODY_ARRAY = new byte[0];
private static final byte[] EMPTY_BODY_ARRAY = new byte[0];
/**
* ping byte buffer

3
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.remote.utils;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
/**
@ -31,7 +32,7 @@ public class Constants {
/**
* charset
*/
public static final Charset UTF8 = Charset.forName("UTF-8");
public static final Charset UTF8 = StandardCharsets.UTF_8;
/**
* cpus

21
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.server.log;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.utils.IOUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.log.*;
@ -116,16 +117,8 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
}catch (IOException e){
logger.error("get file bytes error",e);
}finally {
if (bos != null){
try {
bos.close();
} catch (IOException ignore) {}
}
if (in != null){
try {
in.close();
} catch (IOException ignore) {}
}
IOUtils.closeQuietly(bos);
IOUtils.closeQuietly(in);
}
return new byte[0];
}
@ -146,7 +139,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
} catch (IOException e) {
logger.error("read file error",e);
}
return Collections.EMPTY_LIST;
return Collections.emptyList();
}
/**
@ -168,11 +161,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
}catch (IOException e){
logger.error("read file error",e);
}finally {
try {
if (br != null){
br.close();
}
} catch (IOException ignore) {}
IOUtils.closeQuietly(br);
}
return "";
}

36
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java

@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread;
import org.apache.dolphinscheduler.server.worker.WorkerServer;
import org.apache.dolphinscheduler.server.zk.ZKMasterClient;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
@ -37,8 +38,10 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
@ -46,7 +49,9 @@ import java.util.concurrent.TimeUnit;
/**
* master server
*/
@ComponentScan("org.apache.dolphinscheduler")
@ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = {
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = {WorkerServer.class})
})
public class MasterServer implements IStoppable {
/**
@ -112,7 +117,7 @@ public class MasterServer implements IStoppable {
masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread");
heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.DEFAULT_MASTER_HEARTBEAT_THREAD_NUM);
heartbeatMasterService = ThreadUtils.newThreadScheduledExecutor("Master-Main-Thread",Constants.DEFAULT_MASTER_HEARTBEAT_THREAD_NUM, false);
// heartbeat thread implement
Runnable heartBeatThread = heartBeatThread();
@ -147,23 +152,17 @@ public class MasterServer implements IStoppable {
}
logger.error("start Quartz failed", e);
}
/**
* register hooks, which are called before the process exits
*/
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
if (zkMasterClient.getActiveMasterNum() <= 1) {
zkMasterClient.getAlertDao().sendServerStopedAlert(
1, OSUtils.getHost(), "Master-Server");
}
stop("shutdownhook");
}
}));
}
@PreDestroy
public void destroy() {
// master server exit alert
if (zkMasterClient.getActiveMasterNum() <= 1) {
zkMasterClient.getAlertDao().sendServerStopedAlert(
1, OSUtils.getHost(), "Master-Server");
}
stop("shutdownhook");
}
/**
* gracefully stop
@ -244,7 +243,7 @@ public class MasterServer implements IStoppable {
*/
private Runnable heartBeatThread(){
logger.info("start master heart beat thread...");
Runnable heartBeatThread = new Runnable() {
return new Runnable() {
@Override
public void run() {
if(Stopper.isRunning()) {
@ -258,7 +257,6 @@ public class MasterServer implements IStoppable {
}
}
};
return heartBeatThread;
}
}
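The MasterServer hunks replace the JVM shutdown hook with a Spring @PreDestroy callback, so the exit alert runs when the application context closes. A minimal, hedged sketch of that lifecycle wiring; the class name and method bodies are illustrative only:

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

@SpringBootApplication
public class LifecycleExample {

    public static void main(String[] args) {
        new SpringApplicationBuilder(LifecycleExample.class)
                .web(WebApplicationType.NONE)
                .run(args);
    }

    @PostConstruct
    public void start() {
        // equivalent of run(): start schedulers, register with ZooKeeper, ...
        System.out.println("server started");
    }

    @PreDestroy
    public void destroy() {
        // runs when the context shuts down, replacing Runtime.getRuntime().addShutdownHook(...)
        System.out.println("server stopping, send alerts and release resources here");
    }
}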

67
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.common.Constants;
@ -68,7 +68,7 @@ public class MasterExecThread implements Runnable {
/**
* running TaskNode
*/
private final Map<MasterBaseTaskExecThread,Future<Boolean>> activeTaskNode = new ConcurrentHashMap<MasterBaseTaskExecThread,Future<Boolean>>();
private final Map<MasterBaseTaskExecThread,Future<Boolean>> activeTaskNode = new ConcurrentHashMap<>();
/**
* task exec service
@ -78,7 +78,7 @@ public class MasterExecThread implements Runnable {
/**
* submit failure nodes
*/
private Boolean taskFailedSubmit = false;
private boolean taskFailedSubmit = false;
/**
* recover node id list
@ -454,7 +454,7 @@ public class MasterExecThread implements Runnable {
// process instance id
taskInstance.setProcessInstanceId(processInstance.getId());
// task instance node json
taskInstance.setTaskJson(JSONObject.toJSONString(taskNode));
taskInstance.setTaskJson(JSON.toJSONString(taskNode));
// task instance type
taskInstance.setTaskType(taskNode.getType());
// task instance whether alert
@ -652,7 +652,7 @@ public class MasterExecThread implements Runnable {
continue;
}
if(task.getState().typeIsPause() || task.getState().typeIsCancel()){
logger.info("task {} stopped, the state is {}", task.getName(), task.getState().toString());
logger.info("task {} stopped, the state is {}", task.getName(), task.getState());
}else{
addTaskToStandByList(task);
}
@ -685,11 +685,12 @@ public class MasterExecThread implements Runnable {
}
ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState();
// conditions task would not return failed.
if(depTaskState.typeIsFailure()){
if(!haveConditionsAfterNode(depsNode) && !dag.getNode(depsNode).isConditionsTask()){
return DependResult.FAILED;
}
if(depTaskState.typeIsFailure()
&& !haveConditionsAfterNode(depsNode)
&& !dag.getNode(depsNode).isConditionsTask()){
return DependResult.FAILED;
}
if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){
return DependResult.WAITING;
}
@ -737,7 +738,7 @@ public class MasterExecThread implements Runnable {
*
* @return Boolean whether has failed task
*/
private Boolean hasFailedTask(){
private boolean hasFailedTask(){
if(this.taskFailedSubmit){
return true;
@ -753,7 +754,7 @@ public class MasterExecThread implements Runnable {
*
* @return Boolean whether process instance failed
*/
private Boolean processFailed(){
private boolean processFailed(){
if(hasFailedTask()) {
if(processInstance.getFailureStrategy() == FailureStrategy.END){
return true;
@ -769,9 +770,9 @@ public class MasterExecThread implements Runnable {
* whether task for waiting thread
* @return Boolean whether has waiting thread task
*/
private Boolean hasWaitingThreadTask(){
private boolean hasWaitingThreadTask(){
List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD);
return waitingList.size() > 0;
return CollectionUtils.isNotEmpty(waitingList);
}
/**
@ -787,7 +788,7 @@ public class MasterExecThread implements Runnable {
}
List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE);
if(pauseList.size() > 0
if(CollectionUtils.isNotEmpty(pauseList)
|| !isComplementEnd()
|| readyToSubmitTaskList.size() > 0){
return ExecutionStatus.PAUSE;
@ -827,7 +828,8 @@ public class MasterExecThread implements Runnable {
if(state == ExecutionStatus.READY_STOP){
List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP);
List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL);
if(stopList.size() > 0 || killList.size() > 0 || !isComplementEnd()){
if(CollectionUtils.isNotEmpty(stopList)
|| CollectionUtils.isNotEmpty(killList) || !isComplementEnd()){
return ExecutionStatus.STOP;
}else{
return ExecutionStatus.SUCCESS;
@ -852,7 +854,7 @@ public class MasterExecThread implements Runnable {
* whether complement end
* @return Boolean whether is complement end
*/
private Boolean isComplementEnd() {
private boolean isComplementEnd() {
if(!processInstance.isComplementData()){
return true;
}
@ -877,8 +879,8 @@ public class MasterExecThread implements Runnable {
logger.info(
"work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}",
processInstance.getId(), processInstance.getName(),
processInstance.getState().toString(), state.toString(),
processInstance.getCommandType().toString());
processInstance.getState(), state,
processInstance.getCommandType());
processInstance.setState(state);
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
instance.setState(state);
@ -894,8 +896,7 @@ public class MasterExecThread implements Runnable {
* @return DependResult
*/
private DependResult getDependResultForTask(TaskInstance taskInstance){
DependResult inner = isTaskDepsComplete(taskInstance.getName());
return inner;
return isTaskDepsComplete(taskInstance.getName());
}
/**
@ -920,7 +921,7 @@ public class MasterExecThread implements Runnable {
* has retry task in standby
* @return Boolean whether has retry task in standby
*/
private Boolean hasRetryTaskInStandBy(){
private boolean hasRetryTaskInStandBy(){
for (Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) {
if(entry.getValue().getState().typeIsFailure()){
return true;
@ -958,7 +959,7 @@ public class MasterExecThread implements Runnable {
continue;
}
logger.info("task :{}, id:{} complete, state is {} ",
task.getName(), task.getId(), task.getState().toString());
task.getName(), task.getId(), task.getState());
// node success , post node submit
if(task.getState() == ExecutionStatus.SUCCESS){
completeTaskList.put(task.getName(), task);
@ -990,7 +991,7 @@ public class MasterExecThread implements Runnable {
completeTaskList.put(task.getName(), task);
}
// send alert
if(this.recoverToleranceFaultTaskList.size() > 0){
if(CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)){
alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList);
this.recoverToleranceFaultTaskList.clear();
}
@ -1034,10 +1035,7 @@ public class MasterExecThread implements Runnable {
Date now = new Date();
long runningTime = DateUtils.diffMin(now, processInstance.getStartTime());
if(runningTime > processInstance.getTimeout()){
return true;
}
return false;
return runningTime > processInstance.getTimeout();
}
/**
@ -1062,7 +1060,7 @@ public class MasterExecThread implements Runnable {
TaskInstance taskInstance = taskExecThread.getTaskInstance();
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
if(taskInstance.getState().typeIsFinished()){
if(taskInstance != null && taskInstance.getState().typeIsFinished()){
continue;
}
@ -1081,22 +1079,19 @@ public class MasterExecThread implements Runnable {
* @param taskInstance task instance
* @return Boolean
*/
private Boolean retryTaskIntervalOverTime(TaskInstance taskInstance){
private boolean retryTaskIntervalOverTime(TaskInstance taskInstance){
if(taskInstance.getState() != ExecutionStatus.FAILURE){
return Boolean.TRUE;
return true;
}
if(taskInstance.getId() == 0 ||
taskInstance.getMaxRetryTimes() ==0 ||
taskInstance.getRetryInterval() == 0 ){
return Boolean.TRUE;
return true;
}
Date now = new Date();
long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime());
// task retry does not over time, return false
if(taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT >= failedTimeInterval){
return Boolean.FALSE;
}
return Boolean.TRUE;
return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval;
}
/**
@ -1189,7 +1184,7 @@ public class MasterExecThread implements Runnable {
*/
private List<String> getRecoveryNodeNameList(){
List<String> recoveryNodeNameList = new ArrayList<>();
if(recoverNodeIdList.size() > 0) {
if(CollectionUtils.isNotEmpty(recoverNodeIdList)) {
for (TaskInstance task : recoverNodeIdList) {
recoveryNodeNameList.add(task.getName());
}
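The MasterExecThread hunks above apply three recurring cleanups: boxed Boolean return types become the boolean primitive, if (cond) { return true; } return false; collapses into return cond;, and list.size() > 0 becomes a null-safe emptiness helper. A minimal standalone sketch of the same pattern, using org.apache.commons.collections4.CollectionUtils as a stand-in for the project's own CollectionUtils wrapper:

import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections4.CollectionUtils;

public class StateChecks {

    // boolean instead of Boolean: no accidental null, no auto-unboxing NPE.
    static boolean hasWaitingTask(List<String> waitingList) {
        // null-safe replacement for waitingList.size() > 0
        return CollectionUtils.isNotEmpty(waitingList);
    }

    // Returning the comparison directly replaces the if / return true / return false block.
    static boolean overTime(long runningMinutes, long timeoutMinutes) {
        return runningMinutes > timeoutMinutes;
    }

    public static void main(String[] args) {
        System.out.println(hasWaitingTask(null));               // false
        System.out.println(hasWaitingTask(Arrays.asList("t"))); // true
        System.out.println(overTime(90, 60));                   // true
    }
}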

11
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
@ -25,7 +26,6 @@ import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import com.alibaba.fastjson.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -64,7 +64,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
/**
* whether already Killed,default false
*/
private Boolean alreadyKilled = false;
private boolean alreadyKilled = false;
/**
* submit task instance and wait complete
@ -98,7 +98,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
logger.info("wait task: process id: {}, task id:{}, task name:{} complete",
this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName());
// task time out
Boolean checkTimeout = false;
boolean checkTimeout = false;
TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter();
if(taskTimeoutParameter.getEnable()){
TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy();
@ -176,7 +176,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
*/
private TaskTimeoutParameter getTaskTimeoutParameter(){
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class);
TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
return taskNode.getTaskTimeoutParameter();
}
@ -189,7 +189,6 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
private long getRemaintime(long timeoutSeconds) {
Date startTime = taskInstance.getStartTime();
long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000;
long remainTime = timeoutSeconds - usedTime;
return remainTime;
return timeoutSeconds - usedTime;
}
}
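This file (and several below) replaces com.alibaba.fastjson.JSONObject.parseObject with the com.alibaba.fastjson.JSON facade, which exposes the same static parse methods. A small sketch of the call, with a hypothetical TimeoutConfig POJO standing in for TaskNode:

import com.alibaba.fastjson.JSON;

public class JsonFacadeDemo {

    // Hypothetical POJO; fastjson binds JSON fields through the setters.
    public static class TimeoutConfig {
        private boolean enable;
        private int interval;
        public boolean isEnable() { return enable; }
        public void setEnable(boolean enable) { this.enable = enable; }
        public int getInterval() { return interval; }
        public void setInterval(int interval) { this.interval = interval; }
    }

    public static void main(String[] args) {
        String taskJson = "{\"enable\":true,\"interval\":30}";
        TimeoutConfig config = JSON.parseObject(taskJson, TimeoutConfig.class);
        System.out.println(config.isEnable() + " / " + config.getInterval()); // true / 30
    }
}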

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java

@ -30,7 +30,7 @@ import org.springframework.context.annotation.ComponentScan;
@ComponentScan("org.apache.dolphinscheduler")
public class MonitorServer implements CommandLineRunner {
private static Integer ARGS_LENGTH = 4;
private static final Integer ARGS_LENGTH = 4;
private static final Logger logger = LoggerFactory.getLogger(MonitorServer.class);

16
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java

@ -17,12 +17,11 @@
package org.apache.dolphinscheduler.server.utils;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
@ -32,12 +31,7 @@ import java.util.List;
* spark args utils
*/
public class FlinkArgsUtils {
/**
* logger of FlinkArgsUtils
*/
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(FlinkArgsUtils.class);
private static final String LOCAL_DEPLOY_MODE = "local";
/**
* build args
* @param param flink parameters
@ -52,7 +46,7 @@ public class FlinkArgsUtils {
deployMode = tmpDeployMode;
}
if (!"local".equals(deployMode)) {
if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
args.add(Constants.FLINK_RUN_MODE); //-m
args.add(Constants.FLINK_YARN_CLUSTER); //yarn-cluster
@ -113,12 +107,12 @@ public class FlinkArgsUtils {
String queue = param.getQueue();
if (StringUtils.isNotEmpty(others)) {
if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals("local")) {
if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) {
args.add(Constants.FLINK_QUEUE);
args.add(param.getQueue());
}
args.add(others);
} else if (StringUtils.isNotEmpty(queue) && !deployMode.equals("local")) {
} else if (StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) {
args.add(Constants.FLINK_QUEUE);
args.add(param.getQueue());
}
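FlinkArgsUtils drops an unused logger and names the repeated "local" literal once as LOCAL_DEPLOY_MODE. A tiny sketch of the same extraction:

public class DeployModeCheck {

    // Naming the literal once keeps the several "local" comparisons from drifting apart.
    private static final String LOCAL_DEPLOY_MODE = "local";

    static boolean isClusterMode(String deployMode) {
        return !LOCAL_DEPLOY_MODE.equals(deployMode);
    }

    public static void main(String[] args) {
        System.out.println(isClusterMode("cluster")); // true
        System.out.println(isClusterMode("local"));   // false
    }
}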

22
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.utils;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import java.util.Date;
@ -70,17 +71,16 @@ public class ParamUtils {
Map.Entry<String, Property> en = iter.next();
Property property = en.getValue();
if (property.getValue() != null && property.getValue().length() > 0){
if (property.getValue().startsWith("$")){
/**
* local parameter refers to global parameter with the same name
* note: the global parameters of the process instance here are solidified parameters,
* and there are no variables in them.
*/
String val = property.getValue();
val = ParameterUtils.convertParameterPlaceholders(val, timeParams);
property.setValue(val);
}
if (StringUtils.isNotEmpty(property.getValue())
&& property.getValue().startsWith("$")){
/**
* local parameter refers to global parameter with the same name
* note: the global parameters of the process instance here are solidified parameters,
* and there are no variables in them.
*/
String val = property.getValue();
val = ParameterUtils.convertParameterPlaceholders(val, timeParams);
property.setValue(val);
}
}
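In ParamUtils the nested null/length check and the startsWith("$") check merge into a single guard built on StringUtils.isNotEmpty. A sketch of that guard, using org.apache.commons.lang3.StringUtils in place of the project's own StringUtils and a hypothetical resolve helper instead of ParameterUtils.convertParameterPlaceholders:

import java.util.Collections;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;

public class PlaceholderGuard {

    // Hypothetical substitution: expects the ${name} form and leaves unknown names untouched.
    static String resolve(String value, Map<String, String> globals) {
        if (StringUtils.isNotEmpty(value) && value.startsWith("$")) {
            String key = value.replace("${", "").replace("}", "");
            return globals.getOrDefault(key, value);
        }
        return value;
    }

    public static void main(String[] args) {
        Map<String, String> globals = Collections.singletonMap("bizdate", "2020-02-16");
        System.out.println(resolve("${bizdate}", globals)); // 2020-02-16
        System.out.println(resolve("literal", globals));    // literal
        System.out.println(resolve("", globals));           // empty values are skipped
    }
}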

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.server.utils;
import java.nio.charset.StandardCharsets;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
@ -29,7 +30,6 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
@ -297,7 +297,7 @@ public class ProcessUtils {
File f = new File(commandFile);
if (!f.exists()) {
FileUtils.writeStringToFile(new File(commandFile), sb.toString(), Charset.forName("UTF-8"));
FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8);
}
String runCmd = "sh " + commandFile;
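ProcessUtils (and DataxTask further down) swaps Charset.forName("UTF-8") for the StandardCharsets.UTF_8 constant when writing the command file, avoiding the charset-name lookup entirely. A sketch with commons-io, assuming an illustrative file path:

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.FileUtils;

public class WriteCommandFile {

    public static void main(String[] args) throws IOException {
        File commandFile = new File("/tmp/kill_yarn_job.cmd"); // illustrative path
        if (!commandFile.exists()) {
            // StandardCharsets.UTF_8 is a constant Charset, so there is no string
            // lookup and no UnsupportedCharsetException path to handle.
            FileUtils.writeStringToFile(commandFile, "#!/bin/sh\necho kill-job\n", StandardCharsets.UTF_8);
        }
    }
}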

45
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java

@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.master.MasterServer;
import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.runner.FetchTaskThread;
@ -43,10 +44,13 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
@ -56,7 +60,10 @@ import java.util.concurrent.TimeUnit;
/**
* worker server
*/
@ComponentScan("org.apache.dolphinscheduler")
@SpringBootApplication
@ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = {
@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = {MasterServer.class})
})
public class WorkerServer implements IStoppable {
/**
@ -104,11 +111,6 @@ public class WorkerServer implements IStoppable {
*/
private ExecutorService fetchTaskExecutorService;
/**
* CountDownLatch latch
*/
private CountDownLatch latch;
@Value("${server.is-combined-server:false}")
private Boolean isCombinedServer;
@ -149,7 +151,7 @@ public class WorkerServer implements IStoppable {
this.fetchTaskExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor");
heartbeatWorkerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.DEFAUL_WORKER_HEARTBEAT_THREAD_NUM);
heartbeatWorkerService = ThreadUtils.newThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.DEFAUL_WORKER_HEARTBEAT_THREAD_NUM, false);
// heartbeat thread implement
Runnable heartBeatThread = heartBeatThread();
@ -171,29 +173,15 @@ public class WorkerServer implements IStoppable {
// submit fetch task thread
fetchTaskExecutorService.execute(fetchTaskThread);
}
/**
* register hooks, which are called before the process exits
*/
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
// worker server exit alert
if (zkWorkerClient.getActiveMasterNum() <= 1) {
alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server");
}
stop("shutdownhook");
}
}));
//let the main thread await
latch = new CountDownLatch(1);
if (!isCombinedServer) {
try {
latch.await();
} catch (InterruptedException ignore) {
}
@PreDestroy
public void destroy() {
// worker server exit alert
if (zkWorkerClient.getActiveMasterNum() <= 1) {
alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server");
}
stop("shutdownhook");
}
@Override
@ -251,7 +239,6 @@ public class WorkerServer implements IStoppable {
}catch (Exception e){
logger.warn("zookeeper service stopped exception:{}",e.getMessage());
}
latch.countDown();
logger.info("zookeeper service stopped");
} catch (Exception e) {
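WorkerServer drops the hand-rolled Runtime shutdown hook and the CountDownLatch that parked the main thread: the class becomes a @SpringBootApplication (excluding MasterServer from component scanning) and the exit alert moves into a @PreDestroy method that Spring invokes when the context closes. A minimal sketch of that lifecycle callback, assuming a plain spring-context setup rather than the full worker bootstrap:

import javax.annotation.PreDestroy;

import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.stereotype.Component;

@Component
public class ShutdownAlertBean {

    @PreDestroy
    public void destroy() {
        // Called when the application context closes; Spring Boot registers a JVM
        // shutdown hook that closes the context, so this covers the old shutdown-hook path.
        System.out.println("worker-server stopping: send exit alert here");
    }

    public static void main(String[] args) {
        AnnotationConfigApplicationContext ctx =
                new AnnotationConfigApplicationContext(ShutdownAlertBean.class);
        ctx.close(); // triggers @PreDestroy on managed beans
    }
}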

10
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java

@ -19,7 +19,7 @@ package org.apache.dolphinscheduler.server.worker.runner;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
@ -93,7 +93,7 @@ public class TaskScheduleThread implements Runnable {
logger.info("script path : {}", taskInstance.getExecutePath());
// task node
TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class);
TaskNode taskNode = JSON.parseObject(taskInstance.getTaskJson(), TaskNode.class);
// get resource files
List<String> resourceFiles = createProjectResFiles(taskNode);
@ -176,7 +176,7 @@ public class TaskScheduleThread implements Runnable {
String globalParamsStr = taskInstance.getProcessInstance().getGlobalParams();
if (globalParamsStr != null) {
List<Property> globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class);
List<Property> globalParamsList = JSON.parseArray(globalParamsStr, Property.class);
globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)));
}
return globalParamsMap;
@ -296,9 +296,7 @@ public class TaskScheduleThread implements Runnable {
if (baseParam != null) {
List<String> projectResourceFiles = baseParam.getResourceFilesList();
if (projectResourceFiles != null) {
projectFiles.addAll(projectResourceFiles);
}
projectFiles.addAll(projectResourceFiles);
}
return new ArrayList<>(projectFiles);
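TaskScheduleThread now parses the global parameter list through the JSON facade before collecting it into a map. A sketch with a hypothetical Param POJO in place of Property:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import com.alibaba.fastjson.JSON;

public class GlobalParamsDemo {

    // Hypothetical prop/value POJO standing in for Property.
    public static class Param {
        private String prop;
        private String value;
        public String getProp() { return prop; }
        public void setProp(String prop) { this.prop = prop; }
        public String getValue() { return value; }
        public void setValue(String value) { this.value = value; }
    }

    public static void main(String[] args) {
        String globalParamsStr = "[{\"prop\":\"bizdate\",\"value\":\"2020-02-16\"}]";
        List<Param> globalParamsList = JSON.parseArray(globalParamsStr, Param.class);
        Map<String, String> globalParamsMap = globalParamsList.stream()
                .collect(Collectors.toMap(Param::getProp, Param::getValue));
        System.out.println(globalParamsMap); // {bizdate=2020-02-16}
    }
}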

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java

@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.server.worker.task.datax;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
@ -209,7 +209,7 @@ public class DataxTask extends AbstractTask {
logger.debug("datax job json : {}", root.toString());
// create datax json file
FileUtils.writeStringToFile(new File(fileName), root.toString(), Charset.forName("UTF-8"));
FileUtils.writeStringToFile(new File(fileName), root.toString(), StandardCharsets.UTF_8);
return fileName;
}

29
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.server.worker.task.http;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.Charsets;
import org.apache.dolphinscheduler.common.Constants;
@ -26,6 +27,7 @@ import org.apache.dolphinscheduler.common.process.HttpProperty;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.http.HttpParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
@ -51,6 +53,7 @@ import org.slf4j.Logger;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -98,7 +101,7 @@ public class HttpTask extends AbstractTask {
@Override
public void init() {
logger.info("http task params {}", taskProps.getTaskParams());
this.httpParameters = JSONObject.parseObject(taskProps.getTaskParams(), HttpParameters.class);
this.httpParameters = JSON.parseObject(taskProps.getTaskParams(), HttpParameters.class);
if (!httpParameters.checkParameters()) {
throw new RuntimeException("http task params is not valid");
@ -146,12 +149,12 @@ public class HttpTask extends AbstractTask {
processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
List<HttpProperty> httpPropertyList = new ArrayList<>();
if(httpParameters.getHttpParams() != null && httpParameters.getHttpParams().size() > 0){
if(CollectionUtils.isNotEmpty(httpParameters.getHttpParams() )){
for (HttpProperty httpProperty: httpParameters.getHttpParams()) {
String jsonObject = JSONObject.toJSONString(httpProperty);
String jsonObject = JSON.toJSONString(httpProperty);
String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap));
logger.info("http request params:{}",params);
httpPropertyList.add(JSONObject.parseObject(params,HttpProperty.class));
httpPropertyList.add(JSON.parseObject(params,HttpProperty.class));
}
}
addRequestParams(builder,httpPropertyList);
@ -176,8 +179,7 @@ public class HttpTask extends AbstractTask {
if (entity == null) {
return null;
}
String webPage = EntityUtils.toString(entity, StandardCharsets.UTF_8.name());
return webPage;
return EntityUtils.toString(entity, StandardCharsets.UTF_8.name());
}
/**
@ -186,8 +188,7 @@ public class HttpTask extends AbstractTask {
* @return status code
*/
protected int getStatusCode(CloseableHttpResponse httpResponse) {
int status = httpResponse.getStatusLine().getStatusCode();
return status;
return httpResponse.getStatusLine().getStatusCode();
}
/**
@ -252,7 +253,7 @@ public class HttpTask extends AbstractTask {
* @param httpPropertyList http property list
*/
protected void addRequestParams(RequestBuilder builder,List<HttpProperty> httpPropertyList) {
if(httpPropertyList != null && httpPropertyList.size() > 0){
if(CollectionUtils.isNotEmpty(httpPropertyList)){
JSONObject jsonParam = new JSONObject();
for (HttpProperty property: httpPropertyList){
if(property.getHttpParametersType() != null){
@ -276,12 +277,10 @@ public class HttpTask extends AbstractTask {
* @param httpPropertyList http property list
*/
protected void setHeaders(HttpUriRequest request,List<HttpProperty> httpPropertyList) {
if(httpPropertyList != null && httpPropertyList.size() > 0){
for (HttpProperty property: httpPropertyList){
if(property.getHttpParametersType() != null) {
if (property.getHttpParametersType().equals(HttpParametersType.HEADERS)) {
request.addHeader(property.getProp(), property.getValue());
}
if(CollectionUtils.isNotEmpty(httpPropertyList)){
for (HttpProperty property: httpPropertyList) {
if (HttpParametersType.HEADERS.equals(property.getHttpParametersType())) {
request.addHeader(property.getProp(), property.getValue());
}
}
}
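In HttpTask the inner null check disappears because the comparison is flipped to constant-first: HttpParametersType.HEADERS.equals(property.getHttpParametersType()) is simply false for a null type. A sketch with hypothetical stand-ins for the enum and property types:

import java.util.Arrays;
import java.util.List;

public class HeaderFilter {

    // Hypothetical stand-ins for HttpParametersType and HttpProperty.
    enum ParamType { PARAMETER, BODY, HEADERS }

    static class Prop {
        final String name;
        final String value;
        final ParamType type;
        Prop(String name, String value, ParamType type) {
            this.name = name;
            this.value = value;
            this.type = type;
        }
    }

    static void addHeaders(List<Prop> props) {
        for (Prop p : props) {
            // Constant-first equals() is null-safe: a null type just compares false.
            if (ParamType.HEADERS.equals(p.type)) {
                System.out.println("header " + p.name + "=" + p.value);
            }
        }
    }

    public static void main(String[] args) {
        addHeaders(Arrays.asList(
                new Prop("Content-Type", "application/json", ParamType.HEADERS),
                new Prop("id", "7", null))); // silently skipped, no NullPointerException
    }
}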

17
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java

@ -122,22 +122,19 @@ public class MapReduceTask extends AbstractYarnTask {
}
// main class
if(mapreduceParameters.getProgramType() !=null ){
if(mapreduceParameters.getProgramType()!= ProgramType.PYTHON){
if(StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){
result.add(mapreduceParameters.getMainClass());
}
}
if(!ProgramType.PYTHON.equals(mapreduceParameters.getProgramType())
&& StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){
result.add(mapreduceParameters.getMainClass());
}
// others
if (StringUtils.isNotEmpty(mapreduceParameters.getOthers())) {
String others = mapreduceParameters.getOthers();
if(!others.contains(Constants.MR_QUEUE)){
if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) {
result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue()));
}
if (!others.contains(Constants.MR_QUEUE)
&& StringUtils.isNotEmpty(mapreduceParameters.getQueue())) {
result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue()));
}
result.add(mapreduceParameters.getOthers());
}else if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) {
result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue()));

31
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.worker.task.processdure;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import com.cronutils.utils.StringUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DataType;
@ -60,11 +60,6 @@ public class ProcedureTask extends AbstractTask {
*/
private ProcessService processService;
/**
* base datasource
*/
private BaseDataSource baseDataSource;
/**
* constructor
* @param taskProps task props
@ -75,7 +70,7 @@ public class ProcedureTask extends AbstractTask {
logger.info("procedure task params {}", taskProps.getTaskParams());
this.procedureParameters = JSONObject.parseObject(taskProps.getTaskParams(), ProcedureParameters.class);
this.procedureParameters = JSON.parseObject(taskProps.getTaskParams(), ProcedureParameters.class);
// check parameters
if (!procedureParameters.checkParameters()) {
@ -117,7 +112,7 @@ public class ProcedureTask extends AbstractTask {
// load class
DataSourceFactory.loadClass(dataSource.getType());
// get datasource
baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(),
BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(),
dataSource.getConnectionParams());
// get jdbc connection
@ -163,7 +158,7 @@ public class ProcedureTask extends AbstractTask {
stmt.setQueryTimeout(taskProps.getTaskTimeout());
}
Map<Integer,Property> outParameterMap = new HashMap<>();
if (userDefParamsList != null && userDefParamsList.size() > 0){
if (CollectionUtils.isNotEmpty(userDefParamsList)){
int index = 1;
for (Property property : userDefParamsList){
logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}"
@ -237,31 +232,31 @@ public class ProcedureTask extends AbstractTask {
private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException {
switch (dataType){
case VARCHAR:
logger.info("out prameter key : {} , value : {}",prop,stmt.getString(index));
logger.info("out prameter varchar key : {} , value : {}",prop,stmt.getString(index));
break;
case INTEGER:
logger.info("out prameter key : {} , value : {}", prop, stmt.getInt(index));
logger.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index));
break;
case LONG:
logger.info("out prameter key : {} , value : {}",prop,stmt.getLong(index));
logger.info("out prameter long key : {} , value : {}",prop,stmt.getLong(index));
break;
case FLOAT:
logger.info("out prameter key : {} , value : {}",prop,stmt.getFloat(index));
logger.info("out prameter float key : {} , value : {}",prop,stmt.getFloat(index));
break;
case DOUBLE:
logger.info("out prameter key : {} , value : {}",prop,stmt.getDouble(index));
logger.info("out prameter double key : {} , value : {}",prop,stmt.getDouble(index));
break;
case DATE:
logger.info("out prameter key : {} , value : {}",prop,stmt.getDate(index));
logger.info("out prameter date key : {} , value : {}",prop,stmt.getDate(index));
break;
case TIME:
logger.info("out prameter key : {} , value : {}",prop,stmt.getTime(index));
logger.info("out prameter time key : {} , value : {}",prop,stmt.getTime(index));
break;
case TIMESTAMP:
logger.info("out prameter key : {} , value : {}",prop,stmt.getTimestamp(index));
logger.info("out prameter timestamp key : {} , value : {}",prop,stmt.getTimestamp(index));
break;
case BOOLEAN:
logger.info("out prameter key : {} , value : {}",prop, stmt.getBoolean(index));
logger.info("out prameter boolean key : {} , value : {}",prop, stmt.getBoolean(index));
break;
default:
break;

9
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sql;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
@ -92,7 +93,7 @@ public class SqlTask extends AbstractTask {
super(taskProps, logger);
logger.info("sql task params {}", taskProps.getTaskParams());
this.sqlParameters = JSONObject.parseObject(taskProps.getTaskParams(), SqlParameters.class);
this.sqlParameters = JSON.parseObject(taskProps.getTaskParams(), SqlParameters.class);
if (!sqlParameters.checkParameters()) {
throw new RuntimeException("sql task params is not valid");
@ -308,16 +309,16 @@ public class SqlTask extends AbstractTask {
}
resultJSONArray.add(mapOfColValues);
}
logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
logger.debug("execute sql : {}", JSON.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
// if there is a result set
if ( !resultJSONArray.isEmpty() ) {
if (StringUtils.isNotEmpty(sqlParameters.getTitle())) {
sendAttachment(sqlParameters.getTitle(),
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
JSON.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
}else{
sendAttachment(taskProps.getNodeName() + " query resultsets ",
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
JSON.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
}
}

19
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java

@ -69,17 +69,16 @@ public class MysqlSourceGenerator implements ISourceGenerator {
result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns());
}
}else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()){
if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){
String srcQuery = sourceMysqlParameter.getSrcQuerySql();
if(srcQuery.toLowerCase().contains("where")){
srcQuery += " AND "+"$CONDITIONS";
}else{
srcQuery += " WHERE $CONDITIONS";
}
result.append(" --query \'"+srcQuery+"\'");
}else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){
String srcQuery = sourceMysqlParameter.getSrcQuerySql();
if(srcQuery.toLowerCase().contains("where")){
srcQuery += " AND "+"$CONDITIONS";
}else{
srcQuery += " WHERE $CONDITIONS";
}
result.append(" --query \'"+srcQuery+"\'");
}
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
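The Sqoop source generator still appends the $CONDITIONS placeholder that Sqoop replaces with split boundaries; the hunk only folds the query-type and non-empty-SQL checks into one condition. The append logic on its own, as a sketch:

public class SqoopQueryBuilder {

    // Sqoop free-form imports require $CONDITIONS; add it with AND when the
    // query already has a WHERE clause, otherwise start one.
    static String appendConditions(String srcQuery) {
        if (srcQuery.toLowerCase().contains("where")) {
            return srcQuery + " AND $CONDITIONS";
        }
        return srcQuery + " WHERE $CONDITIONS";
    }

    public static void main(String[] args) {
        System.out.println("--query '" + appendConditions("SELECT * FROM person_2") + "'");
        // --query 'SELECT * FROM person_2 WHERE $CONDITIONS'
    }
}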

11
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java

@ -75,12 +75,11 @@ public class MysqlTargetGenerator implements ITargetGenerator {
result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'");
}
if(targetMysqlParameter.isUpdate()){
if(StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())&&
StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){
result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())
.append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode());
}
if(targetMysqlParameter.isUpdate()
&& StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())
&& StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){
result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())
.append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode());
}
}
}

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.master;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.utils.DateUtils;
@ -85,7 +85,7 @@ public class MasterExecThreadTest {
Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, "2020-01-01 00:00:00");
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, "2020-01-31 23:00:00");
Mockito.when(processInstance.getCommandParam()).thenReturn(JSONObject.toJSONString(cmdParam));
Mockito.when(processInstance.getCommandParam()).thenReturn(JSON.toJSONString(cmdParam));
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setGlobalParamMap(Collections.EMPTY_MAP);
processDefinition.setGlobalParamList(Collections.EMPTY_LIST);

24
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java

@ -87,35 +87,35 @@ public class FlinkArgsUtilsTest {
}
//Expected values and order
assertEquals(result.size(),20);
assertEquals(20, result.size());
assertEquals(result.get(0),"-m");
assertEquals(result.get(1),"yarn-cluster");
assertEquals("-m", result.get(0));
assertEquals("yarn-cluster", result.get(1));
assertEquals(result.get(2),"-ys");
assertEquals("-ys", result.get(2));
assertSame(Integer.valueOf(result.get(3)),slot);
assertEquals(result.get(4),"-ynm");
assertEquals("-ynm",result.get(4));
assertEquals(result.get(5),appName);
assertEquals(result.get(6),"-yn");
assertEquals("-yn", result.get(6));
assertSame(Integer.valueOf(result.get(7)),taskManager);
assertEquals(result.get(8),"-yjm");
assertEquals("-yjm", result.get(8));
assertEquals(result.get(9),jobManagerMemory);
assertEquals(result.get(10),"-ytm");
assertEquals("-ytm", result.get(10));
assertEquals(result.get(11),taskManagerMemory);
assertEquals(result.get(12),"-d");
assertEquals("-d", result.get(12));
assertEquals(result.get(13),"-c");
assertEquals("-c", result.get(13));
assertEquals(result.get(14),mainClass);
assertEquals(result.get(15),mainJar.getRes());
assertEquals(result.get(16),mainArgs);
assertEquals(result.get(17),"--qu");
assertEquals("--qu", result.get(17));
assertEquals(result.get(18),queue);
assertEquals(result.get(19),others);
@ -125,7 +125,7 @@ public class FlinkArgsUtilsTest {
param1.setQueue(queue);
param1.setDeployMode(mode);
result = FlinkArgsUtils.buildArgs(param1);
assertEquals(result.size(),5);
assertEquals(5, result.size());
}
}
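The FlinkArgsUtilsTest changes reorder assertEquals arguments into the JUnit convention of expected first, actual second, so failure messages read correctly. A tiny illustration:

import static org.junit.Assert.assertEquals;

import java.util.Arrays;
import java.util.List;

import org.junit.Test;

public class ArgsOrderTest {

    @Test
    public void expectedValueComesFirst() {
        List<String> result = Arrays.asList("-m", "yarn-cluster");
        // assertEquals(expected, actual): with the literal first, a failure prints
        // "expected:<-m> but was:<...>" instead of the reverse.
        assertEquals(2, result.size());
        assertEquals("-m", result.get(0));
        assertEquals("yarn-cluster", result.get(1));
    }
}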

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.worker.shell;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.TaskNode;
@ -68,7 +68,7 @@ public class ShellCommandExecutorTest {
TaskInstance taskInstance = processService.findTaskInstanceById(7657);
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class);
TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
taskProps.setTaskParams(taskNode.getParams());

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.worker.sql;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
@ -112,7 +112,7 @@ public class SqlExecutorTest {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class);
TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
taskProps.setTaskParams(taskNode.getParams());

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java

@ -27,6 +27,7 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -172,7 +173,7 @@ public class ShellTaskTest {
@Test
public void testHandleForWindows() throws Exception {
try {
PowerMockito.when(OSUtils.isWindows()).thenReturn(true);
Assume.assumeTrue(OSUtils.isWindows());
shellTask.handle();
Assert.assertTrue(true);
} catch (Error | Exception e) {
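ShellTaskTest stops forcing OSUtils.isWindows() to true via PowerMock and instead uses JUnit's Assume, which marks the test as skipped rather than failed on non-Windows machines. A sketch of the same guard:

import org.junit.Assume;
import org.junit.Test;

public class WindowsOnlyTest {

    @Test
    public void runsOnlyOnWindows() {
        // Assume.assumeTrue aborts the test as ignored when the condition is false,
        // so the Windows-only code path is exercised only where it can actually run.
        Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("windows"));
        // ... exercise the Windows-specific shell handling here
    }
}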

10
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java

@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sqoop;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.dao.entity.DataSource;
@ -74,7 +74,7 @@ public class SqoopTaskTest {
@Test
public void testGenerator(){
String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters1 = JSONObject.parseObject(data1,SqoopParameters.class);
SqoopParameters sqoopParameters1 = JSON.parseObject(data1,SqoopParameters.class);
SqoopJobGenerator generator = new SqoopJobGenerator();
String script = generator.generateSqoopJob(sqoopParameters1);
@ -82,21 +82,21 @@ public class SqoopTaskTest {
Assert.assertEquals(expected, script);
String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters2 = JSONObject.parseObject(data2,SqoopParameters.class);
SqoopParameters sqoopParameters2 = JSON.parseObject(data2,SqoopParameters.class);
String script2 = generator.generateSqoopJob(sqoopParameters2);
String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
Assert.assertEquals(expected2, script2);
String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters3 = JSONObject.parseObject(data3,SqoopParameters.class);
SqoopParameters sqoopParameters3 = JSON.parseObject(data3,SqoopParameters.class);
String script3 = generator.generateSqoopJob(sqoopParameters3);
String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
Assert.assertEquals(expected3, script3);
String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters4 = JSONObject.parseObject(data4,SqoopParameters.class);
SqoopParameters sqoopParameters4 = JSON.parseObject(data4,SqoopParameters.class);
String script4 = generator.generateSqoopJob(sqoopParameters4);
String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";

8
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java

@ -40,7 +40,7 @@ public class LogClientService {
/**
* request time out
*/
private final long logRequestTimeout = 10 * 1000;
private static final long LOG_REQUEST_TIMEOUT = 10 * 1000L;
/**
* construct client
@ -75,7 +75,7 @@ public class LogClientService {
final Address address = new Address(host, port);
try {
Command command = request.convert2Command();
Command response = this.client.sendSync(address, command, logRequestTimeout);
Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
if(response != null){
RollViewLogResponseCommand rollReviewLog = FastJsonSerializer.deserialize(
response.getBody(), RollViewLogResponseCommand.class);
@ -103,7 +103,7 @@ public class LogClientService {
final Address address = new Address(host, port);
try {
Command command = request.convert2Command();
Command response = this.client.sendSync(address, command, logRequestTimeout);
Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
if(response != null){
ViewLogResponseCommand viewLog = FastJsonSerializer.deserialize(
response.getBody(), ViewLogResponseCommand.class);
@ -131,7 +131,7 @@ public class LogClientService {
final Address address = new Address(host, port);
try {
Command command = request.convert2Command();
Command response = this.client.sendSync(address, command, logRequestTimeout);
Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
if(response != null){
GetLogBytesResponseCommand getLog = FastJsonSerializer.deserialize(
response.getBody(), GetLogBytesResponseCommand.class);

2
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java

@ -147,7 +147,7 @@ public class PermissionCheck<T> {
List<T> unauthorizedList = processService.listUnauthorized(userId,needChecks,authorizationType);
// if exist unauthorized resource
if(CollectionUtils.isNotEmpty(unauthorizedList)){
logger.error("user {} didn't has permission of {}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList.toString());
logger.error("user {} didn't has permission of {}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList);
throw new RuntimeException(String.format("user %s didn't has permission of %s %s", user.getUserName(), authorizationType.getDescp(), unauthorizedList.get(0)));
}
}

7
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.service.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cronutils.model.Cron;
import org.apache.commons.lang.ArrayUtils;
@ -207,7 +208,7 @@ public class ProcessService {
CommandType commandType = command.getCommandType();
if(cmdTypeMap.containsKey(commandType)){
JSONObject cmdParamObj = (JSONObject) JSONObject.parse(command.getCommandParam());
JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam());
JSONObject tempObj;
int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING);
@ -215,7 +216,7 @@ public class ProcessService {
// for all commands
for (Command tmpCommand:commands){
if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){
tempObj = (JSONObject) JSONObject.parse(tmpCommand.getCommandParam());
tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam());
if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){
isNeedCreate = false;
break;
@ -309,7 +310,7 @@ public class ProcessService {
for (TaskNode taskNode : taskNodeList){
String parameter = taskNode.getParams();
if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){
SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class);
SubProcessParameters subProcessParam = JSON.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids);
}

55
dolphinscheduler-ui/pom.xml

@ -89,6 +89,61 @@
</build>
</profile>
<profile>
<id>rpmbuild</id>
<build>
<plugins>
<plugin>
<groupId>com.github.eirslett</groupId>
<artifactId>frontend-maven-plugin</artifactId>
<version>${frontend-maven-plugin.version}</version>
<executions>
<execution>
<id>install node and npm</id>
<goals>
<goal>install-node-and-npm</goal>
</goals>
<configuration>
<nodeVersion>${node.version}</nodeVersion>
<npmVersion>${npm.version}</npmVersion>
</configuration>
</execution>
<execution>
<id>npm install node-sass --unsafe-perm</id>
<goals>
<goal>npm</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<arguments>install node-sass --unsafe-perm</arguments>
</configuration>
</execution>
<execution>
<id>npm install</id>
<goals>
<goal>npm</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<arguments>install</arguments>
</configuration>
</execution>
<execution>
<id>npm run build:release</id>
<goals>
<goal>npm</goal>
</goals>
<configuration>
<arguments>run build:release</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>nginx</id>
<build>

6
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js

@ -26,7 +26,7 @@ import Permissions from '@/module/permissions'
* @desc tooltip
*/
const toolOper = (dagThis) => {
let disabled =!dagThis.$store.state.dag.isDetails// Permissions.getAuth() === false ? false : !dagThis.$store.state.dag.isDetails
let disabled =!!dagThis.$store.state.dag.isDetails// Permissions.getAuth() === false ? false : !dagThis.$store.state.dag.isDetails
return [
{
code: 'pointer',
@ -49,13 +49,13 @@ const toolOper = (dagThis) => {
{
code: 'download',
icon: 'ans-icon-download',
disable: !!dagThis.type,
disable: !dagThis.type,
desc: `${i18n.$t('Download')}`
},
{
code: 'screen',
icon: 'ans-icon-max',
disable: disabled,
disable: false,
desc: `${i18n.$t('Full Screen')}`
}
]

4
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue

@ -25,7 +25,7 @@
:key="v"
v-for="(item,v) in tasksTypeList"
@mousedown="_getDagId(v)">
<div data-toggle="tooltip" :title="item.description">
<div data-toggle="tooltip" :title="item.desc">
<div class="icos" :class="'icos-' + v" ></div>
</div>
</div>
@ -293,7 +293,7 @@
let is = true
let code = ''
if (!item.disable) {
if (item.disable) {
return
}

12
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue

@ -252,6 +252,7 @@
v-if="taskType === 'CONDITIONS'"
ref="CONDITIONS"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
:backfill-item="backfillItem"
:pre-node="preNode">
</m-conditions>
@ -438,6 +439,8 @@
},
_cacheItem () {
this.conditionResult.successNode[0] = this.successBranch
this.conditionResult.failedNode[0] = this.failedBranch
this.$emit('cacheTaskInfo', {
item: {
type: this.taskType,
@ -446,12 +449,15 @@
params: this.params,
description: this.description,
runFlag: this.runFlag,
conditionResult: this.conditionResult,
dependence: this.cacheDependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroupId: this.workerGroupId
workerGroupId: this.workerGroupId,
status: this.status,
branch: this.branch
},
fromThis: this
})
@ -657,7 +663,9 @@
retryInterval: this.retryInterval,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroupId: this.workerGroupId
workerGroupId: this.workerGroupId,
successBranch: this.successBranch,
failedBranch: this.failedBranch
}
}
},

10
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue

@ -122,8 +122,11 @@
},
// Watch the cacheParams
watch: {
cacheParams (val) {
this.$emit('on-dsData', val);
datasource (val) {
this.$emit('on-dsData', {
type: this.type,
datasource: val
});
}
},
created () {
@ -150,7 +153,8 @@
})
}
this.$emit('on-dsData', {
type: this.type
type: this.type,
datasource: this.datasource
})
})
},

21
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue

@ -19,7 +19,7 @@
<m-list-box>
<div slot="text">{{$t('Custom Parameters')}}</div>
<div slot="content">
<div class="dep-opt">
<div class="dep-opt">
<a href="javascript:"
@click="!isDetails && _addDep()"
class="add-dep">
@ -133,6 +133,9 @@
setTimeout(() => {
this.isLoading = false
}, 600)
},
cacheDependence (val) {
this.$emit('on-cache-dependent', val)
}
},
beforeCreate () {
@ -153,7 +156,19 @@
},
destroyed () {
},
computed: {},
computed: {
cacheDependence () {
return {
relation: this.relation,
dependTaskList: _.map(this.dependTaskList, v => {
return {
relation: v.relation,
dependItemList: _.map(v.dependItemList, v1 => _.omit(v1, ['depTasksList', 'state', 'dateValueList']))
}
})
}
}
},
components: { mListBox, mNodeStatus }
}
</script>
@ -257,4 +272,4 @@
}
}
}
</style>
</style>

71
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue

@ -65,7 +65,7 @@
<m-datasource
ref="refSourceDs"
@on-dsData="_onSourceDsData"
:data="{ type:'MYSQL',datasource:srcDatasource }"
:data="{ type:sourceMysqlParams.srcType,datasource:sourceMysqlParams.srcDatasource }"
>
</m-datasource>
</div>
@ -186,8 +186,8 @@
<div slot="content">
<div class="from-mirror">
<textarea
id="code-sql-mirror"
name="code-sql-mirror"
id="code-sqoop-mirror"
name="code-sqoop-mirror"
style="opacity: 0;">
</textarea>
</div>
@ -385,7 +385,7 @@
<m-datasource
ref="refTargetDs"
@on-dsData="_onTargetDsData"
:data="{ type:type,datasource:targetDatasource }"
:data="{ type:targetMysqlParams.targetType,datasource:targetMysqlParams.targetDatasource }"
>
</m-datasource>
</div>
@ -556,7 +556,8 @@
targetType:"HDFS",
sourceMysqlParams:{
srcDatasource:-1,
srcType:"MYSQL",
srcDatasource:"",
srcTable:"",
srcQueryType:"1",
srcQuerySql:'',
@ -588,7 +589,8 @@
},
targetMysqlParams:{
targetDatasource:-1,
targetType:"MYSQL",
targetDatasource:"",
targetTable:"",
targetColumns:"",
fieldsTerminated:"",
@ -680,6 +682,7 @@
* return data source
*/
_onSourceDsData (o) {
this.sourceMysqlParams.srcType = o.type
this.sourceMysqlParams.srcDatasource = o.datasource
},
@ -687,6 +690,7 @@
* return data source
*/
_onTargetDsData (o) {
this.targetMysqlParams.targetType = o.type
this.targetMysqlParams.targetDatasource = o.datasource
},
@ -697,7 +701,7 @@
var params = null
switch(this.sourceType){
case "MYSQL":
this.sourceMysqlParams.srcQuerySql = editor.getValue()
this.sourceMysqlParams.srcQuerySql = editor ? editor.getValue() : this.sourceMysqlParams.srcQuerySql
params = JSON.stringify(this.sourceMysqlParams)
break;
case "ORACLE":
@ -879,7 +883,9 @@
* Processing code highlighting
*/
_handlerEditor () {
editor = codemirror('code-sql-mirror', {
this._destroyEditor()
editor = codemirror('code-sqoop-mirror', {
mode: 'sql',
readOnly: this.isDetails
})
@ -892,9 +898,15 @@
}
}
this.changes = () => {
this._cacheParams()
}
// Monitor keyboard
editor.on('keypress', this.keypress)
editor.on('changes', this.changes)
editor.setValue(this.sourceMysqlParams.srcQuerySql)
return editor
@ -906,6 +918,27 @@
_onLocalParams (a) {
this.localParams = a
},
_cacheParams () {
this.$emit('on-cache-params', {
concurrency:this.concurrency,
modelType:this.modelType,
sourceType:this.sourceType,
targetType:this.targetType,
sourceParams:this._handleSourceParams(),
targetParams:this._handleTargetParams(),
localParams:this.localParams
});
},
_destroyEditor () {
if (editor) {
editor.toTextArea() // Uninstall
editor.off($('.code-sqoop-mirror'), 'keypress', this.keypress)
editor.off($('.code-sqoop-mirror'), 'changes', this.changes)
editor = null
}
},
},
watch: {
// Listening to sqlType
@ -927,11 +960,12 @@
},
//Watch the cacheParams
cacheParams (val) {
this.$emit('on-cache-params', val);
this._cacheParams()
}
},
created () {
this._destroyEditor()
let o = this.backfillItem
// Non-null objects represent backfill
@ -963,11 +997,28 @@
*/
if (editor) {
editor.toTextArea() // Uninstall
editor.off($('.code-sql-mirror'), 'keypress', this.keypress)
editor.off($('.code-sqoop-mirror'), 'keypress', this.keypress)
editor.off($('.code-sqoop-mirror'), 'changes', this.changes)
editor = null
}
},
computed: {
cacheParams () {
return {
concurrency:this.concurrency,
modelType:this.modelType,
sourceType:this.sourceType,
targetType:this.targetType,
localParams:this.localParams,
sourceMysqlParams:this.sourceMysqlParams,
sourceHdfsParams:this.sourceHdfsParams,
sourceHiveParams:this.sourceHiveParams,
targetHdfsParams:this.targetHdfsParams,
targetMysqlParams:this.targetMysqlParams,
targetHiveParams:this.targetHiveParams
}
}
},
components: { mListBox, mDatasource, mLocalParams}
}

1
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@ -567,7 +567,6 @@ export default {
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Modify User': 'Modify User',
'Branch flow': 'Branch flow',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow'
}

1
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js

@ -567,7 +567,6 @@ export default {
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Modify User': '修改用户',
'Branch flow': '分支流转',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点'
}

8
e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.base;
import org.apache.dolphinscheduler.constant.TestConstant;
import org.apache.dolphinscheduler.util.PropertiesReader;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
@ -35,7 +36,7 @@ public class BaseDriver {
/**
* driver
*/
private WebDriver driver;
private static WebDriver driver;
/**
* chrome driver path
@ -88,6 +89,7 @@ public class BaseDriver {
chromeOptions.setPageLoadStrategy(PageLoadStrategy.NONE);
chromeOptions.addArguments("--no-sandbox");
chromeOptions.addArguments("--disable-dev-shm-usage");
//Browser client running requires annotation --headless
chromeOptions.addArguments("--headless");
chromeOptions.addArguments("--disable-gpu");
chromeOptions.addArguments("--whitelisted-ips");
@ -120,7 +122,7 @@ public class BaseDriver {
*
* @return driver
*/
public WebDriver getDriver() {
public static WebDriver getDriver() {
return driver;
}
@ -141,7 +143,7 @@ public class BaseDriver {
public void closeBrowser() throws InterruptedException {
// JS Show a pop-up box to indicate the end of the test
Thread.sleep(TestConstant.ONE_THOUSANG);
((JavascriptExecutor) driver).executeScript("alert('Test completed, browser closes after 3s')");
// ((JavascriptExecutor) driver).executeScript("alert('Test completed, browser closes after 3s')");
Thread.sleep(TestConstant.THREE_THOUSANG);
if (driver != null) {
driver.quit();
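BaseDriver makes the WebDriver static, keeps --headless (the new comment notes it can be dropped to watch the browser), and comments out the end-of-test alert. A sketch of a headless ChromeDriver factory, assuming a local chromedriver binary path:

import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;

public class HeadlessDriverFactory {

    // Assumed location of the chromedriver binary; adjust for the local environment.
    private static final String CHROME_DRIVER_PATH = "/usr/local/bin/chromedriver";

    public static WebDriver createHeadlessDriver() {
        System.setProperty("webdriver.chrome.driver", CHROME_DRIVER_PATH);
        ChromeOptions options = new ChromeOptions();
        options.addArguments("--headless");           // remove to watch the browser locally
        options.addArguments("--no-sandbox");
        options.addArguments("--disable-dev-shm-usage");
        options.addArguments("--disable-gpu");
        return new ChromeDriver(options);
    }
}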

17
e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java

@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.base;
import org.apache.dolphinscheduler.page.LoginPage;
import org.apache.dolphinscheduler.util.PropertiesReader;
import org.openqa.selenium.WebDriver;
import org.testng.annotations.*;
@ -28,6 +27,7 @@ import java.util.Properties;
/**
* base test class
*/
@Test(groups={"functionTests"})
public class BaseTest {
/**
* properties
@ -43,7 +43,7 @@ public class BaseTest {
/**
* driver
*/
public WebDriver driver;
public static WebDriver driver;
/**
* Executed before executing a test suite 
@ -54,7 +54,7 @@ public class BaseTest {
*/
@BeforeSuite(alwaysRun = true)
@Parameters({"propertiesPath"})
public void beforeSuite(@Optional("src/test/resources/config/config.properties") String propertiesPath) throws IOException {
public void beforeSuite(@Optional("src/test/resources/config/config.properties") String propertiesPath) throws Exception {
// read properties
properties = PropertiesReader.readProperties(propertiesPath);
}
@ -70,14 +70,13 @@ public class BaseTest {
driver = baseDriver.getDriver();
}
/**
* Executed before executing a class method in a test case
*/
@BeforeClass(alwaysRun = true)
public void setUp() throws IOException, InterruptedException {
LoginPage loginPage = new LoginPage(driver);
loginPage.jumpPage();
loginPage.login();
public void setUp() throws Exception {
}
@ -85,7 +84,7 @@ public class BaseTest {
* Execute after executing a class method in a test case
*/
@AfterClass(alwaysRun = true)
public void afterClass() {
public void afterClass() throws InterruptedException {
// logout
}
@ -102,6 +101,6 @@ public class BaseTest {
* Execute after executing a testsuite
*/
@AfterSuite(alwaysRun = true)
public void afterSuite() {
public void afterSuite() throws InterruptedException {
}
}

2
e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java

@ -39,5 +39,5 @@ public class LoginData {
*/
public static final String PASSWORD = PropertiesReader.getKey("PASSWORD");
public static final String TENANT = "Tenant Manage - DolphinScheduler";
public static final String TENANT = "租户管理 - DolphinScheduler";
}

2
e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java

@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.data.project;
public class CreatWorkflowData {
//input shell task name
public static final String SHELL_TASK_NAME = "shell task description test";
public static final String SHELL_TASK_NAME = "shell task description test1";
//input shell task description
public static final String SHELL_TASK_DESCRIPTION = "shell task description test";

Some files were not shown because too many files have changed in this diff.