
[Feature][Task]Task plugin spi (#6116)

2.0.7-release
Kerwin authored 3 years ago, committed by GitHub
commit 36d60a564a
  1. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java (32)
  2. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java (5)
  3. dolphinscheduler-spi/pom.xml (226)
  4. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/CommandType.java (84)
  5. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DataType.java (37)
  6. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbConnectType.java (42)
  7. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java (59)
  8. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/Flag.java (51)
  9. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResUploadType.java (30)
  10. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/TaskTimeoutStrategy.java (58)
  11. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractTask.java (2)
  12. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/Property.java (2)
  13. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskChannel.java (2)
  14. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java (241)
  15. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/UdfFuncBean.java (188)
  16. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/AbstractDatasourceProcessor.java (79)
  17. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseConnectionParam.java (108)
  18. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseDataSourceParamDTO.java (161)
  19. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseHdfsConnectionParam.java (57)
  20. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseHdfsDatasourceParamDTO.java (61)
  21. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/ConnectionParam.java (26)
  22. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/DatasourceProcessor.java (81)
  23. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/DatasourceUtil.java (121)
  24. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/HiveConfUtils.java (87)
  25. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/PasswordUtils.java (88)
  26. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/clickhouse/ClickHouseDatasourceParamDTO.java (41)
  27. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/clickhouse/ClickHouseDatasourceProcessor.java (131)
  28. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/clickhouse/ClickhouseConnectionParam.java (34)
  29. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/db2/Db2ConnectionParam.java (34)
  30. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/db2/Db2DatasourceParamDTO.java (43)
  31. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/db2/Db2DatasourceProcessor.java (132)
  32. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/hive/HiveConnectionParam.java (38)
  33. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/hive/HiveDataSourceParamDTO.java (45)
  34. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/hive/HiveDatasourceProcessor.java (192)
  35. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/mysql/MysqlConnectionParam.java (35)
  36. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/mysql/MysqlDatasourceParamDTO.java (43)
  37. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/mysql/MysqlDatasourceProcessor.java (176)
  38. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/oracle/OracleConnectionParam.java (47)
  39. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/oracle/OracleDatasourceParamDTO.java (55)
  40. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/oracle/OracleDatasourceProcessor.java (149)
  41. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/postgresql/PostgreSqlConnectionParam.java (34)
  42. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/postgresql/PostgreSqlDatasourceParamDTO.java (41)
  43. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/postgresql/PostgreSqlDatasourceProcessor.java (132)
  44. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/presto/PrestoConnectionParam.java (34)
  45. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/presto/PrestoDatasourceParamDTO.java (43)
  46. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/presto/PrestoDatasourceProcessor.java (134)
  47. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/spark/SparkConnectionParam.java (38)
  48. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/spark/SparkDatasourceParamDTO.java (45)
  49. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/spark/SparkDatasourceProcessor.java (161)
  50. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/sqlserver/SqlServerConnectionParam.java (34)
  51. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/sqlserver/SqlServerDatasourceParamDTO.java (43)
  52. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/sqlserver/SqlServerDatasourceProcessor.java (129)
  53. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/BusinessTimeUtils.java (78)
  54. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParamUtils.java (158)
  55. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParameterUtils.java (269)
  56. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PlaceholderUtils.java (103)
  57. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PropertyPlaceholderHelper.java (255)
  58. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/TimePlaceholderUtils.java (570)
  59. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/DataxTaskRequest.java (115)
  60. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/ProcedureTaskRequest.java (45)
  61. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SQLTaskRequest.java (80)
  62. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SqoopTaskRequest.java (115)
  63. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/TaskRequest.java (16)
  64. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CommonUtils.java (104)
  65. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java (20)
  66. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/DateUtils.java (622)
  67. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/PropertyUtils.java (260)
  68. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java (195)
  69. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java (2)
  70. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java (2)
  71. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java (9)
  72. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ProcessUtils.java (2)
  73. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ShellCommandExecutor.java (2)
  74. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContextCacheManager.java (2)
  75. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/pom.xml (12)
  76. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxParameters.java (251)
  77. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java (570)
  78. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannel.java (36)
  79. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java (42)
  80. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskPlugin.java (31)
  81. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java (136)
  82. dolphinscheduler-task-plugin/dolphinscheduler-task-flink/pom.xml (22)
  83. dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java (87)
  84. dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskChannel.java (2)
  85. dolphinscheduler-task-plugin/dolphinscheduler-task-http/pom.xml (12)
  86. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpMethod.java (18)
  87. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpParameters.java (5)
  88. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpParametersType.java (1)
  89. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpProperty.java (192)
  90. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java (269)
  91. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannel.java (4)
  92. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannelFactory.java (5)
  93. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskConstants.java (8)
  94. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskPlugin.java (3)
  95. dolphinscheduler-task-plugin/dolphinscheduler-task-mr/pom.xml (5)
  96. dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceArgsUtils.java (88)
  97. dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceParameters.java (159)
  98. dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTask.java (134)
  99. dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannel.java (34)
  100. dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannelFactory.java (42)

Some files were not shown because too many files have changed in this diff.

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java (32)

@@ -483,6 +483,38 @@ public class DateUtils {
Constants.YYYY_MM_DD_HH_MM_SS);
}
public static Date addYears(Date date, int amount) {
return add(date, 1, amount);
}
public static Date addMonths(Date date, int amount) {
return add(date, 2, amount);
}
public static Date addWeeks(Date date, int amount) {
return add(date, 3, amount);
}
public static Date addDays(Date date, int amount) {
return add(date, 5, amount);
}
public static Date addHours(Date date, int amount) {
return add(date, 11, amount);
}
public static Date addMinutes(Date date, int amount) {
return add(date, 12, amount);
}
public static Date addSeconds(Date date, int amount) {
return add(date, 13, amount);
}
public static Date addMilliseconds(Date date, int amount) {
return add(date, 14, amount);
}
/**
* get date
*
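The wrappers added above pass java.util.Calendar field constants to an existing private add(Date, int, int) helper: 1 is Calendar.YEAR, 2 Calendar.MONTH, 3 Calendar.WEEK_OF_YEAR, 5 Calendar.DAY_OF_MONTH, 11 Calendar.HOUR_OF_DAY, 12 Calendar.MINUTE, 13 Calendar.SECOND and 14 Calendar.MILLISECOND. The helper itself is outside this hunk, so the following is only a minimal sketch of what such a Calendar-based helper typically looks like, not code from this commit:

import java.util.Calendar;
import java.util.Date;

// Hypothetical stand-in for the private DateUtils.add(Date, int, int) helper referenced above.
public class DateAddSketch {
    static Date add(Date date, int calendarField, int amount) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        calendar.add(calendarField, amount);   // e.g. Calendar.DAY_OF_MONTH == 5
        return calendar.getTime();
    }

    public static void main(String[] args) {
        // addDays(date, 1) above is then equivalent to add(date, Calendar.DAY_OF_MONTH, 1)
        System.out.println(add(new Date(), Calendar.DAY_OF_MONTH, 1));
    }
}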

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java (5)

@@ -41,7 +41,7 @@ import org.apache.dolphinscheduler.service.alert.AlertClientService;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.commons.collections.MapUtils;
@@ -57,9 +57,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

dolphinscheduler-spi/pom.xml (226)

@@ -56,7 +56,11 @@
<artifactId>commons-beanutils</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
@@ -94,6 +98,226 @@
<artifactId>resolver</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>jetty-all</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
</exclusion>
<exclusion>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-json</artifactId>
</exclusion>
<exclusion>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-jvm</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.joshelser</groupId>
<artifactId>dropwizard-metrics-hadoop-metrics2-reporter</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
<exclusion>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
<exclusion>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
</exclusion>
<exclusion>
<artifactId>log4j-slf4j-impl</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet</artifactId>
<groupId>org.eclipse.jetty.orbit</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api-2.5</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jasper-runtime</artifactId>
<groupId>tomcat</groupId>
</exclusion>
<exclusion>
<artifactId>slider-core</artifactId>
<groupId>org.apache.slider</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-client</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-core</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-json</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-server</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-guice</artifactId>
<groupId>com.sun.jersey.contribs</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-common</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-hadoop2-compat</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-client</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-hadoop-compat</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>tephra-hbase-compat-1.0</artifactId>
<groupId>co.cask.tephra</groupId>
</exclusion>
<exclusion>
<artifactId>jaxb-api</artifactId>
<groupId>javax.xml.bind</groupId>
</exclusion>
<exclusion>
<artifactId>hive-llap-client</artifactId>
<groupId>org.apache.hive</groupId>
</exclusion>
<exclusion>
<artifactId>hive-llap-common</artifactId>
<groupId>org.apache.hive</groupId>
</exclusion>
<exclusion>
<artifactId>hive-llap-server</artifactId>
<groupId>org.apache.hive</groupId>
</exclusion>
<exclusion>
<artifactId>tephra-core</artifactId>
<groupId>co.cask.tephra</groupId>
</exclusion>
<exclusion>
<artifactId>ant</artifactId>
<groupId>ant</groupId>
</exclusion>
<exclusion>
<artifactId>stringtemplate</artifactId>
<groupId>org.antlr</groupId>
</exclusion>
<exclusion>
<artifactId>antlr-runtime</artifactId>
<groupId>org.antlr</groupId>
</exclusion>
<exclusion>
<artifactId>hive-shims</artifactId>
<groupId>org.apache.hive</groupId>
</exclusion>
<exclusion>
<artifactId>jsp-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-api</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-core</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-web</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>jasper-compiler</artifactId>
<groupId>tomcat</groupId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</project>

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/CommandType.java (84)

@@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
import java.util.HashMap;
import java.util.Map;
/**
* command types
*/
public enum CommandType {
/**
* command types
* 0 start a new process
* 1 start a new process from current nodes
* 2 recover tolerance fault process
* 3 recover suspended process
* 4 start process from failure task nodes
* 5 complement data
* 6 start a new process from scheduler
* 7 repeat running a process
* 8 pause a process
* 9 stop a process
* 10 recover waiting thread
*/
START_PROCESS(0, "start a new process"),
START_CURRENT_TASK_PROCESS(1, "start a new process from current nodes"),
RECOVER_TOLERANCE_FAULT_PROCESS(2, "recover tolerance fault process"),
RECOVER_SUSPENDED_PROCESS(3, "recover suspended process"),
START_FAILURE_TASK_PROCESS(4, "start process from failure task nodes"),
COMPLEMENT_DATA(5, "complement data"),
SCHEDULER(6, "start a new process from scheduler"),
REPEAT_RUNNING(7, "repeat running a process"),
PAUSE(8, "pause a process"),
STOP(9, "stop a process"),
RECOVER_WAITING_THREAD(10, "recover waiting thread");
CommandType(int code, String descp) {
this.code = code;
this.descp = descp;
}
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
private static final Map<Integer, CommandType> COMMAND_TYPE_MAP = new HashMap<>();
static {
for (CommandType commandType : CommandType.values()) {
COMMAND_TYPE_MAP.put(commandType.code,commandType);
}
}
public static CommandType of(Integer status) {
if (COMMAND_TYPE_MAP.containsKey(status)) {
return COMMAND_TYPE_MAP.get(status);
}
throw new IllegalArgumentException("invalid status : " + status);
}
}
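The static COMMAND_TYPE_MAP gives constant-time lookup from the persisted integer code back to the enum value, and of(...) throws for codes that are not registered. Illustrative use, not part of the commit:

CommandType complement = CommandType.of(5);   // COMPLEMENT_DATA
int code = complement.getCode();              // 5
// CommandType.of(99) throws IllegalArgumentException("invalid status : 99")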

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DataType.java (37)

@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
/**
* data types in user define parameter
*/
public enum DataType {
/**
* 0 string
* 1 integer
* 2 long
* 3 float
* 4 double
* 5 date, "YYYY-MM-DD"
* 6 time, "HH:MM:SS"
* 7 time stamp
* 8 Boolean
* 9 list <String>
*/
VARCHAR,INTEGER,LONG,FLOAT,DOUBLE,DATE,TIME,TIMESTAMP,BOOLEAN,LIST
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbConnectType.java (42)

@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
public enum DbConnectType {
ORACLE_SERVICE_NAME(0, "Oracle Service Name"),
ORACLE_SID(1, "Oracle SID");
DbConnectType(int code, String descp) {
this.code = code;
this.descp = descp;
}
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java (59)

@@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
import static java.util.stream.Collectors.toMap;
import java.util.Arrays;
import java.util.Map;
import com.google.common.base.Functions;
public enum DbType {
MYSQL(0),
POSTGRESQL(1),
HIVE(2),
SPARK(3),
CLICKHOUSE(4),
ORACLE(5),
SQLSERVER(6),
DB2(7),
PRESTO(8),
H2(9);
DbType(int code) {
this.code = code;
}
private final int code;
public int getCode() {
return code;
}
private static final Map<Integer, DbType> DB_TYPE_MAP =
Arrays.stream(DbType.values()).collect(toMap(DbType::getCode, Functions.identity()));
public static DbType of(int type) {
if (DB_TYPE_MAP.containsKey(type)) {
return DB_TYPE_MAP.get(type);
}
return null;
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/Flag.java (51)

@@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
/**
* have_script
* have_file
* can_retry
* have_arr_variables
* have_map_variables
* have_alert
*/
public enum Flag {
/**
* 0 no
* 1 yes
*/
NO(0, "no"),
YES(1, "yes");
Flag(int code, String descp) {
this.code = code;
this.descp = descp;
}
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResUploadType.java (30)

@@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
/**
* data base types
*/
public enum ResUploadType {
/**
* 0 hdfs
* 1 s3
* 2 none
*/
HDFS,S3,NONE
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/TaskTimeoutStrategy.java (58)

@@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.enums;
/**
* task timeout strategy
*/
public enum TaskTimeoutStrategy {
/**
* 0 warn
* 1 failed
* 2 warn+failed
*/
WARN(0, "warn"),
FAILED(1,"failed"),
WARNFAILED(2,"warnfailed");
TaskTimeoutStrategy(int code, String descp) {
this.code = code;
this.descp = descp;
}
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
public static TaskTimeoutStrategy of(int status) {
for (TaskTimeoutStrategy es : values()) {
if (es.getCode() == status) {
return es;
}
}
throw new IllegalArgumentException("invalid status : " + status);
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractTask.java (2)

@@ -17,6 +17,8 @@
package org.apache.dolphinscheduler.spi.task;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
/**
* executive task
*/

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/Property.java (2)

@@ -17,7 +17,7 @@
package org.apache.dolphinscheduler.spi.task;
import org.apache.dolphinscheduler.spi.params.base.DataType;
import org.apache.dolphinscheduler.spi.enums.DataType;
import java.io.Serializable;
import java.util.Objects;

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskChannel.java (2)

@@ -15,6 +15,8 @@ package org.apache.dolphinscheduler.spi.task;/*
* limitations under the License.
*/
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
public interface TaskChannel {
void cancelApplication(boolean status);

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java (241)

@@ -41,6 +41,54 @@ public class TaskConstants {
*/
public static final String COMMA = ",";
/**
* slash /
*/
public static final String SLASH = "/";
/**
* COLON :
*/
public static final String COLON = ":";
/**
* SPACE " "
*/
public static final String SPACE = " ";
/**
* SINGLE_SLASH /
*/
public static final String SINGLE_SLASH = "/";
/**
* DOUBLE_SLASH //
*/
public static final String DOUBLE_SLASH = "//";
/**
* SINGLE_QUOTES "'"
*/
public static final String SINGLE_QUOTES = "'";
/**
* DOUBLE_QUOTES "\""
*/
public static final String DOUBLE_QUOTES = "\"";
/**
* SEMICOLON ;
*/
public static final String SEMICOLON = ";";
/**
* EQUAL SIGN
*/
public static final String EQUAL_SIGN = "=";
/**
* AT SIGN
*/
public static final String AT_SIGN = "@";
/**
* sleep time
*/
@@ -75,4 +123,197 @@ public class TaskConstants {
public static final String RWXR_XR_X = "rwxr-xr-x";
/**
* task log info format
*/
public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s";
/**
* date format of yyyyMMdd
*/
public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd";
/**
* date format of yyyyMMddHHmmss
*/
public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss";
/**
* new
* schedule time
*/
public static final String PARAMETER_SHECDULE_TIME = "schedule.time";
/**
* system date(yyyyMMddHHmmss)
*/
public static final String PARAMETER_DATETIME = "system.datetime";
/**
* system date(yyyymmdd) today
*/
public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate";
/**
* system date(yyyymmdd) yesterday
*/
public static final String PARAMETER_BUSINESS_DATE = "system.biz.date";
/**
* the absolute path of current executing task
*/
public static final String PARAMETER_TASK_EXECUTE_PATH = "system.task.execute.path";
/**
* the instance id of current task
*/
public static final String PARAMETER_TASK_INSTANCE_ID = "system.task.instance.id";
/**
* month_begin
*/
public static final String MONTH_BEGIN = "month_begin";
/**
* add_months
*/
public static final String ADD_MONTHS = "add_months";
/**
* month_end
*/
public static final String MONTH_END = "month_end";
/**
* week_begin
*/
public static final String WEEK_BEGIN = "week_begin";
/**
* week_end
*/
public static final String WEEK_END = "week_end";
/**
* timestamp
*/
public static final String TIMESTAMP = "timestamp";
public static final char SUBTRACT_CHAR = '-';
public static final char ADD_CHAR = '+';
public static final char MULTIPLY_CHAR = '*';
public static final char DIVISION_CHAR = '/';
public static final char LEFT_BRACE_CHAR = '(';
public static final char RIGHT_BRACE_CHAR = ')';
public static final String ADD_STRING = "+";
public static final String MULTIPLY_STRING = "*";
public static final String DIVISION_STRING = "/";
public static final String LEFT_BRACE_STRING = "(";
public static final char P = 'P';
public static final char N = 'N';
public static final String SUBTRACT_STRING = "-";
public static final String GLOBAL_PARAMS = "globalParams";
public static final String LOCAL_PARAMS = "localParams";
public static final String LOCAL_PARAMS_LIST = "localParamsList";
public static final String SUBPROCESS_INSTANCE_ID = "subProcessInstanceId";
public static final String PROCESS_INSTANCE_STATE = "processInstanceState";
public static final String PARENT_WORKFLOW_INSTANCE = "parentWorkflowInstance";
public static final String CONDITION_RESULT = "conditionResult";
public static final String SWITCH_RESULT = "switchResult";
public static final String DEPENDENCE = "dependence";
public static final String TASK_TYPE = "taskType";
public static final String TASK_LIST = "taskList";
public static final String QUEUE = "queue";
public static final String QUEUE_NAME = "queueName";
public static final int LOG_QUERY_SKIP_LINE_NUMBER = 0;
public static final int LOG_QUERY_LIMIT = 4096;
/**
* jar
*/
public static final String JAR = "jar";
/**
* hadoop
*/
public static final String HADOOP = "hadoop";
/**
* -D <property>=<value>
*/
public static final String D = "-D";
/**
* jdbc url
*/
public static final String JDBC_MYSQL = "jdbc:mysql://";
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@";
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String JDBC_PRESTO = "jdbc:presto://";
/**
* driver
*/
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver";
/**
* datasource encryption salt
*/
public static final String DATASOURCE_ENCRYPTION_SALT_DEFAULT = "!@#$%^&*";
public static final String DATASOURCE_ENCRYPTION_ENABLE = "datasource.encryption.enable";
public static final String DATASOURCE_ENCRYPTION_SALT = "datasource.encryption.salt";
/**
* resource storage type
*/
public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type";
/**
* kerberos
*/
public static final String KERBEROS = "kerberos";
/**
* kerberos expire time
*/
public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time";
/**
* java.security.krb5.conf
*/
public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
/**
* java.security.krb5.conf.path
*/
public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path";
/**
* loginUserFromKeytab user
*/
public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username";
/**
* loginUserFromKeytab path
*/
public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state";
}
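The JDBC URL prefixes and driver class names above come in matching pairs, one per DbType. A datasource processor typically selects them together, roughly as in this illustrative sketch; the actual mapping lives in the per-database processors added by this commit, so treat the pairing of SPARK with the Hive driver and the default branch as assumptions:

// Illustrative only: pairs the constants above with the DbType enum from this commit.
static String driverClassFor(DbType dbType) {
    switch (dbType) {
        case MYSQL:      return TaskConstants.COM_MYSQL_JDBC_DRIVER;
        case POSTGRESQL: return TaskConstants.ORG_POSTGRESQL_DRIVER;
        case HIVE:
        case SPARK:      return TaskConstants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
        case CLICKHOUSE: return TaskConstants.COM_CLICKHOUSE_JDBC_DRIVER;
        case ORACLE:     return TaskConstants.COM_ORACLE_JDBC_DRIVER;
        case SQLSERVER:  return TaskConstants.COM_SQLSERVER_JDBC_DRIVER;
        case DB2:        return TaskConstants.COM_DB2_JDBC_DRIVER;
        case PRESTO:     return TaskConstants.COM_PRESTO_JDBC_DRIVER;
        default: throw new IllegalArgumentException("unsupported db type: " + dbType);
    }
}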

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/UdfFuncBean.java (188)

@@ -0,0 +1,188 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.io.IOException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.KeyDeserializer;
/**
* udf function
*/
public class UdfFuncBean {
/**
* id
*/
private int id;
/**
* user id
*/
private int userId;
/**
* udf function name
*/
private String funcName;
/**
* udf class name
*/
private String className;
/**
* udf argument types
*/
private String argTypes;
/**
* udf data base
*/
private String database;
/**
* udf description
*/
private String description;
/**
* resource id
*/
private int resourceId;
/**
* resource name
*/
private String resourceName;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
public String getFuncName() {
return funcName;
}
public void setFuncName(String funcName) {
this.funcName = funcName;
}
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
public String getArgTypes() {
return argTypes;
}
public void setArgTypes(String argTypes) {
this.argTypes = argTypes;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public int getResourceId() {
return resourceId;
}
public void setResourceId(int resourceId) {
this.resourceId = resourceId;
}
public String getResourceName() {
return resourceName;
}
public void setResourceName(String resourceName) {
this.resourceName = resourceName;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UdfFuncBean udfFunc = (UdfFuncBean) o;
if (id != udfFunc.id) {
return false;
}
return !(funcName != null ? !funcName.equals(udfFunc.funcName) : udfFunc.funcName != null);
}
@Override
public int hashCode() {
int result = id;
result = 31 * result + (funcName != null ? funcName.hashCode() : 0);
return result;
}
@Override
public String toString() {
return JSONUtils.toJsonString(this);
}
public static class UdfFuncDeserializer extends KeyDeserializer {
@Override
public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException {
if (StringUtils.isBlank(key)) {
return null;
}
return JSONUtils.parseObject(key, UdfFuncBean.class);
}
}
}
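UdfFuncDeserializer is a Jackson KeyDeserializer, which is only consulted for map keys, so it has to be registered on the mapper before a Map keyed by UdfFuncBean can be read back. A sketch with a plain ObjectMapper; the project routes JSON handling through its own JSONUtils, so take this wiring as an assumption:

import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class UdfFuncKeySketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        // Map keys arrive as JSON strings; the deserializer parses them back into beans.
        module.addKeyDeserializer(UdfFuncBean.class, new UdfFuncBean.UdfFuncDeserializer());
        mapper.registerModule(module);

        UdfFuncBean udf = new UdfFuncBean();
        udf.setId(1);
        udf.setFuncName("my_upper");

        Map<UdfFuncBean, String> original = new HashMap<>();
        original.put(udf, "udfs/my_upper.jar");

        // toString() already serializes the bean to JSON, so the key round-trips.
        String json = mapper.writeValueAsString(original);
        Map<UdfFuncBean, String> copy =
                mapper.readValue(json, new TypeReference<Map<UdfFuncBean, String>>() { });
        System.out.println(copy.size());
    }
}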

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/AbstractDatasourceProcessor.java (79)

@@ -0,0 +1,79 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import org.apache.commons.collections4.MapUtils;
import java.util.Map;
import java.util.regex.Pattern;
public abstract class AbstractDatasourceProcessor implements DatasourceProcessor {
private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");
private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]]+$");
private static final Pattern DATABASE_PATTER = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");
private static final Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9\\-\\_\\/]+$");
@Override
public void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) {
checkHost(baseDataSourceParamDTO.getHost());
checkDatasourcePatter(baseDataSourceParamDTO.getDatabase());
checkOther(baseDataSourceParamDTO.getOther());
}
/**
* Check the host is valid
*
* @param host datasource host
*/
protected void checkHost(String host) {
if (!IPV4_PATTERN.matcher(host).matches() || !IPV6_PATTERN.matcher(host).matches()) {
throw new IllegalArgumentException("datasource host illegal");
}
}
/**
* check database name is valid
*
* @param database database name
*/
protected void checkDatasourcePatter(String database) {
if (!DATABASE_PATTER.matcher(database).matches()) {
throw new IllegalArgumentException("datasource name illegal");
}
}
/**
* check other is valid
*
* @param other other
*/
protected void checkOther(Map<String, String> other) {
if (MapUtils.isEmpty(other)) {
return;
}
boolean paramsCheck = other.entrySet().stream().allMatch(p -> PARAMS_PATTER.matcher(p.getValue()).matches());
if (!paramsCheck) {
throw new IllegalArgumentException("datasource other params illegal");
}
}
}
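The four patterns are simple character allow-lists rather than real address or identifier parsers; IPV6_PATTERN only differs from IPV4_PATTERN by additionally permitting ':', '[' and ']'. Note that checkHost as written throws unless the host matches both patterns, so ordinary hostnames and dotted IPv4 addresses pass while bracketed IPv6 literals do not. A quick illustration of the pattern behavior, not part of the commit:

import java.util.regex.Pattern;

Pattern hostPattern = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");          // same as IPV4_PATTERN
System.out.println(hostPattern.matcher("db-node1.example.com").matches()); // true
System.out.println(hostPattern.matcher("192.168.1.10").matches());         // true
System.out.println(hostPattern.matcher("[fe80::1]").matches());            // false: ':' '[' ']' only pass IPV6_PATTERN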

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseConnectionParam.java (108)

@@ -0,0 +1,108 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import org.apache.dolphinscheduler.spi.task.datasource.clickhouse.ClickhouseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.db2.Db2ConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.hive.HiveConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.mysql.MysqlConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.oracle.OracleConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.postgresql.PostgreSqlConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.presto.PrestoConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.spark.SparkConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.sqlserver.SqlServerConnectionParam;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
* The base model of connection param
* <p>
* {@link ClickhouseConnectionParam}
* {@link Db2ConnectionParam}
* {@link HiveConnectionParam}
* {@link MysqlConnectionParam}
* {@link OracleConnectionParam}
* {@link PostgreSqlConnectionParam}
* {@link PrestoConnectionParam}
* {@link SparkConnectionParam}
* {@link SqlServerConnectionParam}
*/
@JsonInclude(Include.NON_NULL)
public abstract class BaseConnectionParam implements ConnectionParam {
protected String user;
protected String password;
protected String address;
protected String database;
protected String jdbcUrl;
protected String other;
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public void setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
}
public String getOther() {
return other;
}
public void setOther(String other) {
this.other = other;
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseDataSourceParamDTO.java (161)

@@ -0,0 +1,161 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.clickhouse.ClickHouseDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.db2.Db2DatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.hive.HiveDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.mysql.MysqlDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.oracle.OracleDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.postgresql.PostgreSqlDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.presto.PrestoDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.spark.SparkDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.sqlserver.SqlServerDatasourceParamDTO;
import java.io.Serializable;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Basic datasource params submitted to api.
* <p>
* see {@link MysqlDatasourceParamDTO}
* see {@link PostgreSqlDatasourceParamDTO}
* see {@link HiveDataSourceParamDTO}
* see {@link SparkDatasourceParamDTO}
* see {@link ClickHouseDatasourceParamDTO}
* see {@link OracleDatasourceParamDTO}
* see {@link SqlServerDatasourceParamDTO}
* see {@link Db2DatasourceParamDTO}
* see {@link PrestoDatasourceParamDTO}
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes(value = {
@JsonSubTypes.Type(value = MysqlDatasourceParamDTO.class, name = "MYSQL"),
@JsonSubTypes.Type(value = PostgreSqlDatasourceParamDTO.class, name = "POSTGRESQL"),
@JsonSubTypes.Type(value = HiveDataSourceParamDTO.class, name = "HIVE"),
@JsonSubTypes.Type(value = SparkDatasourceParamDTO.class, name = "SPARK"),
@JsonSubTypes.Type(value = ClickHouseDatasourceParamDTO.class, name = "CLICKHOUSE"),
@JsonSubTypes.Type(value = OracleDatasourceParamDTO.class, name = "ORACLE"),
@JsonSubTypes.Type(value = SqlServerDatasourceParamDTO.class, name = "SQLSERVER"),
@JsonSubTypes.Type(value = Db2DatasourceParamDTO.class, name = "DB2"),
@JsonSubTypes.Type(value = PrestoDatasourceParamDTO.class, name = "PRESTO"),
})
public abstract class BaseDataSourceParamDTO implements Serializable {
protected Integer id;
protected String name;
protected String note;
protected String host;
protected Integer port;
protected String database;
protected String userName;
protected String password;
protected Map<String, String> other;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getNote() {
return note;
}
public void setNote(String note) {
this.note = note;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Map<String, String> getOther() {
return other;
}
public void setOther(Map<String, String> other) {
this.other = other;
}
/**
* Get the datasource type
* see{@link DbType}
*
* @return datasource type code
*/
public abstract DbType getType();
}
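The @JsonTypeInfo/@JsonSubTypes annotations let the API layer deserialize submitted JSON straight into the concrete subclass named by its type field. A minimal sketch with a plain ObjectMapper; the sample field values are made up, the field names come from the class above:

import com.fasterxml.jackson.databind.ObjectMapper;

public class DatasourceParamSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = "{\"type\":\"MYSQL\",\"name\":\"test_ds\",\"host\":\"127.0.0.1\","
                + "\"port\":3306,\"database\":\"dolphinscheduler\",\"userName\":\"root\"}";

        // "type":"MYSQL" selects MysqlDatasourceParamDTO via @JsonSubTypes.
        BaseDataSourceParamDTO dto = mapper.readValue(json, BaseDataSourceParamDTO.class);
        System.out.println(dto.getClass().getSimpleName() + " -> " + dto.getHost());
    }
}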

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseHdfsConnectionParam.java (57)

@@ -0,0 +1,57 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
public class BaseHdfsConnectionParam extends BaseConnectionParam {
protected String principal;
protected String javaSecurityKrb5Conf;
protected String loginUserKeytabUsername;
protected String loginUserKeytabPath;
public String getPrincipal() {
return principal;
}
public void setPrincipal(String principal) {
this.principal = principal;
}
public String getJavaSecurityKrb5Conf() {
return javaSecurityKrb5Conf;
}
public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) {
this.javaSecurityKrb5Conf = javaSecurityKrb5Conf;
}
public String getLoginUserKeytabUsername() {
return loginUserKeytabUsername;
}
public void setLoginUserKeytabUsername(String loginUserKeytabUsername) {
this.loginUserKeytabUsername = loginUserKeytabUsername;
}
public String getLoginUserKeytabPath() {
return loginUserKeytabPath;
}
public void setLoginUserKeytabPath(String loginUserKeytabPath) {
this.loginUserKeytabPath = loginUserKeytabPath;
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/BaseHdfsDatasourceParamDTO.java (61)

@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
public abstract class BaseHdfsDatasourceParamDTO extends BaseDataSourceParamDTO {
protected String principal;
protected String javaSecurityKrb5Conf;
protected String loginUserKeytabUsername;
protected String loginUserKeytabPath;
public String getPrincipal() {
return principal;
}
public void setPrincipal(String principal) {
this.principal = principal;
}
public String getLoginUserKeytabUsername() {
return loginUserKeytabUsername;
}
public void setLoginUserKeytabUsername(String loginUserKeytabUsername) {
this.loginUserKeytabUsername = loginUserKeytabUsername;
}
public String getLoginUserKeytabPath() {
return loginUserKeytabPath;
}
public void setLoginUserKeytabPath(String loginUserKeytabPath) {
this.loginUserKeytabPath = loginUserKeytabPath;
}
public String getJavaSecurityKrb5Conf() {
return javaSecurityKrb5Conf;
}
public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) {
this.javaSecurityKrb5Conf = javaSecurityKrb5Conf;
}
}

dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/ConnectionParam.java (26)

@@ -0,0 +1,26 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import java.io.Serializable;
/**
* The model of Datasource Connection param
*/
public interface ConnectionParam extends Serializable {
}

81
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/DatasourceProcessor.java

@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import org.apache.dolphinscheduler.spi.enums.DbType;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
public interface DatasourceProcessor {
/**
* check whether the datasource param is valid
*/
void checkDatasourceParam(BaseDataSourceParamDTO datasourceParam);
/**
* create BaseDataSourceParamDTO by connectionJson
*
* @param connectionJson see {@link org.apache.dolphinscheduler.dao.entity.DataSource}
* @return {@link BaseDataSourceParamDTO}
*/
BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson);
/**
* create the datasource connection parameter which will be stored in DataSource
* <p>
* see {@code org.apache.dolphinscheduler.dao.entity.DataSource.connectionParams}
*/
ConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam);
/**
* deserialize json to datasource connection param
*
* @param connectionJson {@code org.apache.dolphinscheduler.dao.entity.DataSource.connectionParams}
* @return {@link BaseConnectionParam}
*/
ConnectionParam createConnectionParams(String connectionJson);
/**
* get datasource Driver
*/
String getDatasourceDriver();
/**
* get jdbcUrl by connection param; the returned jdbcUrl differs from ConnectionParam.jdbcUrl because this method
* also injects the other properties into it
*
* @param connectionParam connection param
*/
String getJdbcUrl(ConnectionParam connectionParam);
/**
* get connection by connectionParam
*
* @param connectionParam connectionParam
* @return {@link Connection}
*/
Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException, IOException;
/**
* get the database type handled by this processor
*
* @return {@link DbType}
*/
DbType getDbType();
}
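Taken together, the methods above define the life cycle a caller is expected to follow: validate the user-facing DTO, turn it into a serializable ConnectionParam, derive the effective JDBC URL, and finally open a Connection. Below is a minimal sketch of that order, using the ClickHouse implementation added later in this patch; host, port and credentials are illustrative, and a reachable server plus its JDBC driver are assumed.

// Illustrative sketch, not part of this patch.
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.DatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.clickhouse.ClickHouseDatasourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.clickhouse.ClickHouseDatasourceProcessor;

import java.sql.Connection;

public class DatasourceProcessorSketch {
    public static void main(String[] args) throws Exception {
        ClickHouseDatasourceParamDTO dto = new ClickHouseDatasourceParamDTO();
        dto.setHost("localhost");
        dto.setPort(8123);
        dto.setDatabase("default");
        dto.setUserName("default");

        DatasourceProcessor processor = new ClickHouseDatasourceProcessor();
        processor.checkDatasourceParam(dto);                            // 1. validate the user-facing DTO
        ConnectionParam param = processor.createConnectionParams(dto);  // 2. build the storable connection param
        System.out.println(processor.getJdbcUrl(param));                // 3. print the effective JDBC url
        try (Connection connection = processor.getConnection(param)) {  // 4. open a JDBC connection
            System.out.println(connection.isValid(5));
        }
    }
}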

121
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/DatasourceUtil.java

@ -0,0 +1,121 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.clickhouse.ClickHouseDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.db2.Db2DatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.hive.HiveDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.mysql.MysqlDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.oracle.OracleDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.postgresql.PostgreSqlDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.presto.PrestoDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.spark.SparkDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.sqlserver.SqlServerDatasourceProcessor;
import java.sql.Connection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DatasourceUtil {
private DatasourceUtil() {
}
private static final Logger logger = LoggerFactory.getLogger(DatasourceUtil.class);
private static final DatasourceProcessor mysqlProcessor = new MysqlDatasourceProcessor();
private static final DatasourceProcessor postgreSqlProcessor = new PostgreSqlDatasourceProcessor();
private static final DatasourceProcessor hiveProcessor = new HiveDatasourceProcessor();
private static final DatasourceProcessor sparkProcessor = new SparkDatasourceProcessor();
private static final DatasourceProcessor clickhouseProcessor = new ClickHouseDatasourceProcessor();
private static final DatasourceProcessor oracleProcessor = new OracleDatasourceProcessor();
private static final DatasourceProcessor sqlServerProcessor = new SqlServerDatasourceProcessor();
private static final DatasourceProcessor db2Processor = new Db2DatasourceProcessor();
private static final DatasourceProcessor prestoProcessor = new PrestoDatasourceProcessor();
/**
* check datasource param
*
* @param baseDataSourceParamDTO datasource param
*/
public static void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) {
getDatasourceProcessor(baseDataSourceParamDTO.getType()).checkDatasourceParam(baseDataSourceParamDTO);
}
/**
* build datasource connection params
*
* @param baseDataSourceParamDTO datasourceParam
*/
public static ConnectionParam buildConnectionParams(BaseDataSourceParamDTO baseDataSourceParamDTO) {
ConnectionParam connectionParams = getDatasourceProcessor(baseDataSourceParamDTO.getType())
.createConnectionParams(baseDataSourceParamDTO);
if (logger.isDebugEnabled()) {
logger.info("parameters map:{}", connectionParams);
}
return connectionParams;
}
public static ConnectionParam buildConnectionParams(DbType dbType, String connectionJson) {
return getDatasourceProcessor(dbType).createConnectionParams(connectionJson);
}
public static Connection getConnection(DbType dbType, ConnectionParam connectionParam) {
try {
return getDatasourceProcessor(dbType).getConnection(connectionParam);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static String getJdbcUrl(DbType dbType, ConnectionParam baseConnectionParam) {
return getDatasourceProcessor(dbType).getJdbcUrl(baseConnectionParam);
}
public static BaseDataSourceParamDTO buildDatasourceParamDTO(DbType dbType, String connectionParams) {
return getDatasourceProcessor(dbType).createDatasourceParamDTO(connectionParams);
}
public static DatasourceProcessor getDatasourceProcessor(DbType dbType) {
switch (dbType) {
case MYSQL:
return mysqlProcessor;
case POSTGRESQL:
return postgreSqlProcessor;
case HIVE:
return hiveProcessor;
case SPARK:
return sparkProcessor;
case CLICKHOUSE:
return clickhouseProcessor;
case ORACLE:
return oracleProcessor;
case SQLSERVER:
return sqlServerProcessor;
case DB2:
return db2Processor;
case PRESTO:
return prestoProcessor;
default:
throw new IllegalArgumentException("datasource type illegal:" + dbType);
}
}
}
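For task plugins, the facade above is the intended single entry point: deserialize the connectionParams JSON stored on the DataSource, then ask for a live Connection. A hedged sketch follows; the JSON field names are assumed from MysqlConnectionParam, and a running MySQL plus its JDBC driver are required.

// Illustrative sketch, not part of this patch.
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.DatasourceUtil;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class DatasourceUtilSketch {
    public static void main(String[] args) throws Exception {
        // connectionParams as persisted on the DataSource entity (shape assumed for illustration)
        String connectionJson = "{\"user\":\"root\",\"password\":\"\",\"address\":\"jdbc:mysql://localhost:3306\","
                + "\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"}";
        ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(DbType.MYSQL, connectionJson);
        try (Connection connection = DatasourceUtil.getConnection(DbType.MYSQL, connectionParam);
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("select 1")) {
            while (resultSet.next()) {
                System.out.println(resultSet.getInt(1));
            }
        }
    }
}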

87
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/HiveConfUtils.java

@ -0,0 +1,87 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* hive conf utils
*/
public class HiveConfUtils {
private HiveConfUtils() {
throw new UnsupportedOperationException("Construct HiveConfUtils");
}
private static class HiveConfHandler {
private static HiveConf singleton;
private static Map<String,Object> hiveConfVars;
static {
singleton = new HiveConf();
hiveConfVars = new HashMap<>();
Arrays.stream(ConfVars.values()).forEach(confVar -> hiveConfVars.put(confVar.varname,confVar));
}
}
/**
* get HiveConf instance
* @return HiveConf hiveConf
*/
public static HiveConf getInstance() {
return HiveConfHandler.singleton;
}
/**
* get hive conf vars
*
* @return map of hive conf variable names to {@link ConfVars} entries
*/
public static Map<String,Object> getHiveConfVars() {
return HiveConfHandler.hiveConfVars;
}
/**
* Determine if it belongs to a hive conf property
* @param conf config
* @return boolean result
*/
public static boolean isHiveConfVar(String conf) {
// the default hive conf var name
String confKey = conf.split("=")[0];
Map<String, Object> hiveConfVars = HiveConfUtils.getHiveConfVars();
if (hiveConfVars.get(confKey) != null) {
return true;
}
// the security authorization hive conf var name
HiveConf hiveConf = HiveConfUtils.getInstance();
String hiveAuthorizationSqlStdAuthConfigWhitelist = hiveConf.getVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST);
Pattern modWhiteListPattern = Pattern.compile(hiveAuthorizationSqlStdAuthConfigWhitelist);
Matcher matcher = modWhiteListPattern.matcher(confKey);
return matcher.matches();
}
}
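A quick, hedged check of the whitelist logic above: hive.exec.parallel is a built-in ConfVars entry, while an arbitrary session variable is not.

// Illustrative sketch, not part of this patch; requires hive-common on the classpath.
import org.apache.dolphinscheduler.spi.task.datasource.HiveConfUtils;

public class HiveConfUtilsSketch {
    public static void main(String[] args) {
        // recognised hive conf property -> will be placed behind "?" when the Hive JDBC url is assembled
        System.out.println(HiveConfUtils.isHiveConfVar("hive.exec.parallel=true"));        // true
        // unknown key -> treated as a plain session variable
        System.out.println(HiveConfUtils.isHiveConfVar("use:database=dolphinscheduler"));  // false
    }
}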

88
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/PasswordUtils.java

@ -0,0 +1,88 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATASOURCE_ENCRYPTION_ENABLE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATASOURCE_ENCRYPTION_SALT;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATASOURCE_ENCRYPTION_SALT_DEFAULT;
import org.apache.dolphinscheduler.spi.utils.PropertyUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.codec.binary.Base64;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PasswordUtils {
private static final Logger logger = LoggerFactory.getLogger(PasswordUtils.class);
private static final Base64 BASE64 = new Base64();
private PasswordUtils() {
throw new UnsupportedOperationException("Construct PasswordUtils");
}
/**
* encode password
*/
public static String encodePassword(String password) {
if (StringUtils.isEmpty(password)) {
return StringUtils.EMPTY;
}
//if encryption is not turned on, return directly
boolean encryptionEnable = PropertyUtils.getBoolean(DATASOURCE_ENCRYPTION_ENABLE, false);
if (!encryptionEnable) {
return password;
}
// Using Base64 + salt to process password
String salt = PropertyUtils.getString(DATASOURCE_ENCRYPTION_SALT, DATASOURCE_ENCRYPTION_SALT_DEFAULT);
String passwordWithSalt = salt + new String(BASE64.encode(password.getBytes(
StandardCharsets.UTF_8)));
return new String(BASE64.encode(passwordWithSalt.getBytes(StandardCharsets.UTF_8)));
}
/**
* decode password
*/
public static String decodePassword(String password) {
if (StringUtils.isEmpty(password)) {
return StringUtils.EMPTY;
}
//if encryption is not turned on, return directly
boolean encryptionEnable = PropertyUtils.getBoolean(DATASOURCE_ENCRYPTION_ENABLE, false);
if (!encryptionEnable) {
return password;
}
// Using Base64 + salt to process password
String salt = PropertyUtils.getString(DATASOURCE_ENCRYPTION_SALT, DATASOURCE_ENCRYPTION_SALT_DEFAULT);
String passwordWithSalt = new String(BASE64.decode(password), StandardCharsets.UTF_8);
if (!passwordWithSalt.startsWith(salt)) {
logger.warn("There is a password and salt mismatch: {} ", password);
return password;
}
return new String(BASE64.decode(passwordWithSalt.substring(salt.length())), StandardCharsets.UTF_8);
}
}
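A minimal round-trip sketch for the two methods above, assuming the datasource encryption switch (DATASOURCE_ENCRYPTION_ENABLE) is turned on in the properties read by PropertyUtils; with encryption disabled both calls are pass-through.

// Illustrative sketch, not part of this patch.
import org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils;

public class PasswordUtilsSketch {
    public static void main(String[] args) {
        String stored = PasswordUtils.encodePassword("dolphinscheduler"); // Base64(salt + Base64(plain))
        String plain = PasswordUtils.decodePassword(stored);              // strips the salt and decodes
        System.out.println(plain); // "dolphinscheduler", provided the same salt is configured for both calls
    }
}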

41
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/clickhouse/ClickHouseDatasourceParamDTO.java

@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.clickhouse;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class ClickHouseDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "ClickHouseDatasourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.CLICKHOUSE;
}
}

131
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/clickhouse/ClickHouseDatasourceProcessor.java

@ -0,0 +1,131 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.clickhouse;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_CLICKHOUSE_JDBC_DRIVER;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_CLICKHOUSE;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class ClickHouseDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
ClickhouseConnectionParam connectionParams = (ClickhouseConnectionParam) createConnectionParams(connectionJson);
ClickHouseDatasourceParamDTO clickHouseDatasourceParamDTO = new ClickHouseDatasourceParamDTO();
clickHouseDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
clickHouseDatasourceParamDTO.setUserName(connectionParams.getUser());
clickHouseDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String[] hostSeparator = connectionParams.getAddress().split(DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(COMMA);
clickHouseDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
clickHouseDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
return clickHouseDatasourceParamDTO;
}
@Override
public ConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
ClickHouseDatasourceParamDTO clickHouseParam = (ClickHouseDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", JDBC_CLICKHOUSE, clickHouseParam.getHost(), clickHouseParam.getPort());
String jdbcUrl = address + "/" + clickHouseParam.getDatabase();
ClickhouseConnectionParam clickhouseConnectionParam = new ClickhouseConnectionParam();
clickhouseConnectionParam.setDatabase(clickHouseParam.getDatabase());
clickhouseConnectionParam.setAddress(address);
clickhouseConnectionParam.setJdbcUrl(jdbcUrl);
clickhouseConnectionParam.setUser(clickHouseParam.getUserName());
clickhouseConnectionParam.setPassword(encodePassword(clickHouseParam.getPassword()));
clickhouseConnectionParam.setOther(transformOther(clickHouseParam.getOther()));
return clickhouseConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, ClickhouseConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return COM_CLICKHOUSE_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam;
String jdbcUrl = clickhouseConnectionParam.getJdbcUrl();
if (StringUtils.isNotEmpty(clickhouseConnectionParam.getOther())) {
jdbcUrl = String.format("%s?%s", jdbcUrl, clickhouseConnectionParam.getOther());
}
return jdbcUrl;
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(clickhouseConnectionParam),
clickhouseConnectionParam.getUser(), decodePassword(clickhouseConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.CLICKHOUSE;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s%s", key, value, "&")));
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (other == null) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split("&");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}

34
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/clickhouse/ClickhouseConnectionParam.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.clickhouse;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class ClickhouseConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "ClickhouseConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

34
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/db2/Db2ConnectionParam.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.db2;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class Db2ConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "Db2ConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/db2/Db2DatasourceParamDTO.java

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.db2;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class Db2DatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "Db2DatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.DB2;
}
}

132
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/db2/Db2DatasourceProcessor.java

@ -0,0 +1,132 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.db2;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_DB2_JDBC_DRIVER;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_DB2;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class Db2DatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
Db2ConnectionParam connectionParams = (Db2ConnectionParam) createConnectionParams(connectionJson);
Db2DatasourceParamDTO db2DatasourceParamDTO = new Db2DatasourceParamDTO();
db2DatasourceParamDTO.setDatabase(connectionParams.getDatabase());
db2DatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
db2DatasourceParamDTO.setUserName(connectionParams.getUser());
String[] hostSeparator = connectionParams.getAddress().split(DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(COMMA);
db2DatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
db2DatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
return db2DatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
Db2DatasourceParamDTO db2Param = (Db2DatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", JDBC_DB2, db2Param.getHost(), db2Param.getPort());
String jdbcUrl = String.format("%s/%s", address, db2Param.getDatabase());
Db2ConnectionParam db2ConnectionParam = new Db2ConnectionParam();
db2ConnectionParam.setAddress(address);
db2ConnectionParam.setDatabase(db2Param.getDatabase());
db2ConnectionParam.setJdbcUrl(jdbcUrl);
db2ConnectionParam.setUser(db2Param.getUserName());
db2ConnectionParam.setPassword(encodePassword(db2Param.getPassword()));
db2ConnectionParam.setOther(transformOther(db2Param.getOther()));
return db2ConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, Db2ConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return COM_DB2_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(db2ConnectionParam.getOther())) {
return String.format("%s;%s", db2ConnectionParam.getJdbcUrl(), db2ConnectionParam.getOther());
}
return db2ConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(db2ConnectionParam),
db2ConnectionParam.getUser(), decodePassword(db2ConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.DB2;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s%s", key, value, ";")));
stringBuilder.deleteCharAt(stringBuilder.length() - 1);
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (other == null) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split("&")) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}

38
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/hive/HiveConnectionParam.java

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.hive;
import org.apache.dolphinscheduler.spi.task.datasource.BaseHdfsConnectionParam;
public class HiveConnectionParam extends BaseHdfsConnectionParam {
@Override
public String toString() {
return "HiveConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ ", principal='" + principal + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
}

45
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/hive/HiveDataSourceParamDTO.java

@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.hive;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseHdfsDatasourceParamDTO;
public class HiveDataSourceParamDTO extends BaseHdfsDatasourceParamDTO {
@Override
public String toString() {
return "HiveDataSourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", principal='" + principal + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.HIVE;
}
}

192
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/hive/HiveDatasourceProcessor.java

@ -0,0 +1,192 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.hive;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_HIVE_2;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.HiveConfUtils;
import org.apache.dolphinscheduler.spi.utils.CommonUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class HiveDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO();
HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) createConnectionParams(connectionJson);
hiveDataSourceParamDTO.setDatabase(hiveConnectionParam.getDatabase());
hiveDataSourceParamDTO.setUserName(hiveConnectionParam.getUser());
hiveDataSourceParamDTO.setOther(parseOther(hiveConnectionParam.getOther()));
hiveDataSourceParamDTO.setLoginUserKeytabUsername(hiveConnectionParam.getLoginUserKeytabUsername());
hiveDataSourceParamDTO.setLoginUserKeytabPath(hiveConnectionParam.getLoginUserKeytabPath());
hiveDataSourceParamDTO.setJavaSecurityKrb5Conf(hiveConnectionParam.getJavaSecurityKrb5Conf());
String[] tmpArray = hiveConnectionParam.getAddress().split(DOUBLE_SLASH);
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = tmpArray[tmpArray.length - 1].split(COMMA);
for (String hostPort : hostPortArray) {
hosts.append(hostPort.split(COLON)[0]).append(COMMA);
}
hosts.deleteCharAt(hosts.length() - 1);
hiveDataSourceParamDTO.setHost(hosts.toString());
hiveDataSourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
return hiveDataSourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
HiveDataSourceParamDTO hiveParam = (HiveDataSourceParamDTO) datasourceParam;
StringBuilder address = new StringBuilder();
address.append(JDBC_HIVE_2);
for (String zkHost : hiveParam.getHost().split(",")) {
address.append(String.format("%s:%s,", zkHost, hiveParam.getPort()));
}
address.deleteCharAt(address.length() - 1);
String jdbcUrl = address.toString() + "/" + hiveParam.getDatabase();
if (CommonUtils.getKerberosStartupState()) {
jdbcUrl += ";principal=" + hiveParam.getPrincipal();
}
HiveConnectionParam hiveConnectionParam = new HiveConnectionParam();
hiveConnectionParam.setDatabase(hiveParam.getDatabase());
hiveConnectionParam.setAddress(address.toString());
hiveConnectionParam.setJdbcUrl(jdbcUrl);
hiveConnectionParam.setUser(hiveParam.getUserName());
hiveConnectionParam.setPassword(encodePassword(hiveParam.getPassword()));
if (CommonUtils.getKerberosStartupState()) {
hiveConnectionParam.setPrincipal(hiveParam.getPrincipal());
hiveConnectionParam.setJavaSecurityKrb5Conf(hiveParam.getJavaSecurityKrb5Conf());
hiveConnectionParam.setLoginUserKeytabPath(hiveParam.getLoginUserKeytabPath());
hiveConnectionParam.setLoginUserKeytabUsername(hiveParam.getLoginUserKeytabUsername());
}
hiveConnectionParam.setOther(transformOther(hiveParam.getOther()));
return hiveConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, HiveConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam;
String jdbcUrl = hiveConnectionParam.getJdbcUrl();
String otherParams = filterOther(hiveConnectionParam.getOther());
if (StringUtils.isNotEmpty(otherParams) && !otherParams.startsWith("?")) {
jdbcUrl += ";";
}
return jdbcUrl + otherParams;
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws IOException, ClassNotFoundException, SQLException {
HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam;
CommonUtils.loadKerberosConf(hiveConnectionParam.getJavaSecurityKrb5Conf(),
hiveConnectionParam.getLoginUserKeytabUsername(), hiveConnectionParam.getLoginUserKeytabPath());
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam),
hiveConnectionParam.getUser(), decodePassword(hiveConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.HIVE;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s;", key, value)));
return stringBuilder.toString();
}
private String filterOther(String otherParams) {
if (StringUtils.isBlank(otherParams)) {
return "";
}
StringBuilder hiveConfListSb = new StringBuilder();
hiveConfListSb.append("?");
StringBuilder sessionVarListSb = new StringBuilder();
String[] otherArray = otherParams.split(";", -1);
for (String conf : otherArray) {
if (HiveConfUtils.isHiveConfVar(conf)) {
hiveConfListSb.append(conf).append(";");
} else {
sessionVarListSb.append(conf).append(";");
}
}
// remove the last ";"
if (sessionVarListSb.length() > 0) {
sessionVarListSb.deleteCharAt(sessionVarListSb.length() - 1);
}
if (hiveConfListSb.length() > 0) {
hiveConfListSb.deleteCharAt(hiveConfListSb.length() - 1);
}
return sessionVarListSb.toString() + hiveConfListSb.toString();
}
private Map<String, String> parseOther(String other) {
if (other == null) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split(";");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
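To make the URL assembly above concrete: session variables stay directly behind the database (separated by ';'), while recognised Hive configuration properties are moved behind a '?'. A hedged sketch of the expected output, assuming hive-common is on the classpath and Kerberos is disabled:

// Illustrative sketch, not part of this patch.
import org.apache.dolphinscheduler.spi.task.datasource.hive.HiveConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.hive.HiveDatasourceProcessor;

public class HiveJdbcUrlSketch {
    public static void main(String[] args) {
        HiveConnectionParam param = new HiveConnectionParam();
        param.setJdbcUrl("jdbc:hive2://host1:10000,host2:10000/ds");
        param.setOther("use:database=ds;hive.exec.parallel=true");
        String url = new HiveDatasourceProcessor().getJdbcUrl(param);
        // expected: jdbc:hive2://host1:10000,host2:10000/ds;use:database=ds?hive.exec.parallel=true
        System.out.println(url);
    }
}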

35
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/mysql/MysqlConnectionParam.java

@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.mysql;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class MysqlConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "MysqlConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/mysql/MysqlDatasourceParamDTO.java

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.mysql;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class MysqlDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "MysqlDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.MYSQL;
}
}

176
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/mysql/MysqlDatasourceProcessor.java

@ -0,0 +1,176 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.mysql;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_MYSQL_JDBC_DRIVER;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_MYSQL;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MysqlDatasourceProcessor extends AbstractDatasourceProcessor {
private final Logger logger = LoggerFactory.getLogger(MysqlDatasourceProcessor.class);
private static final String ALLOW_LOAD_LOCAL_IN_FILE_NAME = "allowLoadLocalInfile";
private static final String AUTO_DESERIALIZE = "autoDeserialize";
private static final String ALLOW_LOCAL_IN_FILE_NAME = "allowLocalInfile";
private static final String ALLOW_URL_IN_LOCAL_IN_FILE_NAME = "allowUrlInLocalInfile";
private static final String APPEND_PARAMS = "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false";
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
MysqlConnectionParam connectionParams = (MysqlConnectionParam) createConnectionParams(connectionJson);
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setUserName(connectionParams.getUser());
mysqlDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
mysqlDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String address = connectionParams.getAddress();
String[] hostSeparator = address.split(DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(COMMA);
mysqlDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
mysqlDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
return mysqlDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO dataSourceParam) {
MysqlDatasourceParamDTO mysqlDatasourceParam = (MysqlDatasourceParamDTO) dataSourceParam;
String address = String.format("%s%s:%s", JDBC_MYSQL, mysqlDatasourceParam.getHost(), mysqlDatasourceParam.getPort());
String jdbcUrl = String.format("%s/%s", address, mysqlDatasourceParam.getDatabase());
MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam();
mysqlConnectionParam.setJdbcUrl(jdbcUrl);
mysqlConnectionParam.setDatabase(mysqlDatasourceParam.getDatabase());
mysqlConnectionParam.setAddress(address);
mysqlConnectionParam.setUser(mysqlDatasourceParam.getUserName());
mysqlConnectionParam.setPassword(encodePassword(mysqlDatasourceParam.getPassword()));
mysqlConnectionParam.setOther(transformOther(mysqlDatasourceParam.getOther()));
return mysqlConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, MysqlConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return COM_MYSQL_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
MysqlConnectionParam mysqlConnectionParam = (MysqlConnectionParam) connectionParam;
String jdbcUrl = mysqlConnectionParam.getJdbcUrl();
if (StringUtils.isNotEmpty(mysqlConnectionParam.getOther())) {
return String.format("%s?%s&%s", jdbcUrl, mysqlConnectionParam.getOther(), APPEND_PARAMS);
}
return String.format("%s?%s", jdbcUrl, APPEND_PARAMS);
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
MysqlConnectionParam mysqlConnectionParam = (MysqlConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
String user = mysqlConnectionParam.getUser();
if (user.contains(AUTO_DESERIALIZE)) {
logger.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE);
user = user.replace(AUTO_DESERIALIZE, "");
}
String password = decodePassword(mysqlConnectionParam.getPassword());
if (password.contains(AUTO_DESERIALIZE)) {
logger.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE);
password = password.replace(AUTO_DESERIALIZE, "");
}
return DriverManager.getConnection(getJdbcUrl(connectionParam), user, password);
}
@Override
public DbType getDbType() {
return DbType.MYSQL;
}
private String transformOther(Map<String, String> paramMap) {
if (MapUtils.isEmpty(paramMap)) {
return null;
}
Map<String, String> otherMap = new HashMap<>();
paramMap.forEach((k, v) -> {
if (!checkKeyIsLegitimate(k)) {
return;
}
otherMap.put(k, v);
});
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s&", key, value)));
return stringBuilder.toString();
}
private static boolean checkKeyIsLegitimate(String key) {
return !key.contains(ALLOW_LOAD_LOCAL_IN_FILE_NAME)
&& !key.contains(AUTO_DESERIALIZE)
&& !key.contains(ALLOW_LOCAL_IN_FILE_NAME)
&& !key.contains(ALLOW_URL_IN_LOCAL_IN_FILE_NAME);
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split("&")) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
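The APPEND_PARAMS constant above hard-disables the allowLoadLocalInfile/autoDeserialize family of driver options in every URL this processor hands out, regardless of what the user supplied. A sketch of the expected output; no MySQL server or driver is needed just to format the URL.

// Illustrative sketch, not part of this patch.
import org.apache.dolphinscheduler.spi.task.datasource.mysql.MysqlConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.mysql.MysqlDatasourceProcessor;

public class MysqlJdbcUrlSketch {
    public static void main(String[] args) {
        MysqlConnectionParam param = new MysqlConnectionParam();
        param.setJdbcUrl("jdbc:mysql://localhost:3306/test");
        String url = new MysqlDatasourceProcessor().getJdbcUrl(param);
        // expected: jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false
        System.out.println(url);
    }
}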

47
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/oracle/OracleConnectionParam.java

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.oracle;
import org.apache.dolphinscheduler.spi.enums.DbConnectType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class OracleConnectionParam extends BaseConnectionParam {
protected DbConnectType connectType;
public DbConnectType getConnectType() {
return connectType;
}
public void setConnectType(DbConnectType connectType) {
this.connectType = connectType;
}
@Override
public String toString() {
return "OracleConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ ", connectType=" + connectType
+ '}';
}
}

55
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/oracle/OracleDatasourceParamDTO.java

@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.oracle;
import org.apache.dolphinscheduler.spi.enums.DbConnectType;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class OracleDatasourceParamDTO extends BaseDataSourceParamDTO {
private DbConnectType connectType;
public DbConnectType getConnectType() {
return connectType;
}
public void setConnectType(DbConnectType connectType) {
this.connectType = connectType;
}
@Override
public String toString() {
return "OracleDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", connectType=" + connectType
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.ORACLE;
}
}

149
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/oracle/OracleDatasourceProcessor.java

@ -0,0 +1,149 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.oracle;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.AT_SIGN;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_ORACLE_JDBC_DRIVER;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_ORACLE_SERVICE_NAME;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_ORACLE_SID;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbConnectType;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class OracleDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
OracleConnectionParam connectionParams = (OracleConnectionParam) createConnectionParams(connectionJson);
OracleDatasourceParamDTO oracleDatasourceParamDTO = new OracleDatasourceParamDTO();
oracleDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
oracleDatasourceParamDTO.setUserName(connectionParams.getUser());
oracleDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String hostSeparator = DOUBLE_SLASH;
if (DbConnectType.ORACLE_SID.equals(connectionParams.connectType)) {
hostSeparator = AT_SIGN;
}
String[] hostPort = connectionParams.getAddress().split(hostSeparator);
String[] hostPortArray = hostPort[hostPort.length - 1].split(COMMA);
oracleDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
oracleDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
return oracleDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
OracleDatasourceParamDTO oracleParam = (OracleDatasourceParamDTO) datasourceParam;
String address;
if (DbConnectType.ORACLE_SID.equals(oracleParam.getConnectType())) {
address = String.format("%s%s:%s",
JDBC_ORACLE_SID, oracleParam.getHost(), oracleParam.getPort());
} else {
address = String.format("%s%s:%s",
JDBC_ORACLE_SERVICE_NAME, oracleParam.getHost(), oracleParam.getPort());
}
String jdbcUrl = address + "/" + oracleParam.getDatabase();
OracleConnectionParam oracleConnectionParam = new OracleConnectionParam();
oracleConnectionParam.setUser(oracleParam.getUserName());
oracleConnectionParam.setPassword(encodePassword(oracleParam.getPassword()));
oracleConnectionParam.setAddress(address);
oracleConnectionParam.setJdbcUrl(jdbcUrl);
oracleConnectionParam.setDatabase(oracleParam.getDatabase());
oracleConnectionParam.setConnectType(oracleParam.getConnectType());
oracleConnectionParam.setOther(transformOther(oracleParam.getOther()));
return oracleConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, OracleConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return COM_ORACLE_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(oracleConnectionParam.getOther())) {
return String.format("%s?%s", oracleConnectionParam.getJdbcUrl(), oracleConnectionParam.getOther());
}
return oracleConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam),
oracleConnectionParam.getUser(), decodePassword(oracleConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.ORACLE;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
List<String> list = new ArrayList<>();
otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value)));
return String.join("&", list);
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split("&");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
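
For orientation, a hypothetical usage sketch of the processor above. The example class, host, port and credentials are placeholders, and the setters on BaseDataSourceParamDTO plus the JDBC_ORACLE_* prefixes in TaskConstants are assumed from the rest of this patch:

package org.apache.dolphinscheduler.spi.task.datasource.oracle;

import org.apache.dolphinscheduler.spi.enums.DbConnectType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;

// hypothetical example, not part of this patch
public class OracleDatasourceProcessorExample {
    public static void main(String[] args) {
        OracleDatasourceParamDTO dto = new OracleDatasourceParamDTO();
        dto.setHost("127.0.0.1");                     // placeholder host
        dto.setPort(1521);                            // placeholder port
        dto.setDatabase("ORCL");                      // placeholder SID
        dto.setUserName("demo");                      // placeholder credentials
        dto.setPassword("demo");
        dto.setConnectType(DbConnectType.ORACLE_SID); // the service-name branch builds a different address prefix

        OracleDatasourceProcessor processor = new OracleDatasourceProcessor();
        BaseConnectionParam connectionParam = processor.createConnectionParams(dto);
        // the resulting URL is address + "/" + database; the exact prefix comes from TaskConstants
        System.out.println(processor.getJdbcUrl(connectionParam));
    }
}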

34
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/postgresql/PostgreSqlConnectionParam.java

@@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.postgresql;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class PostgreSqlConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "PostgreSqlConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

41
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/postgresql/PostgreSqlDatasourceParamDTO.java

@@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.postgresql;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class PostgreSqlDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "PostgreSqlDatasourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.POSTGRESQL;
}
}

132
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/postgresql/PostgreSqlDatasourceProcessor.java

@@ -0,0 +1,132 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.postgresql;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_POSTGRESQL;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.ORG_POSTGRESQL_DRIVER;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class PostgreSqlDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
PostgreSqlConnectionParam connectionParams = (PostgreSqlConnectionParam) createConnectionParams(connectionJson);
PostgreSqlDatasourceParamDTO postgreSqlDatasourceParamDTO = new PostgreSqlDatasourceParamDTO();
postgreSqlDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
postgreSqlDatasourceParamDTO.setUserName(connectionParams.getUser());
postgreSqlDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String address = connectionParams.getAddress();
String[] hostSeparator = address.split(DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(COMMA);
postgreSqlDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
postgreSqlDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
return postgreSqlDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
PostgreSqlDatasourceParamDTO postgreSqlParam = (PostgreSqlDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", JDBC_POSTGRESQL, postgreSqlParam.getHost(), postgreSqlParam.getPort());
String jdbcUrl = String.format("%s/%s", address, postgreSqlParam.getDatabase());
PostgreSqlConnectionParam postgreSqlConnectionParam = new PostgreSqlConnectionParam();
postgreSqlConnectionParam.setJdbcUrl(jdbcUrl);
postgreSqlConnectionParam.setAddress(address);
postgreSqlConnectionParam.setDatabase(postgreSqlParam.getDatabase());
postgreSqlConnectionParam.setUser(postgreSqlParam.getUserName());
postgreSqlConnectionParam.setPassword(encodePassword(postgreSqlParam.getPassword()));
postgreSqlConnectionParam.setOther(transformOther(postgreSqlParam.getOther()));
return postgreSqlConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, PostgreSqlConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return ORG_POSTGRESQL_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(postgreSqlConnectionParam.getOther())) {
return String.format("%s?%s", postgreSqlConnectionParam.getJdbcUrl(), postgreSqlConnectionParam.getOther());
}
return postgreSqlConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(postgreSqlConnectionParam),
postgreSqlConnectionParam.getUser(), decodePassword(postgreSqlConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.POSTGRESQL;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s&", key, value)));
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split("&")) {
String[] split = config.split("=");
otherMap.put(split[0], split[1]);
}
return otherMap;
}
}
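
A hypothetical round-trip sketch for the PostgreSQL processor. Values are placeholders; the DTO setters are assumed, and JSONUtils.toJsonString is the SPI serializer already used elsewhere in this patch:

package org.apache.dolphinscheduler.spi.task.datasource.postgresql;

import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;

// hypothetical example, not part of this patch
public class PostgreSqlDatasourceProcessorExample {
    public static void main(String[] args) {
        PostgreSqlDatasourceParamDTO dto = new PostgreSqlDatasourceParamDTO();
        dto.setHost("127.0.0.1");                 // placeholder values
        dto.setPort(5432);
        dto.setDatabase("dolphinscheduler");
        dto.setUserName("postgres");
        dto.setPassword("postgres");

        PostgreSqlDatasourceProcessor processor = new PostgreSqlDatasourceProcessor();
        BaseConnectionParam created = processor.createConnectionParams(dto);

        // serialize and rebuild the connection params, the way a worker would receive them
        String json = JSONUtils.toJsonString(created);
        ConnectionParam restored = processor.createConnectionParams(json);
        System.out.println(processor.getJdbcUrl(restored));
    }
}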

34
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/presto/PrestoConnectionParam.java

@@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.presto;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class PrestoConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "PrestoConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/presto/PrestoDatasourceParamDTO.java

@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.presto;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class PrestoDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "PrestoDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.PRESTO;
}
}

134
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/presto/PrestoDatasourceProcessor.java

@@ -0,0 +1,134 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.presto;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_PRESTO_JDBC_DRIVER;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_PRESTO;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class PrestoDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
PrestoConnectionParam connectionParams = (PrestoConnectionParam) createConnectionParams(connectionJson);
String[] hostSeparator = connectionParams.getAddress().split(DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(COMMA);
PrestoDatasourceParamDTO prestoDatasourceParamDTO = new PrestoDatasourceParamDTO();
prestoDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
prestoDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
prestoDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
prestoDatasourceParamDTO.setUserName(connectionParams.getUser());
prestoDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
return prestoDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
PrestoDatasourceParamDTO prestoParam = (PrestoDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", JDBC_PRESTO, prestoParam.getHost(), prestoParam.getPort());
String jdbcUrl = address + "/" + prestoParam.getDatabase();
PrestoConnectionParam prestoConnectionParam = new PrestoConnectionParam();
prestoConnectionParam.setUser(prestoParam.getUserName());
prestoConnectionParam.setPassword(encodePassword(prestoParam.getPassword()));
prestoConnectionParam.setOther(transformOther(prestoParam.getOther()));
prestoConnectionParam.setAddress(address);
prestoConnectionParam.setJdbcUrl(jdbcUrl);
prestoConnectionParam.setDatabase(prestoParam.getDatabase());
return prestoConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, PrestoConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return COM_PRESTO_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(prestoConnectionParam.getOther())) {
return String.format("%s?%s", prestoConnectionParam.getJdbcUrl(), prestoConnectionParam.getOther());
}
return prestoConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam),
prestoConnectionParam.getUser(), decodePassword(prestoConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.PRESTO;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isNotEmpty(otherMap)) {
List<String> list = new ArrayList<>();
otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value)));
return String.join("&", list);
}
return null;
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split("&");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
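
A hypothetical sketch showing how the optional "other" properties flow into the Presto JDBC URL. Values are placeholders and the DTO setters are assumed:

package org.apache.dolphinscheduler.spi.task.datasource.presto;

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;

// hypothetical example, not part of this patch
public class PrestoDatasourceProcessorExample {
    public static void main(String[] args) {
        PrestoDatasourceParamDTO dto = new PrestoDatasourceParamDTO();
        dto.setHost("127.0.0.1");                 // placeholder values
        dto.setPort(8080);
        dto.setDatabase("hive");
        dto.setUserName("presto");
        dto.setPassword("presto");

        Map<String, String> other = new LinkedHashMap<>();
        other.put("SSL", "false");                // placeholder extra property
        dto.setOther(other);

        PrestoDatasourceProcessor processor = new PrestoDatasourceProcessor();
        BaseConnectionParam connectionParam = processor.createConnectionParams(dto);
        // transformOther() joins the map with '&' and getJdbcUrl() appends it after '?'
        System.out.println(processor.getJdbcUrl(connectionParam));
    }
}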

38
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/spark/SparkConnectionParam.java

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.spark;
import org.apache.dolphinscheduler.spi.task.datasource.BaseHdfsConnectionParam;
public class SparkConnectionParam extends BaseHdfsConnectionParam {
@Override
public String toString() {
return "SparkConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ ", principal='" + principal + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
}

45
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/spark/SparkDatasourceParamDTO.java

@@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.spark;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseHdfsDatasourceParamDTO;
public class SparkDatasourceParamDTO extends BaseHdfsDatasourceParamDTO {
@Override
public String toString() {
return "SparkDatasourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", principal='" + principal + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.SPARK;
}
}

161
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/spark/SparkDatasourceProcessor.java

@@ -0,0 +1,161 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.spark;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_HIVE_2;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.CommonUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public class SparkDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
SparkConnectionParam connectionParams = (SparkConnectionParam) createConnectionParams(connectionJson);
SparkDatasourceParamDTO sparkDatasourceParamDTO = new SparkDatasourceParamDTO();
sparkDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
sparkDatasourceParamDTO.setUserName(connectionParams.getUser());
sparkDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
sparkDatasourceParamDTO.setJavaSecurityKrb5Conf(connectionParams.getJavaSecurityKrb5Conf());
sparkDatasourceParamDTO.setLoginUserKeytabPath(connectionParams.getLoginUserKeytabPath());
sparkDatasourceParamDTO.setLoginUserKeytabUsername(connectionParams.getLoginUserKeytabUsername());
StringBuilder hosts = new StringBuilder();
String[] tmpArray = connectionParams.getAddress().split(DOUBLE_SLASH);
String[] hostPortArray = tmpArray[tmpArray.length - 1].split(COMMA);
Arrays.stream(hostPortArray).forEach(hostPort -> hosts.append(hostPort.split(COLON)[0]).append(COMMA));
hosts.deleteCharAt(hosts.length() - 1);
sparkDatasourceParamDTO.setHost(hosts.toString());
sparkDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
return sparkDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO dataSourceParam) {
StringBuilder address = new StringBuilder();
SparkDatasourceParamDTO sparkDatasourceParam = (SparkDatasourceParamDTO) dataSourceParam;
address.append(JDBC_HIVE_2);
for (String zkHost : sparkDatasourceParam.getHost().split(",")) {
address.append(String.format("%s:%s,", zkHost, sparkDatasourceParam.getPort()));
}
address.deleteCharAt(address.length() - 1);
String jdbcUrl = address + "/" + sparkDatasourceParam.getDatabase();
if (CommonUtils.getKerberosStartupState()) {
jdbcUrl += ";principal=" + sparkDatasourceParam.getPrincipal();
}
SparkConnectionParam sparkConnectionParam = new SparkConnectionParam();
sparkConnectionParam.setPassword(encodePassword(sparkDatasourceParam.getPassword()));
sparkConnectionParam.setUser(sparkDatasourceParam.getUserName());
sparkConnectionParam.setOther(transformOther(sparkDatasourceParam.getOther()));
sparkConnectionParam.setDatabase(sparkDatasourceParam.getDatabase());
sparkConnectionParam.setAddress(address.toString());
sparkConnectionParam.setJdbcUrl(jdbcUrl);
if (CommonUtils.getKerberosStartupState()) {
sparkConnectionParam.setPrincipal(sparkDatasourceParam.getPrincipal());
sparkConnectionParam.setJavaSecurityKrb5Conf(sparkDatasourceParam.getJavaSecurityKrb5Conf());
sparkConnectionParam.setLoginUserKeytabPath(sparkDatasourceParam.getLoginUserKeytabPath());
sparkConnectionParam.setLoginUserKeytabUsername(sparkDatasourceParam.getLoginUserKeytabUsername());
}
return sparkConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, SparkConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(sparkConnectionParam.getOther())) {
return String.format("%s;%s", sparkConnectionParam.getJdbcUrl(), sparkConnectionParam.getOther());
}
return sparkConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws IOException, ClassNotFoundException, SQLException {
SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam;
CommonUtils.loadKerberosConf(sparkConnectionParam.getJavaSecurityKrb5Conf(),
sparkConnectionParam.getLoginUserKeytabUsername(), sparkConnectionParam.getLoginUserKeytabPath());
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(sparkConnectionParam),
sparkConnectionParam.getUser(), decodePassword(sparkConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.SPARK;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
List<String> stringBuilder = otherMap.entrySet().stream()
.map(entry -> String.format("%s=%s", entry.getKey(), entry.getValue())).collect(Collectors.toList());
return String.join(";", stringBuilder);
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split(";");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
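
A hypothetical sketch of the multi-host handling in the Spark processor. Hosts are placeholders, and Kerberos is assumed to be disabled so the principal-related branches are skipped:

package org.apache.dolphinscheduler.spi.task.datasource.spark;

import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;

// hypothetical example, not part of this patch
public class SparkDatasourceProcessorExample {
    public static void main(String[] args) {
        SparkDatasourceParamDTO dto = new SparkDatasourceParamDTO();
        dto.setHost("node1,node2");               // comma separated hosts, expanded to node1:10000,node2:10000
        dto.setPort(10000);
        dto.setDatabase("default");               // placeholder values
        dto.setUserName("hive");
        dto.setPassword("hive");

        SparkDatasourceProcessor processor = new SparkDatasourceProcessor();
        BaseConnectionParam connectionParam = processor.createConnectionParams(dto);
        // the address uses the JDBC_HIVE_2 prefix from TaskConstants, followed by "/" + database
        System.out.println(processor.getJdbcUrl(connectionParam));
    }
}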

34
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/sqlserver/SqlServerConnectionParam.java

@@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.sqlserver;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
public class SqlServerConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "SqlServerConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/sqlserver/SqlServerDatasourceParamDTO.java

@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.sqlserver;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
public class SqlServerDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "SqlServerDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.SQLSERVER;
}
}

129
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/datasource/sqlserver/SqlServerDatasourceProcessor.java

@@ -0,0 +1,129 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.datasource.sqlserver;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_SQLSERVER_JDBC_DRIVER;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_SQLSERVER;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.encodePassword;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.task.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.task.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class SqlServerDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
SqlServerConnectionParam connectionParams = (SqlServerConnectionParam) createConnectionParams(connectionJson);
String[] hostSeparator = connectionParams.getAddress().split(DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(COMMA);
SqlServerDatasourceParamDTO sqlServerDatasourceParamDTO = new SqlServerDatasourceParamDTO();
sqlServerDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
sqlServerDatasourceParamDTO.setUserName(connectionParams.getUser());
sqlServerDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
sqlServerDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1]));
sqlServerDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]);
return sqlServerDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
SqlServerDatasourceParamDTO sqlServerParam = (SqlServerDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", JDBC_SQLSERVER, sqlServerParam.getHost(), sqlServerParam.getPort());
String jdbcUrl = address + ";databaseName=" + sqlServerParam.getDatabase();
SqlServerConnectionParam sqlServerConnectionParam = new SqlServerConnectionParam();
sqlServerConnectionParam.setAddress(address);
sqlServerConnectionParam.setDatabase(sqlServerParam.getDatabase());
sqlServerConnectionParam.setJdbcUrl(jdbcUrl);
sqlServerConnectionParam.setOther(transformOther(sqlServerParam.getOther()));
sqlServerConnectionParam.setUser(sqlServerParam.getUserName());
sqlServerConnectionParam.setPassword(encodePassword(sqlServerParam.getPassword()));
return sqlServerConnectionParam;
}
@Override
public BaseConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, SqlServerConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return COM_SQLSERVER_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(sqlServerConnectionParam.getOther())) {
return String.format("%s;%s", sqlServerConnectionParam.getJdbcUrl(), sqlServerConnectionParam.getOther());
}
return sqlServerConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam), sqlServerConnectionParam.getUser(),
decodePassword(sqlServerConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.SQLSERVER;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s;", key, value)));
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split(";")) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
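
A hypothetical sketch for the SQL Server processor; note that, unlike the other processors, the database is appended as ";databaseName=..." and extra properties are joined with ';'. Values are placeholders:

package org.apache.dolphinscheduler.spi.task.datasource.sqlserver;

import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;

// hypothetical example, not part of this patch
public class SqlServerDatasourceProcessorExample {
    public static void main(String[] args) {
        SqlServerDatasourceParamDTO dto = new SqlServerDatasourceParamDTO();
        dto.setHost("127.0.0.1");                 // placeholder values
        dto.setPort(1433);
        dto.setDatabase("master");
        dto.setUserName("sa");
        dto.setPassword("sa");

        SqlServerDatasourceProcessor processor = new SqlServerDatasourceProcessor();
        BaseConnectionParam connectionParam = processor.createConnectionParams(dto);
        // e.g. <JDBC_SQLSERVER prefix>127.0.0.1:1433;databaseName=master
        System.out.println(processor.getJdbcUrl(connectionParam));
    }
}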

78
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/BusinessTimeUtils.java

@@ -0,0 +1,78 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.paramparser;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_BUSINESS_DATE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_CURRENT_DATE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_DATETIME;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_FORMAT_DATE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_FORMAT_TIME;
import static org.apache.dolphinscheduler.spi.utils.DateUtils.addDays;
import static org.apache.dolphinscheduler.spi.utils.DateUtils.format;
import org.apache.dolphinscheduler.spi.enums.CommandType;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* business time utils
*/
public class BusinessTimeUtils {
private BusinessTimeUtils() {
throw new IllegalStateException("BusinessTimeUtils class");
}
/**
* get business time in parameters by different command types
*
* @param commandType command type
* @param runTime run time or schedule time
* @return business time
*/
public static Map<String, String> getBusinessTime(CommandType commandType, Date runTime) {
Date businessDate = runTime;
switch (commandType) {
case COMPLEMENT_DATA:
break;
case START_PROCESS:
case START_CURRENT_TASK_PROCESS:
case RECOVER_TOLERANCE_FAULT_PROCESS:
case RECOVER_SUSPENDED_PROCESS:
case START_FAILURE_TASK_PROCESS:
case REPEAT_RUNNING:
case SCHEDULER:
default:
businessDate = addDays(new Date(), -1);
if (runTime != null) {
/**
* if a schedule time is given (recovery from failed nodes, recovery of a suspended process,
* or a re-run of a scheduled run), use it as the base of the business date
*/
businessDate = addDays(runTime, -1);
}
break;
}
Date businessCurrentDate = addDays(businessDate, 1);
Map<String, String> result = new HashMap<>();
result.put(PARAMETER_CURRENT_DATE, format(businessCurrentDate, PARAMETER_FORMAT_DATE));
result.put(PARAMETER_BUSINESS_DATE, format(businessDate, PARAMETER_FORMAT_DATE));
result.put(PARAMETER_DATETIME, format(businessCurrentDate, PARAMETER_FORMAT_TIME));
return result;
}
}
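
A hypothetical sketch of what getBusinessTime produces for a normal (non-complement) run; the key names and date formats come from TaskConstants, which is not shown in this hunk:

package org.apache.dolphinscheduler.spi.task.paramparser;

import java.util.Date;
import java.util.Map;

import org.apache.dolphinscheduler.spi.enums.CommandType;

// hypothetical example, not part of this patch
public class BusinessTimeUtilsExample {
    public static void main(String[] args) {
        Map<String, String> businessTime =
                BusinessTimeUtils.getBusinessTime(CommandType.START_PROCESS, new Date());
        // for a non-complement run: business date = run time minus one day,
        // business current date and datetime = run time
        businessTime.forEach((key, value) -> System.out.println(key + " = " + value));
    }
}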

158
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParamUtils.java

@@ -0,0 +1,158 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.paramparser;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_TASK_EXECUTE_PATH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_TASK_INSTANCE_ID;
import org.apache.dolphinscheduler.spi.enums.CommandType;
import org.apache.dolphinscheduler.spi.enums.DataType;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.Direct;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import com.google.common.base.Preconditions;
/**
* param utils
*/
public class ParamUtils {
/**
* parameter conversion
* Warning:
* the first call to convert modifies the localParams and varPool fields of the supplied
* parameters (e.g. ShellParameters). Those fields are also read elsewhere in the system, and it
* is unclear whether that behaviour is correct, so the original logic is left unchanged here.
*
* @param taskExecutionContext the context of this task instance
* @param parameters the parameters
* @return global params
*
*/
public static Map<String, Property> convert(TaskRequest taskExecutionContext, AbstractParameters parameters) {
Preconditions.checkNotNull(taskExecutionContext);
Preconditions.checkNotNull(parameters);
Map<String, Property> globalParams = getUserDefParamsMap(taskExecutionContext.getDefinedParams());
Map<String,String> globalParamsMap = taskExecutionContext.getDefinedParams();
CommandType commandType = CommandType.of(taskExecutionContext.getCmdTypeIfComplement());
Date scheduleTime = taskExecutionContext.getScheduleTime();
// combining local and global parameters
Map<String, Property> localParams = parameters.getLocalParametersMap();
Map<String, Property> varParams = parameters.getVarPoolMap();
if (globalParams == null && localParams == null) {
return null;
}
// if it is a complement (backfill) run, the task instance id needs to be passed in
// to locate the schedule time of the complemented process instance
Map<String,String> params = BusinessTimeUtils
.getBusinessTime(commandType,
scheduleTime);
if (globalParamsMap != null) {
params.putAll(globalParamsMap);
}
if (StringUtils.isNotBlank(taskExecutionContext.getExecutePath())) {
params.put(PARAMETER_TASK_EXECUTE_PATH, taskExecutionContext.getExecutePath());
}
params.put(PARAMETER_TASK_INSTANCE_ID, Integer.toString(taskExecutionContext.getTaskInstanceId()));
if (globalParams != null && localParams != null) {
globalParams.putAll(localParams);
} else if (globalParams == null && localParams != null) {
globalParams = localParams;
}
if (varParams != null) {
varParams.putAll(globalParams);
globalParams = varParams;
}
Iterator<Map.Entry<String, Property>> iter = globalParams.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, Property> en = iter.next();
Property property = en.getValue();
if (StringUtils.isNotEmpty(property.getValue())
&& property.getValue().startsWith("$")) {
/**
* a local parameter starting with '$' refers to the global parameter of the same name.
* note: the global parameters of the process instance are already cured (resolved),
* so they contain no unresolved variables.
*/
String val = property.getValue();
val = ParameterUtils.convertParameterPlaceholders(val, params);
property.setValue(val);
}
}
return globalParams;
}
/**
* convert a map of Property objects into a map of plain string values
*
* @param paramsMap params map
* @return map of parameter name to string value
*/
public static Map<String,String> convert(Map<String,Property> paramsMap) {
if (paramsMap == null) {
return null;
}
Map<String, String> map = new HashMap<>();
Iterator<Map.Entry<String, Property>> iter = paramsMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, Property> en = iter.next();
map.put(en.getKey(), en.getValue().getValue());
}
return map;
}
/**
* get parameters map
*
* @param definedParams definedParams
* @return parameters map
*/
public static Map<String, Property> getUserDefParamsMap(Map<String, String> definedParams) {
if (definedParams != null) {
Map<String, Property> userDefParamsMaps = new HashMap<>();
Iterator<Map.Entry<String, String>> iter = definedParams.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, String> en = iter.next();
Property property = new Property(en.getKey(), Direct.IN, DataType.VARCHAR, en.getValue());
userDefParamsMaps.put(property.getProp(),property);
}
return userDefParamsMaps;
}
return null;
}
}
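
A hypothetical sketch of the two small helpers above; the full convert(TaskRequest, AbstractParameters) path needs a populated task context and is not exercised here:

package org.apache.dolphinscheduler.spi.task.paramparser;

import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.spi.task.Property;

// hypothetical example, not part of this patch
public class ParamUtilsExample {
    public static void main(String[] args) {
        Map<String, String> definedParams = new HashMap<>();
        definedParams.put("dt", "20210901");      // placeholder user-defined parameter

        // wraps each entry into a Property with Direct.IN and DataType.VARCHAR
        Map<String, Property> properties = ParamUtils.getUserDefParamsMap(definedParams);

        // flattens the Property map back to name -> value
        Map<String, String> flattened = ParamUtils.convert(properties);
        System.out.println(flattened);            // {dt=20210901}
    }
}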

269
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParameterUtils.java

@@ -0,0 +1,269 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.paramparser;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_DATETIME;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_FORMAT_TIME;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_SHECDULE_TIME;
import org.apache.dolphinscheduler.spi.enums.CommandType;
import org.apache.dolphinscheduler.spi.enums.DataType;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.utils.DateUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.sql.PreparedStatement;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* parameter parse utils
*/
public class ParameterUtils {
private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class);
private static final String DATE_PARSE_PATTERN = "\\$\\[([^\\$\\]]+)]";
private static final String DATE_START_PATTERN = "^[0-9]";
private ParameterUtils() {
throw new UnsupportedOperationException("Construct ParameterUtils");
}
/**
* convert parameter placeholders
*
* @param parameterString parameter string
* @param parameterMap parameter map
* @return parameter string with placeholders replaced
*/
public static String convertParameterPlaceholders(String parameterString, Map<String, String> parameterMap) {
if (StringUtils.isEmpty(parameterString)) {
return parameterString;
}
Date cronTime;
if (parameterMap != null && !parameterMap.isEmpty()) {
// replace the ${} variable form, i.e. substitute system and custom variables
parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true);
}
if (parameterMap != null && null != parameterMap.get(PARAMETER_DATETIME)) {
// use the schedule execute time carried in the parameter map
String cronTimeStr = parameterMap.get(PARAMETER_DATETIME);
cronTime = DateUtils.parse(cronTimeStr, PARAMETER_FORMAT_TIME);
} else {
cronTime = new Date();
}
// replace the $[...] time form, e.g. $[yyyyMMdd]
if (cronTime != null) {
return dateTemplateParse(parameterString, cronTime);
}
return parameterString;
}
/**
* newer variant of convertParameterPlaceholders that reads the schedule time
* from the parameter map
*
* @param parameterString parameter string
* @param parameterMap parameter map
* @return parameter string with placeholders replaced
*/
public static String convertParameterPlaceholders2(String parameterString, Map<String, String> parameterMap) {
if (StringUtils.isEmpty(parameterString)) {
return parameterString;
}
// get the schedule execute time; fall back to the current time if it is absent
String cronTimeStr = parameterMap.get(PARAMETER_SHECDULE_TIME);
Date cronTime = null;
if (StringUtils.isNotEmpty(cronTimeStr)) {
cronTime = DateUtils.parse(cronTimeStr, PARAMETER_FORMAT_TIME);
} else {
cronTime = new Date();
}
// replace the ${} variable form, i.e. substitute system and custom variables
if (!parameterMap.isEmpty()) {
parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true);
}
// replace the $[...] time form, e.g. $[yyyyMMdd]
if (cronTime != null) {
return dateTemplateParse(parameterString, cronTime);
}
return parameterString;
}
/**
* set in parameter
*
* @param index index
* @param stmt preparedstatement
* @param dataType data type
* @param value value
* @throws Exception errors
*/
public static void setInParameter(int index, PreparedStatement stmt, DataType dataType, String value) throws Exception {
if (dataType.equals(DataType.VARCHAR)) {
stmt.setString(index, value);
} else if (dataType.equals(DataType.INTEGER)) {
stmt.setInt(index, Integer.parseInt(value));
} else if (dataType.equals(DataType.LONG)) {
stmt.setLong(index, Long.parseLong(value));
} else if (dataType.equals(DataType.FLOAT)) {
stmt.setFloat(index, Float.parseFloat(value));
} else if (dataType.equals(DataType.DOUBLE)) {
stmt.setDouble(index, Double.parseDouble(value));
} else if (dataType.equals(DataType.DATE)) {
stmt.setDate(index, java.sql.Date.valueOf(value));
} else if (dataType.equals(DataType.TIME)) {
stmt.setString(index, value);
} else if (dataType.equals(DataType.TIMESTAMP)) {
stmt.setTimestamp(index, java.sql.Timestamp.valueOf(value));
} else if (dataType.equals(DataType.BOOLEAN)) {
stmt.setBoolean(index, Boolean.parseBoolean(value));
}
}
/**
* cure (resolve) user-defined global parameters
*
* @param globalParamMap global param map
* @param globalParamList global param list
* @param commandType command type
* @param scheduleTime schedule time
* @return the cured global parameter list serialized as JSON
*/
public static String curingGlobalParams(Map<String, String> globalParamMap, List<Property> globalParamList,
CommandType commandType, Date scheduleTime) {
if (globalParamList == null || globalParamList.isEmpty()) {
return null;
}
Map<String, String> globalMap = new HashMap<>();
if (globalParamMap != null) {
globalMap.putAll(globalParamMap);
}
Map<String, String> allParamMap = new HashMap<>();
// if it is a complement run, a complement time needs to be passed in, depending on the command type
Map<String, String> timeParams = BusinessTimeUtils
.getBusinessTime(commandType, scheduleTime);
if (timeParams != null) {
allParamMap.putAll(timeParams);
}
allParamMap.putAll(globalMap);
Set<Map.Entry<String, String>> entries = allParamMap.entrySet();
Map<String, String> resolveMap = new HashMap<>();
for (Map.Entry<String, String> entry : entries) {
String val = entry.getValue();
if (val.startsWith("$")) {
String str = ParameterUtils.convertParameterPlaceholders(val, allParamMap);
resolveMap.put(entry.getKey(), str);
}
}
globalMap.putAll(resolveMap);
for (Property property : globalParamList) {
String val = globalMap.get(property.getProp());
if (val != null) {
property.setValue(val);
}
}
return JSONUtils.toJsonString(globalParamList);
}
/**
* $[yyyyMMdd] replace schedule time
*/
public static String replaceScheduleTime(String text, Date scheduleTime) {
Map<String, Property> paramsMap = new HashMap<>();
// if scheduleTime is null, fall back to the current date
if (null == scheduleTime) {
scheduleTime = new Date();
}
String dateTime = DateUtils.format(scheduleTime, PARAMETER_FORMAT_TIME);
Property p = new Property();
p.setValue(dateTime);
p.setProp(PARAMETER_SHECDULE_TIME);
paramsMap.put(PARAMETER_SHECDULE_TIME, p);
text = ParameterUtils.convertParameterPlaceholders2(text, convert(paramsMap));
return text;
}
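// e.g. replaceScheduleTime("dt=$[yyyyMMdd]", scheduleTime) with a 2021-09-10 schedule time -> "dt=20210910"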
/**
* format convert
*
* @param paramsMap params map
* @return Map of converted
* see org.apache.dolphinscheduler.server.utils.ParamUtils.convert
*/
public static Map<String, String> convert(Map<String, Property> paramsMap) {
Map<String, String> map = new HashMap<>();
Iterator<Map.Entry<String, Property>> iter = paramsMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, Property> en = iter.next();
map.put(en.getKey(), en.getValue().getValue());
}
return map;
}
private static String dateTemplateParse(String templateStr, Date date) {
if (templateStr == null) {
return null;
}
Pattern pattern = Pattern.compile(DATE_PARSE_PATTERN);
StringBuffer newValue = new StringBuffer(templateStr.length());
Matcher matcher = pattern.matcher(templateStr);
while (matcher.find()) {
String key = matcher.group(1);
if (Pattern.matches(DATE_START_PATTERN, key)) {
continue;
}
String value = TimePlaceholderUtils.getPlaceHolderTime(key, date);
assert value != null;
matcher.appendReplacement(newValue, value);
}
matcher.appendTail(newValue);
return newValue.toString();
}
}
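A minimal usage sketch of the two replacement passes above (illustrative only; it assumes the schedule-time key is exposed as TaskConstants.PARAMETER_SHECDULE_TIME and that PARAMETER_FORMAT_TIME is yyyyMMddHHmmss):
Map<String, String> params = new HashMap<>();
params.put("table", "t_ds_task");
params.put(TaskConstants.PARAMETER_SHECDULE_TIME, "20210910000000"); // schedule time, assumed yyyyMMddHHmmss
String sql = "select * from ${table} where dt = '$[yyyyMMdd-1]'";
// ${table} is resolved from the map, $[yyyyMMdd-1] from the schedule time:
// -> "select * from t_ds_task where dt = '20210909'"
String resolved = ParameterUtils.convertParameterPlaceholders2(sql, params);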

103
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PlaceholderUtils.java

@@ -0,0 +1,103 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.paramparser;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* placeholder utils
*/
public class PlaceholderUtils {
private static final Logger logger = LoggerFactory.getLogger(PlaceholderUtils.class);
/**
* Prefix of the position to be replaced
*/
public static final String PLACEHOLDER_PREFIX = "${";
/**
* The suffix of the position to be replaced
*/
public static final String PLACEHOLDER_SUFFIX = "}";
/**
* Replaces all placeholders of format {@code ${name}} with the value returned
* from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}.
*
* @param value the value containing the placeholders to be replaced
* @param paramsMap placeholder data dictionary
* @param ignoreUnresolvablePlaceholders ignoreUnresolvablePlaceholders
* @return the supplied value with placeholders replaced inline
*/
public static String replacePlaceholders(String value,
Map<String, String> paramsMap,
boolean ignoreUnresolvablePlaceholders) {
//strict helper: each placeholder key is replaced by its value; an unresolvable placeholder throws an exception
PropertyPlaceholderHelper strictHelper = getPropertyPlaceholderHelper(false);
//non-strict helper: a placeholder with no corresponding value is left as-is and replacement continues with the next one
PropertyPlaceholderHelper nonStrictHelper = getPropertyPlaceholderHelper(true);
PropertyPlaceholderHelper helper = (ignoreUnresolvablePlaceholders ? nonStrictHelper : strictHelper);
//the PlaceholderResolver to use for replacement
return helper.replacePlaceholders(value, new PropertyPlaceholderResolver(value, paramsMap));
}
/**
* Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix.
* @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should
* be ignored ({@code true}) or cause an exception ({@code false})
* @return PropertyPlaceholderHelper
*/
public static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) {
return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders);
}
/**
* Placeholder replacement resolver
*/
private static class PropertyPlaceholderResolver implements PropertyPlaceholderHelper.PlaceholderResolver {
private final String value;
private final Map<String, String> paramsMap;
public PropertyPlaceholderResolver(String value, Map<String, String> paramsMap) {
this.value = value;
this.paramsMap = paramsMap;
}
@Override
public String resolvePlaceholder(String placeholderName) {
try {
return paramsMap.get(placeholderName);
} catch (Exception ex) {
logger.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex);
return null;
}
}
}
}
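A short illustrative sketch of the strict vs. non-strict behaviour described above:
Map<String, String> params = new HashMap<>();
params.put("user", "dolphin");
// non-strict: unresolved placeholders are kept as-is
PlaceholderUtils.replacePlaceholders("hello ${user}, id=${id}", params, true);  // -> "hello dolphin, id=${id}"
// strict: an unresolved placeholder raises IllegalArgumentException
PlaceholderUtils.replacePlaceholders("hello ${user}, id=${id}", params, false); // throws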

255
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PropertyPlaceholderHelper.java

@@ -0,0 +1,255 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.paramparser;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility class for working with Strings that have placeholder values in them. A placeholder takes the form
* {@code ${name}}. Using {@code PropertyPlaceholderHelper} these placeholders can be substituted for
* user-supplied values. <p> Values for substitution can be supplied using a {@link Properties} instance or
* using a {@link PlaceholderResolver}.
*
* @author Juergen Hoeller
* @author Rob Harrop
* @since 3.0
*/
public class PropertyPlaceholderHelper {
private static final Logger logger = LoggerFactory.getLogger(PropertyPlaceholderHelper.class);
private static final Map<String, String> wellKnownSimplePrefixes = new HashMap<String, String>(4);
static {
wellKnownSimplePrefixes.put("}", "{");
wellKnownSimplePrefixes.put("]", "[");
wellKnownSimplePrefixes.put(")", "(");
}
private final String placeholderPrefix;
private final String placeholderSuffix;
private final String simplePrefix;
private final String valueSeparator;
private final boolean ignoreUnresolvablePlaceholders;
/**
* Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix.
* Unresolvable placeholders are ignored.
* @param placeholderPrefix the prefix that denotes the start of a placeholder
* @param placeholderSuffix the suffix that denotes the end of a placeholder
*/
public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix) {
this(placeholderPrefix, placeholderSuffix, null, true);
}
/**
* Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix.
* @param placeholderPrefix the prefix that denotes the start of a placeholder
* @param placeholderSuffix the suffix that denotes the end of a placeholder
* @param valueSeparator the separating character between the placeholder variable
* and the associated default value, if any
* @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should
* be ignored ({@code true}) or cause an exception ({@code false})
*/
public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix,
String valueSeparator, boolean ignoreUnresolvablePlaceholders) {
notNull(placeholderPrefix, "'placeholderPrefix' must not be null");
notNull(placeholderSuffix, "'placeholderSuffix' must not be null");
this.placeholderPrefix = placeholderPrefix;
this.placeholderSuffix = placeholderSuffix;
String simplePrefixForSuffix = wellKnownSimplePrefixes.get(this.placeholderSuffix);
if (simplePrefixForSuffix != null && this.placeholderPrefix.endsWith(simplePrefixForSuffix)) {
this.simplePrefix = simplePrefixForSuffix;
}
else {
this.simplePrefix = this.placeholderPrefix;
}
this.valueSeparator = valueSeparator;
this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders;
}
/**
* Replaces all placeholders of format {@code ${name}} with the corresponding
* property from the supplied {@link Properties}.
* @param value the value containing the placeholders to be replaced
* @param properties the {@code Properties} to use for replacement
* @return the supplied value with placeholders replaced inline
*/
public String replacePlaceholders(String value, final Properties properties) {
notNull(properties, "'properties' must not be null");
return replacePlaceholders(value, new PlaceholderResolver() {
@Override
public String resolvePlaceholder(String placeholderName) {
return properties.getProperty(placeholderName);
}
});
}
/**
* Replaces all placeholders of format {@code ${name}} with the value returned
* from the supplied {@link PlaceholderResolver}.
* @param value the value containing the placeholders to be replaced
* @param placeholderResolver the {@code PlaceholderResolver} to use for replacement
* @return the supplied value with placeholders replaced inline
*/
public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) {
notNull(value, "'value' must not be null");
return parseStringValue(value, placeholderResolver, new HashSet<String>());
}
protected String parseStringValue(
String value, PlaceholderResolver placeholderResolver, Set<String> visitedPlaceholders) {
StringBuilder result = new StringBuilder(value);
int startIndex = value.indexOf(this.placeholderPrefix);
while (startIndex != -1) {
int endIndex = findPlaceholderEndIndex(result, startIndex);
if (endIndex != -1) {
String placeholder = result.substring(startIndex + this.placeholderPrefix.length(), endIndex);
String originalPlaceholder = placeholder;
if (!visitedPlaceholders.add(originalPlaceholder)) {
throw new IllegalArgumentException(
"Circular placeholder reference '" + originalPlaceholder + "' in property definitions");
}
// Recursive invocation, parsing placeholders contained in the placeholder key.
placeholder = parseStringValue(placeholder, placeholderResolver, visitedPlaceholders);
// Now obtain the value for the fully resolved key...
String propVal = placeholderResolver.resolvePlaceholder(placeholder);
if (propVal == null && this.valueSeparator != null) {
int separatorIndex = placeholder.indexOf(this.valueSeparator);
if (separatorIndex != -1) {
String actualPlaceholder = placeholder.substring(0, separatorIndex);
String defaultValue = placeholder.substring(separatorIndex + this.valueSeparator.length());
propVal = placeholderResolver.resolvePlaceholder(actualPlaceholder);
if (propVal == null) {
propVal = defaultValue;
}
}
}
if (propVal != null) {
// Recursive invocation, parsing placeholders contained in the
// previously resolved placeholder value.
propVal = parseStringValue(propVal, placeholderResolver, visitedPlaceholders);
result.replace(startIndex, endIndex + this.placeholderSuffix.length(), propVal);
if (logger.isTraceEnabled()) {
logger.trace("Resolved placeholder '" + placeholder + "'");
}
startIndex = result.indexOf(this.placeholderPrefix, startIndex + propVal.length());
}
else if (this.ignoreUnresolvablePlaceholders) {
// Proceed with unprocessed value.
startIndex = result.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length());
}
else {
throw new IllegalArgumentException("Could not resolve placeholder '"
+ placeholder + "'" + " in value \"" + value + "\"");
}
visitedPlaceholders.remove(originalPlaceholder);
}
else {
startIndex = -1;
}
}
return result.toString();
}
private int findPlaceholderEndIndex(CharSequence buf, int startIndex) {
int index = startIndex + this.placeholderPrefix.length();
int withinNestedPlaceholder = 0;
while (index < buf.length()) {
if (substringMatch(buf, index, this.placeholderSuffix)) {
if (withinNestedPlaceholder > 0) {
withinNestedPlaceholder--;
index = index + this.placeholderSuffix.length();
}
else {
return index;
}
}
else if (substringMatch(buf, index, this.simplePrefix)) {
withinNestedPlaceholder++;
index = index + this.simplePrefix.length();
}
else {
index++;
}
}
return -1;
}
/**
* Strategy interface used to resolve replacement values for placeholders contained in Strings.
*/
public interface PlaceholderResolver {
/**
* Resolve the supplied placeholder name to the replacement value.
* @param placeholderName the name of the placeholder to resolve
* @return the replacement value, or {@code null} if no replacement is to be made
*/
String resolvePlaceholder(String placeholderName);
}
/**
* Test whether the given string matches the given substring
* at the given index.
* @param str the original string (or StringBuilder)
* @param index the index in the original string to start matching against
* @param substring the substring to match at the given index
* @return whether the given string matches the given substring
*/
public static boolean substringMatch(CharSequence str, int index, CharSequence substring) {
for (int j = 0; j < substring.length(); j++) {
int i = index + j;
if (i >= str.length() || str.charAt(i) != substring.charAt(j)) {
return false;
}
}
return true;
}
/**
* Assert that an object is not {@code null}.
* <pre class="code">Assert.notNull(clazz, "The class must not be null");</pre>
* @param object the object to check
* @param message the exception message to use if the assertion fails
* @throws IllegalArgumentException if the object is {@code null}
*/
public static void notNull(Object object, String message) {
if (object == null) {
throw new IllegalArgumentException(message);
}
}
}
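An illustrative sketch of the helper used on its own; note that the ":" default-value separator is supported by the class even though PlaceholderUtils above always passes null for it:
Properties props = new Properties();
props.setProperty("env", "prod");
PropertyPlaceholderHelper helper = new PropertyPlaceholderHelper("${", "}", ":", true);
helper.replacePlaceholders("db-${env}", props);             // -> "db-prod"
helper.replacePlaceholders("db-${region:us-east}", props);  // -> "db-us-east" (falls back to the default value)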

570
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/TimePlaceholderUtils.java

@@ -0,0 +1,570 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.paramparser;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.ADD_CHAR;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.ADD_MONTHS;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.ADD_STRING;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DIVISION_CHAR;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.DIVISION_STRING;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.LEFT_BRACE_CHAR;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.LEFT_BRACE_STRING;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.MONTH_BEGIN;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.MONTH_END;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.MULTIPLY_CHAR;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.MULTIPLY_STRING;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.N;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.P;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_FORMAT_TIME;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.RIGHT_BRACE_CHAR;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.SUBTRACT_CHAR;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.SUBTRACT_STRING;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.TIMESTAMP;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.WEEK_BEGIN;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.WEEK_END;
import static org.apache.dolphinscheduler.spi.utils.DateUtils.addDays;
import static org.apache.dolphinscheduler.spi.utils.DateUtils.addMinutes;
import static org.apache.dolphinscheduler.spi.utils.DateUtils.addMonths;
import org.apache.dolphinscheduler.spi.utils.DateUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* time place holder utils
*/
public class TimePlaceholderUtils {
private static final Logger logger = LoggerFactory.getLogger(TimePlaceholderUtils.class);
/**
* Prefix of the position to be replaced
*/
public static final String PLACEHOLDER_PREFIX = "$[";
/**
* The suffix of the position to be replaced
*/
public static final String PLACEHOLDER_SUFFIX = "]";
/**
* Replaces all placeholders of format {@code ${name}} with the value returned
* from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}.
*
* @param value the value containing the placeholders to be replaced
* @param date custom date
* @param ignoreUnresolvablePlaceholders ignore unresolvable placeholders
* @return the supplied value with placeholders replaced inline
*/
public static String replacePlaceholders(String value, Date date, boolean ignoreUnresolvablePlaceholders) {
PropertyPlaceholderHelper strictHelper = getPropertyPlaceholderHelper(false);
PropertyPlaceholderHelper nonStrictHelper = getPropertyPlaceholderHelper(true);
PropertyPlaceholderHelper helper = (ignoreUnresolvablePlaceholders ? nonStrictHelper : strictHelper);
return helper.replacePlaceholders(value, new TimePlaceholderResolver(value, date));
}
/**
* Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix.
*
* @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should
* be ignored ({@code true}) or cause an exception ({@code false})
*/
private static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) {
return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders);
}
/**
* calculate expression's value
*
* @param expression expression
* @return expression's value
*/
public static Integer calculate(String expression) {
expression = StringUtils.trim(expression);
expression = convert(expression);
List<String> result = string2List(expression);
result = convert2SuffixList(result);
return calculate(result);
}
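// e.g. calculate("2*3+1") -> 7, calculate("60*24*(-1)") -> -1440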
/**
* Change the sign in the expression to P (positive) N (negative)
*
* @param expression the arithmetic expression
* @return e.g. "-3+-6*(+8)-(-5)" -> "N3+N6*(P8)-(N5)"
*/
private static String convert(String expression) {
char[] arr = expression.toCharArray();
for (int i = 0; i < arr.length; i++) {
if (arr[i] == SUBTRACT_CHAR) {
if (i == 0) {
arr[i] = N;
} else {
char c = arr[i - 1];
if (c == ADD_CHAR || c == SUBTRACT_CHAR || c == MULTIPLY_CHAR || c == DIVISION_CHAR || c == LEFT_BRACE_CHAR) {
arr[i] = N;
}
}
} else if (arr[i] == ADD_CHAR) {
if (i == 0) {
arr[i] = P;
} else {
char c = arr[i - 1];
if (c == ADD_CHAR || c == SUBTRACT_CHAR || c == MULTIPLY_CHAR || c == DIVISION_CHAR || c == LEFT_BRACE_CHAR) {
arr[i] = P;
}
}
}
}
return new String(arr);
}
/**
* convert an infix token list to suffix (postfix) notation
*
* @param srcList infix token list
* @return suffix token list
*/
private static List<String> convert2SuffixList(List<String> srcList) {
List<String> result = new ArrayList<>();
Stack<String> stack = new Stack<>();
for (int i = 0; i < srcList.size(); i++) {
if (Character.isDigit(srcList.get(i).charAt(0))) {
result.add(srcList.get(i));
} else {
switch (srcList.get(i).charAt(0)) {
case LEFT_BRACE_CHAR:
stack.push(srcList.get(i));
break;
case RIGHT_BRACE_CHAR:
while (!LEFT_BRACE_STRING.equals(stack.peek())) {
result.add(stack.pop());
}
stack.pop();
break;
default:
while (!stack.isEmpty() && compare(stack.peek(), srcList.get(i))) {
result.add(stack.pop());
}
stack.push(srcList.get(i));
break;
}
}
}
while (!stack.isEmpty()) {
result.add(stack.pop());
}
return result;
}
/**
* evaluate the suffix (postfix) expression
*
* @param result suffix token list
* @return evaluated value
*/
private static Integer calculate(List<String> result) {
Stack<Integer> stack = new Stack<>();
for (int i = 0; i < result.size(); i++) {
if (Character.isDigit(result.get(i).charAt(0))) {
stack.push(Integer.parseInt(result.get(i)));
} else {
Integer backInt = stack.pop();
Integer frontInt = 0;
char op = result.get(i).charAt(0);
if (!(op == P || op == N)) {
frontInt = stack.pop();
}
Integer res = 0;
switch (result.get(i).charAt(0)) {
case P:
res = frontInt + backInt;
break;
case N:
res = frontInt - backInt;
break;
case ADD_CHAR:
res = frontInt + backInt;
break;
case SUBTRACT_CHAR:
res = frontInt - backInt;
break;
case MULTIPLY_CHAR:
res = frontInt * backInt;
break;
case DIVISION_CHAR:
res = frontInt / backInt;
break;
default:
break;
}
stack.push(res);
}
}
return stack.pop();
}
/**
* tokenize the expression string into number and operator tokens
*
* @param expression arithmetic expression
* @return token list
*/
private static List<String> string2List(String expression) {
List<String> result = new ArrayList<>();
String num = "";
for (int i = 0; i < expression.length(); i++) {
if (Character.isDigit(expression.charAt(i))) {
num = num + expression.charAt(i);
} else {
if (!num.isEmpty()) {
result.add(num);
}
result.add(expression.charAt(i) + "");
num = "";
}
}
if (!num.isEmpty()) {
result.add(num);
}
return result;
}
/**
* compare operator precedence
*
* @param peek operator on top of the stack
* @param cur current operator
* @return true if the stack-top operator should be popped before pushing the current one
*/
private static boolean compare(String peek, String cur) {
if (MULTIPLY_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) {
return true;
} else if (DIVISION_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) {
return true;
} else if (ADD_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) {
return true;
} else {
return SUBTRACT_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur));
}
}
/**
* Placeholder replacement resolver
*/
private static class TimePlaceholderResolver implements
PropertyPlaceholderHelper.PlaceholderResolver {
private final String value;
private final Date date;
public TimePlaceholderResolver(String value, Date date) {
this.value = value;
this.date = date;
}
@Override
public String resolvePlaceholder(String placeholderName) {
try {
return calculateTime(placeholderName, date);
} catch (Exception ex) {
logger.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex);
return null;
}
}
}
/**
* return the formatted date according to the corresponding date format
*
* @param expression date expression
* @param date date
* @return reformat date
*/
public static String getPlaceHolderTime(String expression, Date date) {
if (StringUtils.isBlank(expression)) {
return null;
}
if (null == date) {
return null;
}
return calculateTime(expression, date);
}
/**
* calculate time
*
* @param expression time expression
* @param date date
* @return calculated time string
*/
private static String calculateTime(String expression, Date date) {
// e.g. N years later: $[add_months(yyyyMMdd,12*N)], N months earlier: $[add_months(yyyyMMdd,-N)], etc.
String value;
try {
if (expression.startsWith(TIMESTAMP)) {
String timeExpression = expression.substring(TIMESTAMP.length() + 1, expression.length() - 1);
Map.Entry<Date, String> entry = calcTimeExpression(timeExpression, date);
String dateStr = DateUtils.format(entry.getKey(), entry.getValue());
Date timestamp = DateUtils.parse(dateStr, PARAMETER_FORMAT_TIME);
value = String.valueOf(timestamp.getTime() / 1000);
} else {
Map.Entry<Date, String> entry = calcTimeExpression(expression, date);
value = DateUtils.format(entry.getKey(), entry.getValue());
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw e;
}
return value;
}
/**
* calculate time expression
*
* @param expression expression
* @param date date
* @return map with date, date format
*/
public static Map.Entry<Date, String> calcTimeExpression(String expression, Date date) {
Map.Entry<Date, String> resultEntry;
if (expression.startsWith(ADD_MONTHS)) {
resultEntry = calcMonths(expression, date);
} else if (expression.startsWith(MONTH_BEGIN)) {
resultEntry = calcMonthBegin(expression, date);
} else if (expression.startsWith(MONTH_END)) {
resultEntry = calcMonthEnd(expression, date);
} else if (expression.startsWith(WEEK_BEGIN)) {
resultEntry = calcWeekStart(expression, date);
} else if (expression.startsWith(WEEK_END)) {
resultEntry = calcWeekEnd(expression, date);
} else {
resultEntry = calcMinutes(expression, date);
}
return resultEntry;
}
/**
* get first day of month
*
* @param expression expression
* @param date date
* @return first day of month
*/
public static Map.Entry<Date, String> calcMonthBegin(String expression, Date date) {
String addMonthExpr = expression.substring(MONTH_BEGIN.length() + 1, expression.length() - 1);
String[] params = addMonthExpr.split(COMMA);
if (params.length == 2) {
String dateFormat = params[0];
String dayExpr = params[1];
Integer day = calculate(dayExpr);
Date targetDate = DateUtils.getFirstDayOfMonth(date);
targetDate = addDays(targetDate, day);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
throw new RuntimeException("expression not valid");
}
/**
* get last day of month
*
* @param expression expression
* @param date date
* @return last day of month
*/
public static Map.Entry<Date, String> calcMonthEnd(String expression, Date date) {
String addMonthExpr = expression.substring(MONTH_END.length() + 1, expression.length() - 1);
String[] params = addMonthExpr.split(COMMA);
if (params.length == 2) {
String dateFormat = params[0];
String dayExpr = params[1];
Integer day = calculate(dayExpr);
Date targetDate = DateUtils.getLastDayOfMonth(date);
targetDate = addDays(targetDate, day);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
throw new RuntimeException("expression not valid");
}
/**
* get first day of week
*
* @param expression expression
* @param date date
* @return monday
*/
public static Map.Entry<Date, String> calcWeekStart(String expression, Date date) {
String addMonthExpr = expression.substring(WEEK_BEGIN.length() + 1, expression.length() - 1);
String[] params = addMonthExpr.split(COMMA);
if (params.length == 2) {
String dateFormat = params[0];
String dayExpr = params[1];
Integer day = calculate(dayExpr);
Date targetDate = DateUtils.getMonday(date);
targetDate = addDays(targetDate, day);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
throw new RuntimeException("expression not valid");
}
/**
* get last day of week
*
* @param expression expression
* @param date date
* @return last day of week
*/
public static Map.Entry<Date, String> calcWeekEnd(String expression, Date date) {
String addMonthExpr = expression.substring(WEEK_END.length() + 1, expression.length() - 1);
String[] params = addMonthExpr.split(COMMA);
if (params.length == 2) {
String dateFormat = params[0];
String dayExpr = params[1];
Integer day = calculate(dayExpr);
Date targetDate = DateUtils.getSunday(date);
targetDate = addDays(targetDate, day);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
throw new RuntimeException("Expression not valid");
}
/**
* calc months expression
*
* @param expression expression
* @param date date
* @return calc months
*/
public static Map.Entry<Date, String> calcMonths(String expression, Date date) {
String addMonthExpr = expression.substring(ADD_MONTHS.length() + 1, expression.length() - 1);
String[] params = addMonthExpr.split(COMMA);
if (params.length == 2) {
String dateFormat = params[0];
String monthExpr = params[1];
Integer addMonth = calculate(monthExpr);
Date targetDate = addMonths(date, addMonth);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
throw new RuntimeException("expression not valid");
}
/**
* calculate time expression
*
* @param expression expression
* @param date date
* @return entry of (calculated date, date format)
*/
public static Map.Entry<Date, String> calcMinutes(String expression, Date date) {
if (expression.contains("+")) {
int index = expression.lastIndexOf('+');
if (Character.isDigit(expression.charAt(index + 1))) {
String addMinuteExpr = expression.substring(index + 1);
Date targetDate = addMinutes(date, calcMinutes(addMinuteExpr));
String dateFormat = expression.substring(0, index);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
} else if (expression.contains("-")) {
int index = expression.lastIndexOf('-');
if (Character.isDigit(expression.charAt(index + 1))) {
String addMinuteExpr = expression.substring(index + 1);
Date targetDate = addMinutes(date, 0 - calcMinutes(addMinuteExpr));
String dateFormat = expression.substring(0, index);
return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat);
}
// yyyy-MM-dd/HH:mm:ss
return new AbstractMap.SimpleImmutableEntry<>(date, expression);
}
// $[HHmmss]
return new AbstractMap.SimpleImmutableEntry<>(date, expression);
}
/**
* calculate the number of minutes represented by the expression
*
* @param minuteExpression minute expression
* @return number of minutes
*/
public static Integer calcMinutes(String minuteExpression) {
int index = minuteExpression.indexOf('/');
String calcExpression;
if (index == -1) {
calcExpression = String.format("60*24*(%s)", minuteExpression);
} else {
calcExpression = String.format("60*24*(%s)%s", minuteExpression.substring(0, index),
minuteExpression.substring(index));
}
return calculate(calcExpression);
}
}
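A few illustrative evaluations of the time expressions handled above, assuming the usual built-in expression names (add_months, month_begin, ...) defined in TaskConstants and a schedule date of 2021-09-10 01:02:03:
Date date = DateUtils.parse("2021-09-10 01:02:03", "yyyy-MM-dd HH:mm:ss");
TimePlaceholderUtils.getPlaceHolderTime("yyyyMMdd", date);                // -> "20210910"
TimePlaceholderUtils.getPlaceHolderTime("yyyyMMdd-7", date);              // -> "20210903" (7 days earlier)
TimePlaceholderUtils.getPlaceHolderTime("add_months(yyyyMM,-1)", date);   // -> "202108"
TimePlaceholderUtils.getPlaceHolderTime("month_begin(yyyyMMdd,0)", date); // -> "20210901"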

115
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/DataxTaskRequest.java

@@ -0,0 +1,115 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.request;
/**
* DataX task execution context, transported between master and worker
*/
public class DataxTaskRequest extends TaskRequest {
/**
* dataSourceId
*/
private int dataSourceId;
/**
* sourcetype
*/
private int sourcetype;
/**
* sourceConnectionParams
*/
private String sourceConnectionParams;
/**
* dataTargetId
*/
private int dataTargetId;
/**
* targetType
*/
private int targetType;
/**
* targetConnectionParams
*/
private String targetConnectionParams;
public int getDataSourceId() {
return dataSourceId;
}
public void setDataSourceId(int dataSourceId) {
this.dataSourceId = dataSourceId;
}
public int getSourcetype() {
return sourcetype;
}
public void setSourcetype(int sourcetype) {
this.sourcetype = sourcetype;
}
public String getSourceConnectionParams() {
return sourceConnectionParams;
}
public void setSourceConnectionParams(String sourceConnectionParams) {
this.sourceConnectionParams = sourceConnectionParams;
}
public int getDataTargetId() {
return dataTargetId;
}
public void setDataTargetId(int dataTargetId) {
this.dataTargetId = dataTargetId;
}
public int getTargetType() {
return targetType;
}
public void setTargetType(int targetType) {
this.targetType = targetType;
}
public String getTargetConnectionParams() {
return targetConnectionParams;
}
public void setTargetConnectionParams(String targetConnectionParams) {
this.targetConnectionParams = targetConnectionParams;
}
@Override
public String toString() {
return "DataxTaskExecutionContext{"
+ "dataSourceId=" + dataSourceId
+ ", sourcetype=" + sourcetype
+ ", sourceConnectionParams='" + sourceConnectionParams + '\''
+ ", dataTargetId=" + dataTargetId
+ ", targetType=" + targetType
+ ", targetConnectionParams='" + targetConnectionParams + '\''
+ '}';
}
}

45
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/ProcedureTaskRequest.java

@@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.request;
/**
* Procedure task execution context, transported between master and worker
*/
public class ProcedureTaskRequest extends TaskRequest {
/**
* connectionParams
*/
private String connectionParams;
public String getConnectionParams() {
return connectionParams;
}
public void setConnectionParams(String connectionParams) {
this.connectionParams = connectionParams;
}
@Override
public String toString() {
return "ProcedureTaskExecutionContext{"
+ "connectionParams='" + connectionParams + '\''
+ '}';
}
}

80
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SQLTaskRequest.java

@@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.request;
import org.apache.dolphinscheduler.spi.task.UdfFuncBean;
import org.apache.dolphinscheduler.spi.task.UdfFuncBean.UdfFuncDeserializer;
import java.util.Map;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
/**
* SQL task execution context, transported between master and worker
*/
public class SQLTaskRequest extends TaskRequest {
/**
* warningGroupId
*/
private int warningGroupId;
/**
* connectionParams
*/
private String connectionParams;
/**
* udf function tenant code map
*/
@JsonDeserialize(keyUsing = UdfFuncDeserializer.class)
private Map<UdfFuncBean,String> udfFuncTenantCodeMap;
public int getWarningGroupId() {
return warningGroupId;
}
public void setWarningGroupId(int warningGroupId) {
this.warningGroupId = warningGroupId;
}
public Map<UdfFuncBean, String> getUdfFuncTenantCodeMap() {
return udfFuncTenantCodeMap;
}
public void setUdfFuncTenantCodeMap(Map<UdfFuncBean, String> udfFuncTenantCodeMap) {
this.udfFuncTenantCodeMap = udfFuncTenantCodeMap;
}
public String getConnectionParams() {
return connectionParams;
}
public void setConnectionParams(String connectionParams) {
this.connectionParams = connectionParams;
}
@Override
public String toString() {
return "SQLTaskExecutionContext{"
+ "warningGroupId=" + warningGroupId
+ ", connectionParams='" + connectionParams + '\''
+ ", udfFuncTenantCodeMap=" + udfFuncTenantCodeMap
+ '}';
}
}

115
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SqoopTaskRequest.java

@@ -0,0 +1,115 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task.request;
/**
* Sqoop task execution context, transported between master and worker
*/
public class SqoopTaskRequest extends TaskRequest {
/**
* dataSourceId
*/
private int dataSourceId;
/**
* sourcetype
*/
private int sourcetype;
/**
* sourceConnectionParams
*/
private String sourceConnectionParams;
/**
* dataTargetId
*/
private int dataTargetId;
/**
* targetType
*/
private int targetType;
/**
* targetConnectionParams
*/
private String targetConnectionParams;
public int getDataSourceId() {
return dataSourceId;
}
public void setDataSourceId(int dataSourceId) {
this.dataSourceId = dataSourceId;
}
public int getSourcetype() {
return sourcetype;
}
public void setSourcetype(int sourcetype) {
this.sourcetype = sourcetype;
}
public String getSourceConnectionParams() {
return sourceConnectionParams;
}
public void setSourceConnectionParams(String sourceConnectionParams) {
this.sourceConnectionParams = sourceConnectionParams;
}
public int getDataTargetId() {
return dataTargetId;
}
public void setDataTargetId(int dataTargetId) {
this.dataTargetId = dataTargetId;
}
public int getTargetType() {
return targetType;
}
public void setTargetType(int targetType) {
this.targetType = targetType;
}
public String getTargetConnectionParams() {
return targetConnectionParams;
}
public void setTargetConnectionParams(String targetConnectionParams) {
this.targetConnectionParams = targetConnectionParams;
}
@Override
public String toString() {
return "SqoopTaskExecutionContext{"
+ "dataSourceId=" + dataSourceId
+ ", sourcetype=" + sourcetype
+ ", sourceConnectionParams='" + sourceConnectionParams + '\''
+ ", dataTargetId=" + dataTargetId
+ ", targetType=" + targetType
+ ", targetConnectionParams='" + targetConnectionParams + '\''
+ '}';
}
}

16
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskRequest.java → dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/TaskRequest.java

@@ -15,17 +15,21 @@
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task;
package org.apache.dolphinscheduler.spi.task.request;
import org.apache.dolphinscheduler.spi.enums.TaskTimeoutStrategy;
import org.apache.dolphinscheduler.spi.task.Property;
import java.util.Date;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
* base task request, transported between master and worker
*/
public class TaskRequest {
/**
* task id
*/
@@ -157,7 +161,7 @@ public class TaskRequest {
/**
* task timeout strategy
*/
private int taskTimeoutStrategy;
private TaskTimeoutStrategy taskTimeoutStrategy;
/**
* task timeout
@@ -392,11 +396,11 @@ public class TaskRequest {
this.taskAppId = taskAppId;
}
public int getTaskTimeoutStrategy() {
public TaskTimeoutStrategy getTaskTimeoutStrategy() {
return taskTimeoutStrategy;
}
public void setTaskTimeoutStrategy(int taskTimeoutStrategy) {
public void setTaskTimeoutStrategy(TaskTimeoutStrategy taskTimeoutStrategy) {
this.taskTimeoutStrategy = taskTimeoutStrategy;
}

104
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CommonUtils.java

@@ -0,0 +1,104 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.utils;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JAVA_SECURITY_KRB5_CONF;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JAVA_SECURITY_KRB5_CONF_PATH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.KERBEROS;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.LOGIN_USER_KEY_TAB_PATH;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.LOGIN_USER_KEY_TAB_USERNAME;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.RESOURCE_STORAGE_TYPE;
import org.apache.dolphinscheduler.spi.enums.ResUploadType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
/**
* common utils
*/
public class CommonUtils {
private CommonUtils() {
throw new UnsupportedOperationException("Construct CommonUtils");
}
/**
* whether the resource upload type is HDFS and kerberos startup is enabled
*
* @return true if the upload type is HDFS and kerberos startup is enabled
*/
public static boolean getKerberosStartupState() {
String resUploadStartupType = PropertyUtils.getUpperCaseString(RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
Boolean kerberosStartupState = PropertyUtils.getBoolean(HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false);
return resUploadType == ResUploadType.HDFS && kerberosStartupState;
}
/**
* load kerberos configuration
*
* @param configuration hadoop configuration
* @return true if the kerberos configuration was loaded
* @throws IOException errors
*/
public static boolean loadKerberosConf(Configuration configuration) throws IOException {
return loadKerberosConf(PropertyUtils.getString(JAVA_SECURITY_KRB5_CONF_PATH),
PropertyUtils.getString(LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(LOGIN_USER_KEY_TAB_PATH), configuration);
}
/**
* load kerberos configuration
*
* @param javaSecurityKrb5Conf javaSecurityKrb5Conf
* @param loginUserKeytabUsername loginUserKeytabUsername
* @param loginUserKeytabPath loginUserKeytabPath
* @throws IOException errors
*/
public static void loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) throws IOException {
loadKerberosConf(javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath, new Configuration());
}
/**
* load kerberos configuration
*
* @param javaSecurityKrb5Conf javaSecurityKrb5Conf
* @param loginUserKeytabUsername loginUserKeytabUsername
* @param loginUserKeytabPath loginUserKeytabPath
* @param configuration configuration
* @return true if the kerberos configuration was loaded
* @throws IOException errors
*/
public static boolean loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath, Configuration configuration) throws IOException {
if (CommonUtils.getKerberosStartupState()) {
System.setProperty(JAVA_SECURITY_KRB5_CONF, StringUtils.defaultIfBlank(javaSecurityKrb5Conf, PropertyUtils.getString(JAVA_SECURITY_KRB5_CONF_PATH)));
configuration.set(HADOOP_SECURITY_AUTHENTICATION, KERBEROS);
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(StringUtils.defaultIfBlank(loginUserKeytabUsername, PropertyUtils.getString(LOGIN_USER_KEY_TAB_USERNAME)),
StringUtils.defaultIfBlank(loginUserKeytabPath, PropertyUtils.getString(LOGIN_USER_KEY_TAB_PATH)));
return true;
}
return false;
}
}
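A minimal sketch of how a task plugin might use this before touching a kerberized HDFS (illustrative; keytab and krb5 locations are read from common.properties):
Configuration hadoopConf = new Configuration();
// returns false and does nothing when kerberos startup is disabled or the storage type is not HDFS;
// declared to throw IOException if the keytab login fails
boolean loggedIn = CommonUtils.loadKerberosConf(hadoopConf);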

20
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java

@@ -52,4 +52,24 @@ public class Constants {
/** string no */
public static final String STRING_NO = "NO";
/**
* common properties path
*/
public static final String COMMON_PROPERTIES_PATH = "/common.properties";
/**
* date format of yyyy-MM-dd HH:mm:ss
*/
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
/**
* date format of yyyyMMddHHmmss
*/
public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss";
/**
* date format of yyyyMMddHHmmssSSS
*/
public static final String YYYYMMDDHHMMSSSSS = "yyyyMMddHHmmssSSS";
}

622
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/DateUtils.java

@@ -0,0 +1,622 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.utils;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Calendar;
import java.util.Date;
import java.util.Objects;
import java.util.TimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* date utils
*/
public class DateUtils {
static final long C0 = 1L;
static final long C1 = C0 * 1000L;
static final long C2 = C1 * 1000L;
static final long C3 = C2 * 1000L;
static final long C4 = C3 * 60L;
static final long C5 = C4 * 60L;
static final long C6 = C5 * 24L;
/**
* a default datetime formatter for the timestamp
*/
private static final DateTimeFormatter DEFAULT_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
private static final Logger logger = LoggerFactory.getLogger(DateUtils.class);
private DateUtils() {
throw new UnsupportedOperationException("Construct DateUtils");
}
/**
* @param timeMillis timeMillis like System.currentTimeMillis()
* @return string formatted as yyyy-MM-dd HH:mm:ss
*/
public static String formatTimeStamp(long timeMillis) {
return formatTimeStamp(timeMillis, DEFAULT_DATETIME_FORMATTER);
}
/**
* @param timeMillis timeMillis like System.currentTimeMillis()
* @param dateTimeFormatter expect formatter, like yyyy-MM-dd HH:mm:ss
* @return formatted string
*/
public static String formatTimeStamp(long timeMillis, DateTimeFormatter dateTimeFormatter) {
Objects.requireNonNull(dateTimeFormatter);
return dateTimeFormatter.format(LocalDateTime.ofInstant(Instant.ofEpochMilli(timeMillis),
ZoneId.systemDefault()));
}
/**
* date to local datetime
*
* @param date date
* @return local datetime
*/
private static LocalDateTime date2LocalDateTime(Date date) {
return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
}
/**
* local datetime to date
*
* @param localDateTime local datetime
* @return date
*/
private static Date localDateTime2Date(LocalDateTime localDateTime) {
Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant();
return Date.from(instant);
}
/**
* get current date str
*
* @return date string
*/
public static String getCurrentTime() {
return getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS);
}
/**
* get the current time as a string in the specified format
*
* @param format date format
* @return date string
*/
public static String getCurrentTime(String format) {
return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format));
}
/**
* get the formatted date string
*
* @param date date
* @param format e.g. yyyy-MM-dd HH:mm:ss
* @return date string
*/
public static String format(Date date, String format) {
return format(date2LocalDateTime(date), format);
}
/**
* get the formatted date string
*
* @param localDateTime local data time
* @param format yyyy-MM-dd HH:mm:ss
* @return date string
*/
public static String format(LocalDateTime localDateTime, String format) {
return localDateTime.format(DateTimeFormatter.ofPattern(format));
}
/**
* convert time to yyyy-MM-dd HH:mm:ss format
*
* @param date date
* @return date string
*/
public static String dateToString(Date date) {
return format(date, Constants.YYYY_MM_DD_HH_MM_SS);
}
/**
* convert string to date and time
*
* @param date date
* @param format format
* @return date
*/
public static Date parse(String date, String format) {
try {
LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format));
return localDateTime2Date(ldt);
} catch (Exception e) {
logger.error("error while parse date:" + date, e);
}
return null;
}
/**
* parse a date string in yyyy-MM-dd HH:mm:ss format
*
* @param str date string
* @return parsed date
*/
public static Date stringToDate(String str) {
return parse(str, Constants.YYYY_MM_DD_HH_MM_SS);
}
/**
* get seconds between two dates
*
* @param d1 date1
* @param d2 date2
* @return differ seconds
*/
public static long differSec(Date d1, Date d2) {
if (d1 == null || d2 == null) {
return 0;
}
return (long) Math.ceil(differMs(d1, d2) / 1000.0);
}
/**
* get ms between two dates
*
* @param d1 date1
* @param d2 date2
* @return differ ms
*/
public static long differMs(Date d1, Date d2) {
return Math.abs(d1.getTime() - d2.getTime());
}
/**
* get hours between two dates
*
* @param d1 date1
* @param d2 date2
* @return differ hours
*/
public static long diffHours(Date d1, Date d2) {
return (long) Math.ceil(diffMin(d1, d2) / 60.0);
}
/**
* get minutes between two dates
*
* @param d1 date1
* @param d2 date2
* @return differ minutes
*/
public static long diffMin(Date d1, Date d2) {
return (long) Math.ceil(differSec(d1, d2) / 60.0);
}
/**
* get the date offset from the specified date by the given number of days
*
* @param date date
* @param day number of days to add (may be negative)
* @return the offset date
*/
public static Date getSomeDay(Date date, int day) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.add(Calendar.DATE, day);
return calendar.getTime();
}
/**
* get the hour of day.
*
* @param date date
* @return hour of day
*/
public static int getHourIndex(Date date) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar.get(Calendar.HOUR_OF_DAY);
}
/**
* compare two dates
*
* @param future future date
* @param old old date
* @return true if future time greater than old time
*/
public static boolean compare(Date future, Date old) {
return future.getTime() > old.getTime();
}
/**
* convert schedule string to date
*
* @param schedule schedule
* @return convert schedule string to date
*/
public static Date getScheduleDate(String schedule) {
return stringToDate(schedule);
}
/**
* format time to readable
*
* @param ms ms
* @return format time
*/
public static String format2Readable(long ms) {
long days = MILLISECONDS.toDays(ms);
long hours = MILLISECONDS.toDurationHours(ms);
long minutes = MILLISECONDS.toDurationMinutes(ms);
long seconds = MILLISECONDS.toDurationSeconds(ms);
return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds);
}
/**
* format time to duration
*
* @param d1 d1
* @param d2 d2
* @return format time
*/
public static String format2Duration(Date d1, Date d2) {
if (d1 == null || d2 == null) {
return null;
}
return format2Duration(differMs(d1, d2));
}
/**
* format time to duration
*
* @param ms ms
* @return format time
*/
public static String format2Duration(long ms) {
long days = MILLISECONDS.toDays(ms);
long hours = MILLISECONDS.toDurationHours(ms);
long minutes = MILLISECONDS.toDurationMinutes(ms);
long seconds = MILLISECONDS.toDurationSeconds(ms);
StringBuilder strBuilder = new StringBuilder();
strBuilder = days > 0 ? strBuilder.append(days).append("d").append(" ") : strBuilder;
strBuilder = hours > 0 ? strBuilder.append(hours).append("h").append(" ") : strBuilder;
strBuilder = minutes > 0 ? strBuilder.append(minutes).append("m").append(" ") : strBuilder;
strBuilder = seconds > 0 ? strBuilder.append(seconds).append("s") : strBuilder;
return strBuilder.toString();
}
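// e.g. format2Duration(90_061_000L) -> "1d 1h 1m 1s" (assuming the MILLISECONDS helper defined later in this class)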
/**
* get monday
* <p>
* note: Set the first day of the week to Monday, the default is Sunday
*
* @param date date
* @return get monday
*/
public static Date getMonday(Date date) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.setFirstDayOfWeek(Calendar.MONDAY);
cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
return cal.getTime();
}
/**
* get sunday
* <p>
* note: Set the first day of the week to Monday, the default is Sunday
*
* @param date date
* @return get sunday
*/
public static Date getSunday(Date date) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.setFirstDayOfWeek(Calendar.MONDAY);
cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
return cal.getTime();
}
/**
* get first day of month
*
* @param date date
* @return first day of month
*/
public static Date getFirstDayOfMonth(Date date) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.DAY_OF_MONTH, 1);
return cal.getTime();
}
/**
* get some hour of day
*
* @param date date
* @param offsetHour hours
* @return some hour of day
*/
public static Date getSomeHourOfDay(Date date, int offsetHour) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + offsetHour);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTime();
}
/**
* get last day of month
*
* @param date date
* @return get last day of month
*/
public static Date getLastDayOfMonth(Date date) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.add(Calendar.MONTH, 1);
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.add(Calendar.DAY_OF_MONTH, -1);
return cal.getTime();
}
/**
* return YYYY-MM-DD 00:00:00
*
* @param inputDay date
* @return start day
*/
public static Date getStartOfDay(Date inputDay) {
Calendar cal = Calendar.getInstance();
cal.setTime(inputDay);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTime();
}
/**
* return YYYY-MM-DD 23:59:59
*
* @param inputDay day
* @return end of day
*/
public static Date getEndOfDay(Date inputDay) {
Calendar cal = Calendar.getInstance();
cal.setTime(inputDay);
cal.set(Calendar.HOUR_OF_DAY, 23);
cal.set(Calendar.MINUTE, 59);
cal.set(Calendar.SECOND, 59);
cal.set(Calendar.MILLISECOND, 999);
return cal.getTime();
}
/**
* return YYYY-MM-DD HH:00:00
*
* @param inputDay day
* @return start of hour
*/
public static Date getStartOfHour(Date inputDay) {
Calendar cal = Calendar.getInstance();
cal.setTime(inputDay);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTime();
}
/**
* return YYYY-MM-DD HH:59:59
*
* @param inputDay day
* @return end of hour
*/
public static Date getEndOfHour(Date inputDay) {
Calendar cal = Calendar.getInstance();
cal.setTime(inputDay);
cal.set(Calendar.MINUTE, 59);
cal.set(Calendar.SECOND, 59);
cal.set(Calendar.MILLISECOND, 999);
return cal.getTime();
}
/**
* get current date
*
* @return current date
*/
public static Date getCurrentDate() {
return DateUtils.parse(DateUtils.getCurrentTime(),
Constants.YYYY_MM_DD_HH_MM_SS);
}
public static Date addYears(Date date, int amount) {
return add(date, Calendar.YEAR, amount);
}
public static Date addMonths(Date date, int amount) {
return add(date, Calendar.MONTH, amount);
}
public static Date addWeeks(Date date, int amount) {
return add(date, Calendar.WEEK_OF_YEAR, amount);
}
public static Date addDays(Date date, int amount) {
return add(date, Calendar.DAY_OF_MONTH, amount);
}
public static Date addHours(Date date, int amount) {
return add(date, Calendar.HOUR_OF_DAY, amount);
}
public static Date addMinutes(Date date, int amount) {
return add(date, Calendar.MINUTE, amount);
}
public static Date addSeconds(Date date, int amount) {
return add(date, Calendar.SECOND, amount);
}
public static Date addMilliseconds(Date date, int amount) {
return add(date, Calendar.MILLISECOND, amount);
}
/**
* add the given amount to the specified calendar field of a date
*
* @param date date
* @param calendarField calendarField
* @param amount amount
* @return date
*/
public static Date add(final Date date, final int calendarField, final int amount) {
if (date == null) {
throw new IllegalArgumentException("The date must not be null");
}
final Calendar c = Calendar.getInstance();
c.setTime(date);
c.add(calendarField, amount);
return c.getTime();
}
/**
* starting from the current time, get how many seconds are left before the target time.
* targetTime = baseTime + intervalSeconds
*
* @param baseTime base time
* @param intervalSeconds a period of time
* @return the number of seconds
*/
public static long getRemainTime(Date baseTime, long intervalSeconds) {
if (baseTime == null) {
return 0;
}
long usedTime = (System.currentTimeMillis() - baseTime.getTime()) / 1000;
return intervalSeconds - usedTime;
}
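A small usage fragment for the remaining-time helper (values illustrative):

    Date baseTime = new Date(System.currentTimeMillis() - 30_000L); // started 30 seconds ago
    long remain = DateUtils.getRemainTime(baseTime, 120);           // roughly 90 seconds left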
/**
* get current time stamp : yyyyMMddHHmmssSSS
*
* @return date string
*/
public static String getCurrentTimeStamp() {
return getCurrentTime(Constants.YYYYMMDDHHMMSSSSS);
}
/**
* transform date to target timezone date
* <p>e.g.
* <p>if the input date is 2020-01-01 00:00:00 and the current timezone is CST,
* <p>and targetTimezoneId is MST,
* <p>this method will return the date whose wall-clock time in CST is 2020-01-01 15:00:00
*/
public static Date getTimezoneDate(Date date, String targetTimezoneId) {
if (StringUtils.isEmpty(targetTimezoneId)) {
return date;
}
String dateToString = dateToString(date);
LocalDateTime localDateTime = LocalDateTime.parse(dateToString, DateTimeFormatter.ofPattern(Constants.YYYY_MM_DD_HH_MM_SS));
ZonedDateTime zonedDateTime = ZonedDateTime.of(localDateTime, TimeZone.getTimeZone(targetTimezoneId).toZoneId());
return Date.from(zonedDateTime.toInstant());
}
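A usage fragment for the timezone conversion above (the timezone id is illustrative; note that java.util.TimeZone silently falls back to GMT for unrecognized ids):

    // reinterpret the current wall-clock time as if it were in America/Denver
    Date shifted = DateUtils.getTimezoneDate(new Date(), "America/Denver");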
/**
* get timezone by timezoneId
*/
public static TimeZone getTimezone(String timezoneId) {
if (StringUtils.isEmpty(timezoneId)) {
return null;
}
return TimeZone.getTimeZone(timezoneId);
}
/**
* Time unit representing one thousandth of a second
*/
public static class MILLISECONDS {
public static long toSeconds(long d) {
return d / (C3 / C2);
}
public static long toMinutes(long d) {
return d / (C4 / C2);
}
public static long toHours(long d) {
return d / (C5 / C2);
}
public static long toDays(long d) {
return d / (C6 / C2);
}
public static long toDurationSeconds(long d) {
return (d % (C4 / C2)) / (C3 / C2);
}
public static long toDurationMinutes(long d) {
return (d % (C5 / C2)) / (C4 / C2);
}
public static long toDurationHours(long d) {
return (d % (C6 / C2)) / (C5 / C2);
}
}
}

260
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/PropertyUtils.java

@ -0,0 +1,260 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.utils;
import static org.apache.dolphinscheduler.spi.utils.Constants.COMMON_PROPERTIES_PATH;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PropertyUtils {
private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class);
private static final Properties properties = new Properties();
private PropertyUtils() {
throw new UnsupportedOperationException("Construct PropertyUtils");
}
static {
loadPropertyFile(COMMON_PROPERTIES_PATH);
}
public static synchronized void loadPropertyFile(String... propertyFiles) {
for (String fileName : propertyFiles) {
try (InputStream fis = PropertyUtils.class.getResourceAsStream(fileName)) {
properties.load(fis);
} catch (IOException e) {
logger.error(e.getMessage(), e);
System.exit(1);
}
}
// Override from system properties
System.getProperties().forEach((k, v) -> {
final String key = String.valueOf(k);
logger.info("Overriding property from system property: {}", key);
PropertyUtils.setValue(key, String.valueOf(v));
});
}
/**
* get property value
*
* @param key property name
* @return property value
*/
public static String getString(String key) {
return properties.getProperty(key.trim());
}
/**
* get property value with upper case
*
* @param key property name
* @return property value with upper case
*/
public static String getUpperCaseString(String key) {
return properties.getProperty(key.trim()).toUpperCase();
}
/**
* get property value
*
* @param key property name
* @param defaultVal default value
* @return property value
*/
public static String getString(String key, String defaultVal) {
String val = properties.getProperty(key.trim());
return val == null ? defaultVal : val;
}
/**
* get property value
*
* @param key property name
* @return property value as int; -1 if the key is missing or the value is not a number
*/
public static int getInt(String key) {
return getInt(key, -1);
}
/**
* @param key key
* @param defaultValue default value
* @return property value
*/
public static int getInt(String key, int defaultValue) {
String value = getString(key);
if (value == null) {
return defaultValue;
}
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
logger.info(e.getMessage(), e);
}
return defaultValue;
}
/**
* get property value
*
* @param key property name
* @return property value
*/
public static boolean getBoolean(String key) {
String value = properties.getProperty(key.trim());
if (null != value) {
return Boolean.parseBoolean(value);
}
return false;
}
/**
* get property value
*
* @param key property name
* @param defaultValue default value
* @return property value
*/
public static Boolean getBoolean(String key, boolean defaultValue) {
String value = properties.getProperty(key.trim());
if (null != value) {
return Boolean.parseBoolean(value);
}
return defaultValue;
}
/**
* get property long value
*
* @param key key
* @param defaultVal default value
* @return property value
*/
public static long getLong(String key, long defaultVal) {
String val = getString(key);
return val == null ? defaultVal : Long.parseLong(val);
}
/**
* @param key key
* @return property value
*/
public static long getLong(String key) {
return getLong(key, -1);
}
/**
* @param key key
* @param defaultVal default value
* @return property value
*/
public static double getDouble(String key, double defaultVal) {
String val = getString(key);
return val == null ? defaultVal : Double.parseDouble(val);
}
/**
* get array
*
* @param key property name
* @param splitStr separator
* @return property value through array
*/
public static String[] getArray(String key, String splitStr) {
String value = getString(key);
if (value == null) {
return new String[0];
}
return value.split(splitStr);
}
/**
* @param key key
* @param type type
* @param defaultValue default value
* @param <T> T
* @return get enum value
*/
public static <T extends Enum<T>> T getEnum(String key, Class<T> type,
T defaultValue) {
String val = getString(key);
return val == null ? defaultValue : Enum.valueOf(type, val);
}
/**
* get all properties with specified prefix, like: fs.
*
* @param prefix prefix to search
* @return all properties with specified prefix
*/
public static Map<String, String> getPrefixedProperties(String prefix) {
Map<String, String> matchedProperties = new HashMap<>();
for (String propName : properties.stringPropertyNames()) {
if (propName.startsWith(prefix)) {
matchedProperties.put(propName, properties.getProperty(propName));
}
}
return matchedProperties;
}
/**
* set a property value in the in-memory properties
*/
public static void setValue(String key, String value) {
properties.setProperty(key, value);
}
public static Map<String, String> getPropertiesByPrefix(String prefix) {
if (StringUtils.isEmpty(prefix)) {
return null;
}
Set<Object> keys = properties.keySet();
if (keys.isEmpty()) {
return null;
}
Map<String, String> propertiesMap = new HashMap<>();
keys.forEach(k -> {
if (k.toString().contains(prefix)) {
propertiesMap.put(k.toString().replaceFirst(prefix + ".", ""), properties.getProperty((String) k));
}
});
return propertiesMap;
}
}
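A minimal usage sketch for PropertyUtils, assuming a common.properties file is on the classpath; the keys shown are illustrative, not keys the loader itself requires:

    String basedir = PropertyUtils.getString("data.basedir.path", "/tmp/dolphinscheduler");
    int execThreads = PropertyUtils.getInt("worker.exec.threads", 100);
    boolean devState = PropertyUtils.getBoolean("development.state", false);
    // collect every property whose name starts with "fs."
    Map<String, String> fsConf = PropertyUtils.getPrefixedProperties("fs.");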

195
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java

@ -17,25 +17,210 @@
package org.apache.dolphinscheduler.spi.utils;
import java.util.Collection;
import java.util.Iterator;
/**
* java.lang.String utils class
*/
public class StringUtils {
/**
* The empty String {@code ""}.
*/
public static final String EMPTY = "";
public static final int INDEX_NOT_FOUND = -1;
private StringUtils() {
throw new UnsupportedOperationException("Construct StringUtils");
}
/**
* <p>Checks if a CharSequence is empty ("") or null.</p>
*
* @param cs the CharSequence to check, may be null
* @return {@code true} if the CharSequence is empty or null
*/
public static boolean isEmpty(final CharSequence cs) {
return cs == null || cs.length() == 0;
}
/**
* <p>Checks if a CharSequence is not empty ("") and not null.</p>
*
* @param cs the CharSequence to check, may be null
* @return {@code true} if the CharSequence is not empty and not null
*/
public static boolean isNotEmpty(final CharSequence cs) {
return !isEmpty(cs);
}
/**
* <p>Checks if a CharSequence is empty (""), null or whitespace only.</p>
*
* @param cs the CharSequence to check, may be null
* @return {@code true} if the CharSequence is null, empty or whitespace only
*/
public static boolean isBlank(final CharSequence cs) {
int strLen;
if (cs == null || (strLen = cs.length()) == 0) {
return true;
}
for (int i = 0; i < strLen; i++) {
if (!Character.isWhitespace(cs.charAt(i))) {
return false;
}
}
return true;
}
/**
* <p>Checks if a CharSequence is not empty (""), not null and not whitespace only.</p>
*
* @param cs the CharSequence to check, may be null
* @return {@code true} if the CharSequence is not empty and not null and not whitespace only
*/
public static boolean isNotBlank(final CharSequence cs) {
return !isBlank(cs);
}
/**
* <p>Replaces every \n, \r and \t character in the string with an underscore.</p>
*
* @param src the String, may be null
* @return the string after replacement
*/
public static String replaceNRTtoUnderline(String src) {
return isBlank(src) ? src : src.replaceAll("[\n\r\t]", "_");
}
/**
* <p>Removes control characters (char &lt;= 32) from both
* ends of this String, handling {@code null} by returning
* {@code null}.</p>
*
* @param str the String to be trimmed, may be null
* @return the trimmed string, {@code null} if null String input
*/
public static String trim(final String str) {
return str == null ? null : str.trim();
}
/**
* <p>Returns either the passed in CharSequence, or if the CharSequence is
* whitespace, empty ("") or {@code null}, the value of {@code defaultStr}.</p>
*
* @param <T> the specific kind of CharSequence
* @param str the CharSequence to check, may be null
* @param defaultStr the default CharSequence to return
* if the input is whitespace, empty ("") or {@code null}, may be null
* @return the passed in CharSequence, or the default
*/
public static <T extends CharSequence> T defaultIfBlank(final T str, final T defaultStr) {
return isBlank(str) ? defaultStr : str;
}
/**
* <p>Compares two String, returning {@code true} if they represent
* equal string, ignoring case.</p>
*
* @param str1 the first String, may be null
* @param str2 the second String, may be null
* @return {@code true} if the String are equal, case insensitive, or
* both {@code null}
*/
public static boolean equalsIgnoreCase(String str1, String str2) {
return str1 == null ? str2 == null : str1.equalsIgnoreCase(str2);
}
public static String substringBefore(final String str, final String separator) {
if (isEmpty(str) || separator == null) {
return str;
}
if (separator.isEmpty()) {
return EMPTY;
}
final int pos = str.indexOf(separator);
if (pos == INDEX_NOT_FOUND) {
return str;
}
return str.substring(0, pos);
}
public static String substringAfter(final String str, final String separator) {
if (isEmpty(str)) {
return str;
}
if (separator == null) {
return EMPTY;
}
final int pos = str.indexOf(separator);
if (pos == INDEX_NOT_FOUND) {
return EMPTY;
}
return str.substring(pos + separator.length());
}
public static long strDigitToLong(String str, long defaultValue) {
if (str == null) {
return defaultValue;
} else {
try {
return Long.parseLong(str);
} catch (NumberFormatException var4) {
return defaultValue;
}
}
}
/**
* <p>Joins the elements of the provided Collection into a single String
* containing the provided Collection of elements.</p>
*
* @param collection the collection, may be null
* @param separator the separator
* @return a single String
*/
public static String join(Collection<?> collection, String separator) {
return collection == null ? null : join(collection.iterator(), separator);
}
/**
* <p>Joins the elements of the provided Iterator into a single String
* containing the provided Iterator of elements.</p>
*
* @param iterator the iterator, may be null
* @param separator the separator
* @return a single String
*/
public static String join(Iterator<?> iterator, String separator) {
if (iterator == null) {
return null;
} else if (!iterator.hasNext()) {
return "";
} else {
Object first = iterator.next();
if (!iterator.hasNext()) {
return first == null ? "" : first.toString();
} else {
StringBuilder buf = new StringBuilder(256);
if (first != null) {
buf.append(first);
}
while (iterator.hasNext()) {
if (separator != null) {
buf.append(separator);
}
Object obj = iterator.next();
if (obj != null) {
buf.append(obj);
}
}
return buf.toString();
}
}
}
}
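A few quick checks of the string helpers above (fragment; inputs illustrative):

    StringUtils.isBlank("  \t ");                                 // true
    StringUtils.substringBefore("a.b.c", ".");                    // "a"
    StringUtils.substringAfter("a.b.c", ".");                     // "b.c"
    StringUtils.join(java.util.Arrays.asList("x", "y"), ",");     // "x,y"
    StringUtils.replaceNRTtoUnderline("a\tb\nc");                 // "a_b_c"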

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java

@ -25,7 +25,7 @@ import org.apache.dolphinscheduler.plugin.task.util.LoggerUtils;
import org.apache.dolphinscheduler.plugin.task.util.OSUtils;
import org.apache.dolphinscheduler.plugin.task.util.ThreadUtils;
import org.apache.dolphinscheduler.spi.task.TaskConstants;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.io.BufferedReader;

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java

@ -19,7 +19,7 @@ package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.util.LoggerUtils;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import java.util.List;

9
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java

@ -17,10 +17,7 @@
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.slf4j.Logger;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
/**
* abstract yarn task
@ -47,7 +44,7 @@ public abstract class AbstractYarnTask extends AbstractTaskExecutor {
public void handle() throws Exception {
try {
// SHELL task exit code
TaskResponse response = shellCommandExecutor.run(getCommand());
TaskResponse response = shellCommandExecutor.run(buildCommand());
setExitStatusCode(response.getExitStatusCode());
setAppIds(response.getAppIds());
setProcessId(response.getProcessId());
@ -83,7 +80,7 @@ public abstract class AbstractYarnTask extends AbstractTaskExecutor {
* @return String
* @throws Exception exception
*/
protected abstract String getCommand();
protected abstract String buildCommand();
/**
* set main jar name

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ProcessUtils.java

@ -19,7 +19,7 @@ package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.util.OSUtils;
import org.apache.dolphinscheduler.spi.task.TaskConstants;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import java.io.File;
import java.util.ArrayList;

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ShellCommandExecutor.java

@ -18,7 +18,7 @@
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.util.OSUtils;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.commons.io.FileUtils;

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContextCacheManager.java

@ -17,7 +17,7 @@
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

12
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/pom.xml

@ -26,6 +26,7 @@
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-task-datax</artifactId>
<packaging>dolphinscheduler-plugin</packaging>
<dependencies>
<dependency>
@ -39,8 +40,15 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
</dependency>
</dependencies>
<build>
<finalName>dolphinscheduler-task-datax-${project.version}</finalName>
</build>
</project>

251
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxParameters.java

@ -0,0 +1,251 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import org.apache.dolphinscheduler.spi.enums.Flag;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.ResourceInfo;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* DataX parameter
*/
public class DataxParameters extends AbstractParameters {
/**
* whether a custom json config is used, e.g. 0 or 1
*/
private int customConfig;
/**
* if customConfig == 1, then json is used
*/
private String json;
/**
* data source type, e.g. MYSQL, POSTGRES ...
*/
private String dsType;
/**
* datasource id
*/
private int dataSource;
/**
* data target type, e.g. MYSQL, POSTGRES ...
*/
private String dtType;
/**
* data target id
*/
private int dataTarget;
/**
* sql
*/
private String sql;
/**
* target table
*/
private String targetTable;
/**
* Pre Statements
*/
private List<String> preStatements;
/**
* Post Statements
*/
private List<String> postStatements;
/**
* speed byte num
*/
private int jobSpeedByte;
/**
* speed record count
*/
private int jobSpeedRecord;
/**
* Xms memory
*/
private int xms;
/**
* Xmx memory
*/
private int xmx;
public int getCustomConfig() {
return customConfig;
}
public void setCustomConfig(int customConfig) {
this.customConfig = customConfig;
}
public String getJson() {
return json;
}
public void setJson(String json) {
this.json = json;
}
public String getDsType() {
return dsType;
}
public void setDsType(String dsType) {
this.dsType = dsType;
}
public int getDataSource() {
return dataSource;
}
public void setDataSource(int dataSource) {
this.dataSource = dataSource;
}
public String getDtType() {
return dtType;
}
public void setDtType(String dtType) {
this.dtType = dtType;
}
public int getDataTarget() {
return dataTarget;
}
public void setDataTarget(int dataTarget) {
this.dataTarget = dataTarget;
}
public String getSql() {
return sql;
}
public void setSql(String sql) {
this.sql = sql;
}
public String getTargetTable() {
return targetTable;
}
public void setTargetTable(String targetTable) {
this.targetTable = targetTable;
}
public List<String> getPreStatements() {
return preStatements;
}
public void setPreStatements(List<String> preStatements) {
this.preStatements = preStatements;
}
public List<String> getPostStatements() {
return postStatements;
}
public void setPostStatements(List<String> postStatements) {
this.postStatements = postStatements;
}
public int getJobSpeedByte() {
return jobSpeedByte;
}
public void setJobSpeedByte(int jobSpeedByte) {
this.jobSpeedByte = jobSpeedByte;
}
public int getJobSpeedRecord() {
return jobSpeedRecord;
}
public void setJobSpeedRecord(int jobSpeedRecord) {
this.jobSpeedRecord = jobSpeedRecord;
}
public int getXms() {
return xms;
}
public void setXms(int xms) {
this.xms = xms;
}
public int getXmx() {
return xmx;
}
public void setXmx(int xmx) {
this.xmx = xmx;
}
@Override
public boolean checkParameters() {
if (customConfig == Flag.NO.ordinal()) {
return dataSource != 0
&& dataTarget != 0
&& StringUtils.isNotEmpty(sql)
&& StringUtils.isNotEmpty(targetTable);
} else {
return StringUtils.isNotEmpty(json);
}
}
@Override
public List<ResourceInfo> getResourceFilesList() {
return new ArrayList<>();
}
@Override
public String toString() {
return "DataxParameters{"
+ "customConfig=" + customConfig
+ ", json='" + json + '\''
+ ", dsType='" + dsType + '\''
+ ", dataSource=" + dataSource
+ ", dtType='" + dtType + '\''
+ ", dataTarget=" + dataTarget
+ ", sql='" + sql + '\''
+ ", targetTable='" + targetTable + '\''
+ ", preStatements=" + preStatements
+ ", postStatements=" + postStatements
+ ", jobSpeedByte=" + jobSpeedByte
+ ", jobSpeedRecord=" + jobSpeedRecord
+ ", xms=" + xms
+ ", xmx=" + xmx
+ '}';
}
}
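A sketch of the non-custom (customConfig == 0) task-param JSON that checkParameters() accepts, parsed the same way DataxTask.init() does below; every concrete value here is illustrative:

    String taskParams = "{\"customConfig\":0,\"dsType\":\"MYSQL\",\"dataSource\":1,"
            + "\"dtType\":\"MYSQL\",\"dataTarget\":2,"
            + "\"sql\":\"SELECT id, name FROM src_table\",\"targetTable\":\"dst_table\","
            + "\"jobSpeedByte\":0,\"jobSpeedRecord\":1000,\"xms\":1,\"xmx\":1}";
    DataxParameters parameters = JSONUtils.parseObject(taskParams, DataxParameters.class);
    // with customConfig == 0, dataSource, dataTarget, sql and targetTable must all be set
    boolean valid = parameters != null && parameters.checkParameters();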

570
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java

@ -0,0 +1,570 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.RWXR_XR_X;
import static org.apache.dolphinscheduler.spi.task.datasource.PasswordUtils.decodePassword;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskResponse;
import org.apache.dolphinscheduler.plugin.task.util.OSUtils;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.enums.Flag;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.task.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.task.datasource.DatasourceUtil;
import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils;
import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils;
import org.apache.dolphinscheduler.spi.task.request.DataxTaskRequest;
import org.apache.dolphinscheduler.spi.utils.CollectionUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
import com.alibaba.druid.sql.ast.statement.SQLUnionQuery;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class DataxTask extends AbstractTaskExecutor {
/**
* jvm parameters
*/
public static final String JVM_PARAM = " --jvm=\"-Xms%sG -Xmx%sG\" ";
/**
* python process (datax only supports python 2.7 by default)
*/
private static final String DATAX_PYTHON = "python2.7";
private static final Pattern PYTHON_PATH_PATTERN = Pattern.compile("/bin/python[\\d.]*$");
/**
* datax path
*/
private static final String DATAX_PATH = "${DATAX_HOME}/bin/datax.py";
/**
* datax channel count
*/
private static final int DATAX_CHANNEL_COUNT = 1;
/**
* datax parameters
*/
private DataxParameters dataXParameters;
/**
* shell command executor
*/
private ShellCommandExecutor shellCommandExecutor;
/**
* taskExecutionContext
*/
private DataxTaskRequest taskExecutionContext;
/**
* constructor
*
* @param taskExecutionContext taskExecutionContext
*/
public DataxTask(DataxTaskRequest taskExecutionContext) {
super(taskExecutionContext);
this.taskExecutionContext = taskExecutionContext;
this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle,
taskExecutionContext, logger);
}
/**
* init DataX config
*/
@Override
public void init() {
logger.info("datax task params {}", taskExecutionContext.getTaskParams());
dataXParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DataxParameters.class);
if (!dataXParameters.checkParameters()) {
throw new RuntimeException("datax task params is not valid");
}
}
/**
* run DataX process
*
* @throws Exception if error throws Exception
*/
@Override
public void handle() throws Exception {
try {
// set the name of the current thread
String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskExecutionContext.getTaskAppId());
Thread.currentThread().setName(threadLoggerInfoName);
// replace placeholders and combine local and global parameters
Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext, getParameters());
// run datax process
String jsonFilePath = buildDataxJsonFile(paramsMap);
String shellCommandFilePath = buildShellCommandFile(jsonFilePath, paramsMap);
TaskResponse commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath);
setExitStatusCode(commandExecuteResult.getExitStatusCode());
setAppIds(commandExecuteResult.getAppIds());
setProcessId(commandExecuteResult.getProcessId());
} catch (Exception e) {
setExitStatusCode(EXIT_CODE_FAILURE);
throw e;
}
}
/**
* cancel DataX process
*
* @param cancelApplication cancelApplication
* @throws Exception if error throws Exception
*/
@Override
public void cancelApplication(boolean cancelApplication)
throws Exception {
// cancel process
shellCommandExecutor.cancelApplication();
}
/**
* build datax configuration file
*
* @return datax json file name
* @throws Exception if error throws Exception
*/
private String buildDataxJsonFile(Map<String, Property> paramsMap)
throws Exception {
// generate json
String fileName = String.format("%s/%s_job.json",
taskExecutionContext.getExecutePath(),
taskExecutionContext.getTaskAppId());
String json;
Path path = new File(fileName).toPath();
if (Files.exists(path)) {
return fileName;
}
if (dataXParameters.getCustomConfig() == Flag.YES.ordinal()) {
json = dataXParameters.getJson().replaceAll("\\r\\n", "\n");
} else {
ObjectNode job = JSONUtils.createObjectNode();
job.putArray("content").addAll(buildDataxJobContentJson());
job.set("setting", buildDataxJobSettingJson());
ObjectNode root = JSONUtils.createObjectNode();
root.set("job", job);
root.set("core", buildDataxCoreJson());
json = root.toString();
}
// replace placeholder
json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap));
logger.debug("datax job json : {}", json);
// create datax json file
FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8);
return fileName;
}
/**
* build datax job config
*
* @return collection of datax job config JSONObject
* @throws SQLException if error throws SQLException
*/
private List<ObjectNode> buildDataxJobContentJson() {
BaseConnectionParam dataSourceCfg = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
DbType.of(taskExecutionContext.getSourcetype()),
taskExecutionContext.getSourceConnectionParams());
BaseConnectionParam dataTargetCfg = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
DbType.of(taskExecutionContext.getTargetType()),
taskExecutionContext.getTargetConnectionParams());
List<ObjectNode> readerConnArr = new ArrayList<>();
ObjectNode readerConn = JSONUtils.createObjectNode();
ArrayNode sqlArr = readerConn.putArray("querySql");
for (String sql : new String[]{dataXParameters.getSql()}) {
sqlArr.add(sql);
}
ArrayNode urlArr = readerConn.putArray("jdbcUrl");
urlArr.add(DatasourceUtil.getJdbcUrl(DbType.valueOf(dataXParameters.getDsType()), dataSourceCfg));
readerConnArr.add(readerConn);
ObjectNode readerParam = JSONUtils.createObjectNode();
readerParam.put("username", dataSourceCfg.getUser());
readerParam.put("password", decodePassword(dataSourceCfg.getPassword()));
readerParam.putArray("connection").addAll(readerConnArr);
ObjectNode reader = JSONUtils.createObjectNode();
reader.put("name", DataxUtils.getReaderPluginName(DbType.of(taskExecutionContext.getSourcetype())));
reader.set("parameter", readerParam);
List<ObjectNode> writerConnArr = new ArrayList<>();
ObjectNode writerConn = JSONUtils.createObjectNode();
ArrayNode tableArr = writerConn.putArray("table");
tableArr.add(dataXParameters.getTargetTable());
writerConn.put("jdbcUrl", DatasourceUtil.getJdbcUrl(DbType.valueOf(dataXParameters.getDtType()), dataTargetCfg));
writerConnArr.add(writerConn);
ObjectNode writerParam = JSONUtils.createObjectNode();
writerParam.put("username", dataTargetCfg.getUser());
writerParam.put("password", decodePassword(dataTargetCfg.getPassword()));
String[] columns = parsingSqlColumnNames(DbType.of(taskExecutionContext.getSourcetype()),
DbType.of(taskExecutionContext.getTargetType()),
dataSourceCfg, dataXParameters.getSql());
ArrayNode columnArr = writerParam.putArray("column");
for (String column : columns) {
columnArr.add(column);
}
writerParam.putArray("connection").addAll(writerConnArr);
if (CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) {
ArrayNode preSqlArr = writerParam.putArray("preSql");
for (String preSql : dataXParameters.getPreStatements()) {
preSqlArr.add(preSql);
}
}
if (CollectionUtils.isNotEmpty(dataXParameters.getPostStatements())) {
ArrayNode postSqlArr = writerParam.putArray("postSql");
for (String postSql : dataXParameters.getPostStatements()) {
postSqlArr.add(postSql);
}
}
ObjectNode writer = JSONUtils.createObjectNode();
writer.put("name", DataxUtils.getWriterPluginName(DbType.of(taskExecutionContext.getTargetType())));
writer.set("parameter", writerParam);
List<ObjectNode> contentList = new ArrayList<>();
ObjectNode content = JSONUtils.createObjectNode();
content.set("reader", reader);
content.set("writer", writer);
contentList.add(content);
return contentList;
}
/**
* build datax setting config
*
* @return datax setting config JSONObject
*/
private ObjectNode buildDataxJobSettingJson() {
ObjectNode speed = JSONUtils.createObjectNode();
speed.put("channel", DATAX_CHANNEL_COUNT);
if (dataXParameters.getJobSpeedByte() > 0) {
speed.put("byte", dataXParameters.getJobSpeedByte());
}
if (dataXParameters.getJobSpeedRecord() > 0) {
speed.put("record", dataXParameters.getJobSpeedRecord());
}
ObjectNode errorLimit = JSONUtils.createObjectNode();
errorLimit.put("record", 0);
errorLimit.put("percentage", 0);
ObjectNode setting = JSONUtils.createObjectNode();
setting.set("speed", speed);
setting.set("errorLimit", errorLimit);
return setting;
}
private ObjectNode buildDataxCoreJson() {
ObjectNode speed = JSONUtils.createObjectNode();
speed.put("channel", DATAX_CHANNEL_COUNT);
if (dataXParameters.getJobSpeedByte() > 0) {
speed.put("byte", dataXParameters.getJobSpeedByte());
}
if (dataXParameters.getJobSpeedRecord() > 0) {
speed.put("record", dataXParameters.getJobSpeedRecord());
}
ObjectNode channel = JSONUtils.createObjectNode();
channel.set("speed", speed);
ObjectNode transport = JSONUtils.createObjectNode();
transport.set("channel", channel);
ObjectNode core = JSONUtils.createObjectNode();
core.set("transport", transport);
return core;
}
/**
* create command
*
* @return shell command file name
* @throws Exception if error throws Exception
*/
private String buildShellCommandFile(String jobConfigFilePath, Map<String, Property> paramsMap)
throws Exception {
// generate scripts
String fileName = String.format("%s/%s_node.%s",
taskExecutionContext.getExecutePath(),
taskExecutionContext.getTaskAppId(),
OSUtils.isWindows() ? "bat" : "sh");
Path path = new File(fileName).toPath();
if (Files.exists(path)) {
return fileName;
}
// datax python command
StringBuilder sbr = new StringBuilder();
sbr.append(getPythonCommand());
sbr.append(" ");
sbr.append(DATAX_PATH);
sbr.append(" ");
sbr.append(loadJvmEnv(dataXParameters));
sbr.append(jobConfigFilePath);
// replace placeholder
String dataxCommand = ParameterUtils.convertParameterPlaceholders(sbr.toString(), ParamUtils.convert(paramsMap));
logger.debug("raw script : {}", dataxCommand);
// create shell command file
Set<PosixFilePermission> perms = PosixFilePermissions.fromString(RWXR_XR_X);
FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms);
if (OSUtils.isWindows()) {
Files.createFile(path);
} else {
Files.createFile(path, attr);
}
Files.write(path, dataxCommand.getBytes(), StandardOpenOption.APPEND);
return fileName;
}
public String getPythonCommand() {
String pythonHome = System.getenv("PYTHON_HOME");
return getPythonCommand(pythonHome);
}
public String getPythonCommand(String pythonHome) {
if (StringUtils.isEmpty(pythonHome)) {
return DATAX_PYTHON;
}
String pythonBinPath = "/bin/" + DATAX_PYTHON;
Matcher matcher = PYTHON_PATH_PATTERN.matcher(pythonHome);
if (matcher.find()) {
return matcher.replaceAll(pythonBinPath);
}
return Paths.get(pythonHome, pythonBinPath).toString();
}
public String loadJvmEnv(DataxParameters dataXParameters) {
int xms = Math.max(dataXParameters.getXms(), 1);
int xmx = Math.max(dataXParameters.getXmx(), 1);
return String.format(JVM_PARAM, xms, xmx);
}
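To make the path and JVM handling above concrete (fragment; the PYTHON_HOME values and the xms/xmx settings are illustrative, results assume Unix-style paths, and task/params stand for a DataxTask and its DataxParameters):

    task.getPythonCommand("");                                // "python2.7" (PYTHON_HOME not set)
    task.getPythonCommand("/opt/soft/python");                // "/opt/soft/python/bin/python2.7"
    task.getPythonCommand("/opt/soft/python/bin/python3.7");  // "/opt/soft/python/bin/python2.7"
    task.loadJvmEnv(params);                                  // e.g. " --jvm=\"-Xms1G -Xmx4G\" " for xms=0, xmx=4

The script produced by buildShellCommandFile then looks roughly like: python2.7 ${DATAX_HOME}/bin/datax.py --jvm="-Xms1G -Xmx1G" <execute-path>/<task-app-id>_job.json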
/**
* parsing synchronized column names in SQL statements
*
* @param sourceType the database type of the data source
* @param targetType the database type of the data target
* @param dataSourceCfg the database connection parameters of the data source
* @param sql sql for data synchronization
* @return Keyword converted column names
*/
private String[] parsingSqlColumnNames(DbType sourceType, DbType targetType, BaseConnectionParam dataSourceCfg, String sql) {
String[] columnNames = tryGrammaticalAnalysisSqlColumnNames(sourceType, sql);
if (columnNames == null || columnNames.length == 0) {
logger.info("try to execute sql analysis query column name");
columnNames = tryExecuteSqlResolveColumnNames(sourceType, dataSourceCfg, sql);
}
notNull(columnNames, String.format("parsing sql columns failed : %s", sql));
return DataxUtils.convertKeywordsColumns(targetType, columnNames);
}
/**
* try grammatical parsing column
*
* @param dbType database type
* @param sql sql for data synchronization
* @return column name array
* @throws RuntimeException if error throws RuntimeException
*/
private String[] tryGrammaticalAnalysisSqlColumnNames(DbType dbType, String sql) {
String[] columnNames;
try {
SQLStatementParser parser = DataxUtils.getSqlStatementParser(dbType, sql);
if (parser == null) {
logger.warn("database driver [{}] is not support grammatical analysis sql", dbType);
return new String[0];
}
SQLStatement sqlStatement = parser.parseStatement();
SQLSelectStatement sqlSelectStatement = (SQLSelectStatement) sqlStatement;
SQLSelect sqlSelect = sqlSelectStatement.getSelect();
List<SQLSelectItem> selectItemList = null;
if (sqlSelect.getQuery() instanceof SQLSelectQueryBlock) {
SQLSelectQueryBlock block = (SQLSelectQueryBlock) sqlSelect.getQuery();
selectItemList = block.getSelectList();
} else if (sqlSelect.getQuery() instanceof SQLUnionQuery) {
SQLUnionQuery unionQuery = (SQLUnionQuery) sqlSelect.getQuery();
SQLSelectQueryBlock block = (SQLSelectQueryBlock) unionQuery.getRight();
selectItemList = block.getSelectList();
}
notNull(selectItemList,
String.format("select query type [%s] is not support", sqlSelect.getQuery().toString()));
columnNames = new String[selectItemList.size()];
for (int i = 0; i < selectItemList.size(); i++) {
SQLSelectItem item = selectItemList.get(i);
String columnName = null;
if (item.getAlias() != null) {
columnName = item.getAlias();
} else if (item.getExpr() != null) {
if (item.getExpr() instanceof SQLPropertyExpr) {
SQLPropertyExpr expr = (SQLPropertyExpr) item.getExpr();
columnName = expr.getName();
} else if (item.getExpr() instanceof SQLIdentifierExpr) {
SQLIdentifierExpr expr = (SQLIdentifierExpr) item.getExpr();
columnName = expr.getName();
}
} else {
throw new RuntimeException(
String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
}
if (columnName == null) {
throw new RuntimeException(
String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
}
columnNames[i] = columnName;
}
} catch (Exception e) {
logger.warn(e.getMessage(), e);
return new String[0];
}
return columnNames;
}
/**
* try to execute sql to resolve column names
*
* @param baseDataSource the database connection parameters
* @param sql sql for data synchronization
* @return column name array
*/
public String[] tryExecuteSqlResolveColumnNames(DbType sourceType, BaseConnectionParam baseDataSource, String sql) {
String[] columnNames;
sql = String.format("SELECT t.* FROM ( %s ) t WHERE 0 = 1", sql);
sql = sql.replace(";", "");
try (
Connection connection = DatasourceUtil.getConnection(sourceType, baseDataSource);
PreparedStatement stmt = connection.prepareStatement(sql);
ResultSet resultSet = stmt.executeQuery()) {
ResultSetMetaData md = resultSet.getMetaData();
int num = md.getColumnCount();
columnNames = new String[num];
for (int i = 1; i <= num; i++) {
columnNames[i - 1] = md.getColumnName(i);
}
} catch (SQLException e) {
logger.warn(e.getMessage(), e);
return null;
}
return columnNames;
}
@Override
public AbstractParameters getParameters() {
return dataXParameters;
}
private void notNull(Object obj, String message) {
if (obj == null) {
throw new RuntimeException(message);
}
}
}
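For a concrete picture of the grammatical analysis path in parsingSqlColumnNames, here is a self-contained sketch using the same druid parser, under a MySQL assumption (class name and sql text are illustrative, and the fallback here simply prints the expression text rather than mirroring the exact logic above):

    import com.alibaba.druid.sql.ast.SQLStatement;
    import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
    import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
    import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
    import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
    import java.util.List;

    public class ColumnParseExample {
        public static void main(String[] args) {
            String sql = "SELECT u.id AS user_id, u.name FROM users u";
            SQLStatement statement = new MySqlStatementParser(sql).parseStatement();
            SQLSelectQueryBlock block =
                    (SQLSelectQueryBlock) ((SQLSelectStatement) statement).getSelect().getQuery();
            List<SQLSelectItem> items = block.getSelectList();
            for (SQLSelectItem item : items) {
                // prefer the alias when present, otherwise fall back to the expression text
                String name = item.getAlias() != null ? item.getAlias() : item.getExpr().toString();
                System.out.println(name);   // prints "user_id" then "u.name"
            }
        }
    }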

36
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannel.java

@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.request.DataxTaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
public class DataxTaskChannel implements TaskChannel {
@Override
public void cancelApplication(boolean status) {
}
@Override
public AbstractTask createTask(TaskRequest taskRequest) {
return new DataxTask((DataxTaskRequest) taskRequest);
}
}

42
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java

@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskChannelFactory;
import java.util.List;
public class DataxTaskChannelFactory implements TaskChannelFactory {
@Override
public String getName() {
return "DATAX";
}
@Override
public List<PluginParams> getParams() {
return null;
}
@Override
public TaskChannel create() {
return new DataxTaskChannel();
}
}

31
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskPlugin.java

@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin;
import org.apache.dolphinscheduler.spi.task.TaskChannelFactory;
import com.google.common.collect.ImmutableList;
public class DataxTaskPlugin implements DolphinSchedulerPlugin {
@Override
public Iterable<TaskChannelFactory> getTaskChannelFactorys() {
return ImmutableList.of(new DataxTaskChannelFactory());
}
}
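A rough wiring sketch (fragment): the plugin above exposes the channel factory, the factory builds a DataxTaskChannel, and the channel turns a worker-side request into a runnable task. The dataxTaskRequest variable is a stand-in for a DataxTaskRequest prepared by the worker:

    TaskChannelFactory factory = new DataxTaskChannelFactory();   // factory name: "DATAX"
    TaskChannel channel = factory.create();
    DataxTask task = (DataxTask) channel.createTask(dataxTaskRequest);
    task.init();     // parses and validates DataxParameters
    task.handle();   // builds the json/shell files and runs DataX (declared to throw Exception)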

136
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java

@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import org.apache.dolphinscheduler.spi.enums.DbType;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser;
import com.alibaba.druid.sql.dialect.sqlserver.parser.SQLServerStatementParser;
import com.alibaba.druid.sql.parser.SQLStatementParser;
public class DataxUtils {
public static final String DATAX_READER_PLUGIN_MYSQL = "mysqlreader";
public static final String DATAX_READER_PLUGIN_POSTGRESQL = "postgresqlreader";
public static final String DATAX_READER_PLUGIN_ORACLE = "oraclereader";
public static final String DATAX_READER_PLUGIN_SQLSERVER = "sqlserverreader";
public static final String DATAX_READER_PLUGIN_CLICKHOUSE = "clickhousereader";
public static final String DATAX_WRITER_PLUGIN_MYSQL = "mysqlwriter";
public static final String DATAX_WRITER_PLUGIN_POSTGRESQL = "postgresqlwriter";
public static final String DATAX_WRITER_PLUGIN_ORACLE = "oraclewriter";
public static final String DATAX_WRITER_PLUGIN_SQLSERVER = "sqlserverwriter";
public static final String DATAX_WRITER_PLUGIN_CLICKHOUSE = "clickhousewriter";
public static String getReaderPluginName(DbType dbType) {
switch (dbType) {
case MYSQL:
return DATAX_READER_PLUGIN_MYSQL;
case POSTGRESQL:
return DATAX_READER_PLUGIN_POSTGRESQL;
case ORACLE:
return DATAX_READER_PLUGIN_ORACLE;
case SQLSERVER:
return DATAX_READER_PLUGIN_SQLSERVER;
case CLICKHOUSE:
return DATAX_READER_PLUGIN_CLICKHOUSE;
default:
return null;
}
}
public static String getWriterPluginName(DbType dbType) {
switch (dbType) {
case MYSQL:
return DATAX_WRITER_PLUGIN_MYSQL;
case POSTGRESQL:
return DATAX_WRITER_PLUGIN_POSTGRESQL;
case ORACLE:
return DATAX_WRITER_PLUGIN_ORACLE;
case SQLSERVER:
return DATAX_WRITER_PLUGIN_SQLSERVER;
case CLICKHOUSE:
return DATAX_WRITER_PLUGIN_CLICKHOUSE;
default:
return null;
}
}
public static SQLStatementParser getSqlStatementParser(DbType dbType, String sql) {
switch (dbType) {
case MYSQL:
return new MySqlStatementParser(sql);
case POSTGRESQL:
return new PGSQLStatementParser(sql);
case ORACLE:
return new OracleStatementParser(sql);
case SQLSERVER:
return new SQLServerStatementParser(sql);
default:
return null;
}
}
public static String[] convertKeywordsColumns(DbType dbType, String[] columns) {
if (columns == null) {
return null;
}
String[] toColumns = new String[columns.length];
for (int i = 0; i < columns.length; i++) {
toColumns[i] = doConvertKeywordsColumn(dbType, columns[i]);
}
return toColumns;
}
public static String doConvertKeywordsColumn(DbType dbType, String column) {
if (column == null) {
return column;
}
column = column.trim();
column = column.replace("`", "");
column = column.replace("\"", "");
column = column.replace("'", "");
switch (dbType) {
case MYSQL:
return String.format("`%s`", column);
case POSTGRESQL:
return String.format("\"%s\"", column);
case ORACLE:
return String.format("\"%s\"", column);
case SQLSERVER:
return String.format("`%s`", column);
default:
return column;
}
}
}
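Quick checks of the plugin-name and keyword helpers (fragment):

    DataxUtils.getReaderPluginName(DbType.MYSQL);                     // "mysqlreader"
    DataxUtils.getWriterPluginName(DbType.POSTGRESQL);                // "postgresqlwriter"
    DataxUtils.doConvertKeywordsColumn(DbType.MYSQL, " `order` ");    // "`order`"
    DataxUtils.doConvertKeywordsColumn(DbType.POSTGRESQL, "user");    // wraps in double quotes: "user"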

22
dolphinscheduler-task-plugin/dolphinscheduler-task-flink/pom.xml

@ -27,21 +27,21 @@
<artifactId>dolphinscheduler-task-flink</artifactId>
<packaging>dolphinscheduler-plugin</packaging>
<dependencies>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-spi</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-task-api</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<finalName>dolphinscheduler-task-flink-${project.version}</finalName>
</build>
</project>

87
dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java

@ -18,81 +18,90 @@
package org.apache.dolphinscheduler.plugin.task.flink;
import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.task.ResourceInfo;
import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils;
import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class FlinkTask extends AbstractYarnTask {
/**
* flink command
* usage: flink run [OPTIONS] <jar-file> <arguments>
*/
private static final String FLINK_COMMAND = "flink";
private static final String FLINK_RUN = "run";
/**
* flink parameters
*/
private FlinkParameters flinkParameters;
/**
* taskExecutionContext
*/
private TaskRequest taskExecutionContext;
public FlinkTask(TaskRequest taskExecutionContext) {
super(taskExecutionContext);
this.taskExecutionContext = taskExecutionContext;
}
@Override
public void init() {
logger.info("flink task params {}", taskExecutionContext.getTaskParams());
flinkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), FlinkParameters.class);
if (flinkParameters == null || !flinkParameters.checkParameters()) {
throw new RuntimeException("flink task params is not valid");
}
flinkParameters.setQueue(taskExecutionContext.getQueue());
setMainJarName();
if (StringUtils.isNotEmpty(flinkParameters.getMainArgs())) {
String args = flinkParameters.getMainArgs();
// combining local and global parameters
Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext, getParameters());
logger.info("param Map : {}", paramsMap);
if (paramsMap != null) {
args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap));
logger.info("param args : {}", args);
}
flinkParameters.setMainArgs(args);
}
}
/**
* create command
*
* @return command
*/
@Override
protected String buildCommand() {
// flink run [OPTIONS] <jar-file> <arguments>
List<String> args = new ArrayList<>();
args.add(FLINK_COMMAND);
args.add(FLINK_RUN);
logger.info("flink task args : {}", args);
// other parameters
args.addAll(FlinkArgsUtils.buildArgs(flinkParameters));
String command = ParameterUtils
.convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams());
logger.info("flink task command : {}", command);
return command;
}

2
dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskChannel.java

@ -18,7 +18,7 @@
package org.apache.dolphinscheduler.plugin.task.flink;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
public class FlinkTaskChannel implements TaskChannel {
@Override

12
dolphinscheduler-task-plugin/dolphinscheduler-task-http/pom.xml

@ -40,9 +40,19 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
</dependency>
</dependencies>
<build>
<finalName>dolphinscheduler-task-http-${project.version}</finalName>
</build>
</project>

18
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpMethod.java

@ -21,12 +21,12 @@ package org.apache.dolphinscheduler.plugin.task.http;
* http method
*/
public enum HttpMethod {
/**
* 0 get
* 1 post
* 2 head
* 3 put
* 4 delete
*/
GET, POST, HEAD, PUT, DELETE
}

5
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpParameters.java

@ -53,18 +53,17 @@ public class HttpParameters extends AbstractParameters {
*/
private String condition;
/**
* Connect Timeout
* Unit: ms
*/
private int connectTimeout;
/**
* Socket Timeout
* Unit: ms
*/
private int socketTimeout;
@Override
public boolean checkParameters() {

1
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpParametersType.java

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.http;
/**

192
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpProperty.java

@ -20,105 +20,103 @@ package org.apache.dolphinscheduler.plugin.task.http;
import java.util.Objects;
public class HttpProperty {
/**
* key
*/
private String prop;
/**
* httpParametersType
*/
private HttpParametersType httpParametersType;
/**
* value
*/
private String value;
public HttpProperty() {
}
public HttpProperty(String prop, HttpParametersType httpParametersType, String value) {
this.prop = prop;
this.httpParametersType = httpParametersType;
this.value = value;
}
/**
* getter method
*
* @return the prop
* @see HttpProperty#prop
*/
public String getProp() {
return prop;
}
/**
* setter method
*
* @param prop the prop to set
* @see HttpProperty#prop
*/
public void setProp(String prop) {
this.prop = prop;
}
/**
* getter method
*
* @return the value
* @see HttpProperty#value
*/
public String getValue() {
return value;
}
/**
* setter method
*
* @param value the value to set
* @see HttpProperty#value
*/
public void setValue(String value) {
this.value = value;
}
public HttpParametersType getHttpParametersType() {
return httpParametersType;
}
public void setHttpParametersType(HttpParametersType httpParametersType) {
this.httpParametersType = httpParametersType;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HttpProperty property = (HttpProperty) o;
return Objects.equals(prop, property.prop)
&& Objects.equals(value, property.value);
}
@Override
public int hashCode() {
return Objects.hash(prop, value);
}
@Override
public String toString() {
return "HttpProperty{"
+ "prop='" + prop + '\''
+ ", httpParametersType=" + httpParametersType
+ ", value='" + value + '\''
+ '}';
}
}

269
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java

@ -17,20 +17,56 @@
package org.apache.dolphinscheduler.plugin.task.http;
import static org.apache.dolphinscheduler.plugin.task.http.HttpTaskConstants.APPLICATION_JSON;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.TASK_LOG_INFO_FORMAT;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils;
import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.CollectionUtils;
import org.apache.dolphinscheduler.spi.utils.DateUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.io.Charsets;
import org.apache.http.HttpEntity;
import org.apache.http.ParseException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class HttpTask extends AbstractTaskExecutor {
/**
* output
*/
protected String output;
/**
* http parameters
*/
private HttpParameters httpParameters;
/**
* taskExecutionContext
*/
private TaskRequest taskExecutionContext;
/**
* constructor
*
@ -38,6 +74,7 @@ public class HttpTask extends AbstractTaskExecutor {
*/
public HttpTask(TaskRequest taskExecutionContext) {
super(taskExecutionContext);
this.taskExecutionContext = taskExecutionContext;
}
@Override
@ -53,10 +90,234 @@ public class HttpTask extends AbstractTaskExecutor {
@Override
public void handle() throws Exception {
String threadLoggerInfoName = String.format(TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId());
Thread.currentThread().setName(threadLoggerInfoName);
long startTime = System.currentTimeMillis();
String formatTimeStamp = DateUtils.formatTimeStamp(startTime);
String statusCode = null;
String body = null;
try (CloseableHttpClient client = createHttpClient();
CloseableHttpResponse response = sendRequest(client)) {
statusCode = String.valueOf(getStatusCode(response));
body = getResponseBody(response);
exitStatusCode = validResponse(body, statusCode);
long costTime = System.currentTimeMillis() - startTime;
logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {} milliseconds, statusCode : {}, body : {}, log : {}",
formatTimeStamp, httpParameters.getUrl(),
httpParameters.getHttpMethod(), costTime, statusCode, body, output);
} catch (Exception e) {
appendMessage(e.toString());
exitStatusCode = -1;
logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:" + output, e);
throw e;
}
}
/**
* send request
*
* @param client client
* @return CloseableHttpResponse
* @throws IOException io exception
*/
protected CloseableHttpResponse sendRequest(CloseableHttpClient client) throws IOException {
RequestBuilder builder = createRequestBuilder();
// replace placeholders, and combine local and global parameters
Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext,getParameters());
List<HttpProperty> httpPropertyList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(httpParameters.getHttpParams())) {
for (HttpProperty httpProperty : httpParameters.getHttpParams()) {
String jsonObject = JSONUtils.toJsonString(httpProperty);
String params = ParameterUtils.convertParameterPlaceholders(jsonObject, ParamUtils.convert(paramsMap));
logger.info("http request params:{}", params);
httpPropertyList.add(JSONUtils.parseObject(params, HttpProperty.class));
}
}
addRequestParams(builder, httpPropertyList);
String requestUrl = ParameterUtils.convertParameterPlaceholders(httpParameters.getUrl(), ParamUtils.convert(paramsMap));
HttpUriRequest request = builder.setUri(requestUrl).build();
setHeaders(request, httpPropertyList);
return client.execute(request);
}
/**
* get response body
*
* @param httpResponse http response
* @return response body
* @throws ParseException parse exception
* @throws IOException io exception
*/
protected String getResponseBody(CloseableHttpResponse httpResponse) throws ParseException, IOException {
if (httpResponse == null) {
return null;
}
HttpEntity entity = httpResponse.getEntity();
if (entity == null) {
return null;
}
return EntityUtils.toString(entity, StandardCharsets.UTF_8.name());
}
/**
* get status code
*
* @param httpResponse http response
* @return status code
*/
protected int getStatusCode(CloseableHttpResponse httpResponse) {
return httpResponse.getStatusLine().getStatusCode();
}
/**
* valid response
*
* @param body body
* @param statusCode status code
* @return exit status code
*/
protected int validResponse(String body, String statusCode) {
int exitStatusCode = 0;
switch (httpParameters.getHttpCheckCondition()) {
case BODY_CONTAINS:
if (StringUtils.isEmpty(body) || !body.contains(httpParameters.getCondition())) {
appendMessage(httpParameters.getUrl() + " doesn't contain "
+ httpParameters.getCondition());
exitStatusCode = -1;
}
break;
case BODY_NOT_CONTAINS:
if (StringUtils.isEmpty(body) || body.contains(httpParameters.getCondition())) {
appendMessage(httpParameters.getUrl() + " contains "
+ httpParameters.getCondition());
exitStatusCode = -1;
}
break;
case STATUS_CODE_CUSTOM:
if (!statusCode.equals(httpParameters.getCondition())) {
appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: " + httpParameters.getCondition());
exitStatusCode = -1;
}
break;
default:
if (!"200".equals(statusCode)) {
appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: 200");
exitStatusCode = -1;
}
break;
}
return exitStatusCode;
}
public String getOutput() {
return output;
}
/**
* append message
*
* @param message message
*/
protected void appendMessage(String message) {
if (output == null) {
output = "";
}
if (message != null && !message.trim().isEmpty()) {
output += message;
}
}
/**
* add request params
*
* @param builder builder
* @param httpPropertyList http property list
*/
protected void addRequestParams(RequestBuilder builder, List<HttpProperty> httpPropertyList) {
if (CollectionUtils.isNotEmpty(httpPropertyList)) {
ObjectNode jsonParam = JSONUtils.createObjectNode();
for (HttpProperty property : httpPropertyList) {
if (property.getHttpParametersType() != null) {
if (property.getHttpParametersType().equals(HttpParametersType.PARAMETER)) {
builder.addParameter(property.getProp(), property.getValue());
} else if (property.getHttpParametersType().equals(HttpParametersType.BODY)) {
jsonParam.put(property.getProp(), property.getValue());
}
}
}
StringEntity postingString = new StringEntity(jsonParam.toString(), Charsets.UTF_8);
postingString.setContentEncoding(StandardCharsets.UTF_8.name());
postingString.setContentType(APPLICATION_JSON);
builder.setEntity(postingString);
}
}
/**
* set headers
*
* @param request request
* @param httpPropertyList http property list
*/
protected void setHeaders(HttpUriRequest request, List<HttpProperty> httpPropertyList) {
if (CollectionUtils.isNotEmpty(httpPropertyList)) {
for (HttpProperty property : httpPropertyList) {
if (HttpParametersType.HEADERS.equals(property.getHttpParametersType())) {
request.addHeader(property.getProp(), property.getValue());
}
}
}
}
/**
* create http client
*
* @return CloseableHttpClient
*/
protected CloseableHttpClient createHttpClient() {
final RequestConfig requestConfig = requestConfig();
HttpClientBuilder httpClientBuilder;
httpClientBuilder = HttpClients.custom().setDefaultRequestConfig(requestConfig);
return httpClientBuilder.build();
}
/**
* request config
*
* @return RequestConfig
*/
private RequestConfig requestConfig() {
return RequestConfig.custom().setSocketTimeout(httpParameters.getSocketTimeout()).setConnectTimeout(httpParameters.getConnectTimeout()).build();
}
/**
* create request builder
*
* @return RequestBuilder
*/
protected RequestBuilder createRequestBuilder() {
if (httpParameters.getHttpMethod().equals(HttpMethod.GET)) {
return RequestBuilder.get();
} else if (httpParameters.getHttpMethod().equals(HttpMethod.POST)) {
return RequestBuilder.post();
} else if (httpParameters.getHttpMethod().equals(HttpMethod.HEAD)) {
return RequestBuilder.head();
} else if (httpParameters.getHttpMethod().equals(HttpMethod.PUT)) {
return RequestBuilder.put();
} else if (httpParameters.getHttpMethod().equals(HttpMethod.DELETE)) {
return RequestBuilder.delete();
} else {
return null;
}
}
@Override
public AbstractParameters getParameters() {
return null;
return this.httpParameters;
}
}
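For reference, a minimal sketch (not part of this commit) of the Apache HttpClient calls this task is built on; the URL and timeout values are illustrative, and both timeouts are plain milliseconds as documented in HttpParameters:
import java.nio.charset.StandardCharsets;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
public class HttpCallSketch {
public static void main(String[] args) throws Exception {
// same construction as requestConfig() / createHttpClient()
RequestConfig config = RequestConfig.custom()
.setConnectTimeout(60000)
.setSocketTimeout(60000)
.build();
try (CloseableHttpClient client = HttpClients.custom().setDefaultRequestConfig(config).build()) {
// GET branch of createRequestBuilder(); the URL is an example
HttpUriRequest request = RequestBuilder.get().setUri("https://example.com/health").build();
try (CloseableHttpResponse response = client.execute(request)) {
int statusCode = response.getStatusLine().getStatusCode();
String body = response.getEntity() == null ? "" : EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8.name());
// validResponse() compares statusCode and body against the configured HttpCheckCondition
System.out.println(statusCode + " -> " + body.length() + " bytes");
}
}
}
}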

4
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannel.java

@ -19,9 +19,10 @@ package org.apache.dolphinscheduler.plugin.task.http;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskRequest;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
public class HttpTaskChannel implements TaskChannel {
@Override
public void cancelApplication(boolean status) {
@ -31,4 +32,5 @@ public class HttpTaskChannel implements TaskChannel {
public AbstractTask createTask(TaskRequest taskRequest) {
return new HttpTask(taskRequest);
}
}

5
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannelFactory.java

@ -24,9 +24,10 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory;
import java.util.List;
public class HttpTaskChannelFactory implements TaskChannelFactory {
@Override
public String getName() {
return null;
return "HTTP";
}
@Override
@ -36,6 +37,6 @@ public class HttpTaskChannelFactory implements TaskChannelFactory {
@Override
public TaskChannel create() {
return null;
return new HttpTaskChannel();
}
}

8
dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskConstants.java → dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskConstants.java

@ -15,11 +15,9 @@
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.shell;
package org.apache.dolphinscheduler.plugin.task.http;
public class ShellTaskConstants {
public class HttpTaskConstants {
private ShellTaskConstants() {
}
public static final String APPLICATION_JSON = "application/json";
}

3
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskPlugin.java

@ -17,10 +17,11 @@
package org.apache.dolphinscheduler.plugin.task.http;
import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin;
import org.apache.dolphinscheduler.spi.task.TaskChannelFactory;
import com.google.common.collect.ImmutableList;
public class HttpTaskPlugin implements DolphinSchedulerPlugin {
@Override

5
dolphinscheduler-task-plugin/dolphinscheduler-task-mr/pom.xml

@ -26,6 +26,7 @@
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-task-mr</artifactId>
<packaging>dolphinscheduler-plugin</packaging>
<dependencies>
<dependency>
@ -41,4 +42,8 @@
</dependencies>
<build>
<finalName>dolphinscheduler-task-mr-${project.version}</finalName>
</build>
</project>

88
dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceArgsUtils.java

@ -0,0 +1,88 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.mr;
import static org.apache.dolphinscheduler.plugin.task.mr.MapReduceTaskConstants.MR_NAME;
import static org.apache.dolphinscheduler.plugin.task.mr.MapReduceTaskConstants.MR_QUEUE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.D;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.JAR;
import org.apache.dolphinscheduler.plugin.task.util.ArgsUtils;
import org.apache.dolphinscheduler.spi.task.ResourceInfo;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* mapreduce args utils
*/
public class MapReduceArgsUtils {
private MapReduceArgsUtils() {
throw new IllegalStateException("Utility class");
}
/**
* build args
*
* @param param param
* @return argument list
*/
public static List<String> buildArgs(MapReduceParameters param) {
List<String> args = new ArrayList<>();
ResourceInfo mainJar = param.getMainJar();
if (mainJar != null) {
args.add(JAR);
args.add(mainJar.getRes());
}
ProgramType programType = param.getProgramType();
String mainClass = param.getMainClass();
if (programType != null && programType != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)) {
args.add(mainClass);
}
String appName = param.getAppName();
if (StringUtils.isNotEmpty(appName)) {
args.add(String.format("%s%s=%s", D, MR_NAME, ArgsUtils.escape(appName)));
}
String others = param.getOthers();
if (StringUtils.isEmpty(others) || !others.contains(MR_QUEUE)) {
String queue = param.getQueue();
if (StringUtils.isNotEmpty(queue)) {
args.add(String.format("%s%s=%s", D, MR_QUEUE, queue));
}
}
// -conf -archives -files -libjars -D
if (StringUtils.isNotEmpty(others)) {
args.add(others);
}
String mainArgs = param.getMainArgs();
if (StringUtils.isNotEmpty(mainArgs)) {
args.add(mainArgs);
}
return args;
}
}
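A hypothetical usage sketch (not part of this commit) showing how buildArgs(...) is typically fed; the jar name, main class and queue are invented, ResourceInfo is assumed to have a default constructor, and the literal values of JAR, D, MR_NAME and MR_QUEUE are not shown in this diff:
package org.apache.dolphinscheduler.plugin.task.mr;
import java.util.List;
import org.apache.dolphinscheduler.spi.task.ResourceInfo;
public class MapReduceArgsSketch {
public static void main(String[] args) {
MapReduceParameters param = new MapReduceParameters();
ResourceInfo jar = new ResourceInfo(); // assumes a default constructor
jar.setRes("wordcount.jar");
param.setMainJar(jar);
param.setMainClass("org.example.WordCount"); // illustrative
param.setProgramType(ProgramType.JAVA); // 0 JAVA per the enum comment
param.setAppName("wordcount-demo");
param.setQueue("default");
param.setMainArgs("/input /output");
// roughly: jar wordcount.jar org.example.WordCount <D><MR_NAME>=wordcount-demo <D><MR_QUEUE>=default /input /output
List<String> built = MapReduceArgsUtils.buildArgs(param);
System.out.println(String.join(" ", built));
}
}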

159
dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceParameters.java

@ -0,0 +1,159 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.mr;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.ResourceInfo;
import java.util.ArrayList;
import java.util.List;
/**
* mapreduce parameters
*/
public class MapReduceParameters extends AbstractParameters {
/**
* major jar
*/
private ResourceInfo mainJar;
/**
* major class
*/
private String mainClass;
/**
* arguments
*/
private String mainArgs;
/**
* other arguments
*/
private String others;
/**
* app name
*/
private String appName;
/**
* queue
*/
private String queue;
/**
* resource list
*/
private List<ResourceInfo> resourceList = new ArrayList<>();
/**
* program type
* 0 JAVA,1 SCALA,2 PYTHON
*/
private ProgramType programType;
public String getMainClass() {
return mainClass;
}
public void setMainClass(String mainClass) {
this.mainClass = mainClass;
}
public String getMainArgs() {
return mainArgs;
}
public void setMainArgs(String mainArgs) {
this.mainArgs = mainArgs;
}
public String getOthers() {
return others;
}
public void setOthers(String others) {
this.others = others;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getQueue() {
return queue;
}
public void setQueue(String queue) {
this.queue = queue;
}
public List<ResourceInfo> getResourceList() {
return this.resourceList;
}
public void setResourceList(List<ResourceInfo> resourceList) {
this.resourceList = resourceList;
}
public void setMainJar(ResourceInfo mainJar) {
this.mainJar = mainJar;
}
public ResourceInfo getMainJar() {
return mainJar;
}
public ProgramType getProgramType() {
return programType;
}
public void setProgramType(ProgramType programType) {
this.programType = programType;
}
@Override
public boolean checkParameters() {
return this.mainJar != null && this.programType != null;
}
@Override
public List<ResourceInfo> getResourceFilesList() {
if (mainJar != null && !resourceList.contains(mainJar)) {
resourceList.add(mainJar);
}
return resourceList;
}
@Override
public String toString() {
return "mainJar= " + mainJar
+ "mainClass=" + mainClass
+ "mainArgs=" + mainArgs
+ "queue=" + queue
+ "other mainArgs=" + others
;
}
}
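For illustration, a sketch (not part of this commit) of task-params JSON that JSONUtils.parseObject(...) could map onto this bean; the nested resource shape is an assumption about ResourceInfo:
package org.apache.dolphinscheduler.plugin.task.mr;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
public class MapReduceParamsParseSketch {
public static void main(String[] args) {
String taskParams = "{"
+ "\"mainJar\":{\"id\":0,\"res\":\"wordcount.jar\"},"
+ "\"mainClass\":\"org.example.WordCount\","
+ "\"programType\":\"JAVA\","
+ "\"appName\":\"wordcount-demo\","
+ "\"mainArgs\":\"/input /output\""
+ "}";
MapReduceParameters parameters = JSONUtils.parseObject(taskParams, MapReduceParameters.class);
// checkParameters() only requires a main jar and a program type
System.out.println(parameters != null && parameters.checkParameters());
}
}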

134
dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTask.java

@ -0,0 +1,134 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.mr;
import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.task.ResourceInfo;
import org.apache.dolphinscheduler.spi.task.TaskConstants;
import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils;
import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* mapreduce task
*/
public class MapReduceTask extends AbstractYarnTask {
/**
* mapreduce command
* usage: hadoop jar <jar> [mainClass] [GENERIC_OPTIONS] args...
*/
private static final String MAPREDUCE_COMMAND = TaskConstants.HADOOP;
/**
* mapreduce parameters
*/
private MapReduceParameters mapreduceParameters;
/**
* taskExecutionContext
*/
private TaskRequest taskExecutionContext;
/**
* constructor
* @param taskExecutionContext taskExecutionContext
*/
public MapReduceTask(TaskRequest taskExecutionContext) {
super(taskExecutionContext);
this.taskExecutionContext = taskExecutionContext;
}
@Override
public void init() {
logger.info("mapreduce task params {}", taskExecutionContext.getTaskParams());
this.mapreduceParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), MapReduceParameters.class);
// check parameters
if (mapreduceParameters == null || !mapreduceParameters.checkParameters()) {
throw new RuntimeException("mapreduce task params is not valid");
}
mapreduceParameters.setQueue(taskExecutionContext.getQueue());
setMainJarName();
// replace placeholders, and combine local and global parameters
Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext,getParameters());
if (paramsMap != null) {
String args = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getMainArgs(), ParamUtils.convert(paramsMap));
mapreduceParameters.setMainArgs(args);
if (mapreduceParameters.getProgramType() != null && mapreduceParameters.getProgramType() == ProgramType.PYTHON) {
String others = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getOthers(), ParamUtils.convert(paramsMap));
mapreduceParameters.setOthers(others);
}
}
}
/**
* build command
* @return command
*/
@Override
protected String buildCommand() {
// hadoop jar <jar> [mainClass] [GENERIC_OPTIONS] args...
List<String> args = new ArrayList<>();
args.add(MAPREDUCE_COMMAND);
// other parameters
args.addAll(MapReduceArgsUtils.buildArgs(mapreduceParameters));
String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args),
taskExecutionContext.getDefinedParams());
logger.info("mapreduce task command: {}", command);
return command;
}
@Override
protected void setMainJarName() {
// main jar
ResourceInfo mainJar = mapreduceParameters.getMainJar();
if (mainJar != null) {
int resourceId = mainJar.getId();
String resourceName;
if (resourceId == 0) {
resourceName = mainJar.getRes();
} else {
// when updating a resource there may be errors; this could perhaps also be handled by the upper layer. It depends on whether resource handling can be abstracted as common behavior; for now I think it can.
resourceName = mainJar.getResourceName().replaceFirst("/", "");
}
mainJar.setRes(resourceName);
mapreduceParameters.setMainJar(mainJar);
}
}
@Override
public AbstractParameters getParameters() {
return mapreduceParameters;
}
}

34
dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannel.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.mr;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
public class MapReduceTaskChannel implements TaskChannel {
@Override
public void cancelApplication(boolean status) {
}
@Override
public AbstractTask createTask(TaskRequest taskRequest) {
return new MapReduceTask(taskRequest);
}
}

42
dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannelFactory.java

@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.mr;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskChannelFactory;
import java.util.List;
public class MapReduceTaskChannelFactory implements TaskChannelFactory {
@Override
public String getName() {
return "MR";
}
@Override
public List<PluginParams> getParams() {
return null;
}
@Override
public TaskChannel create() {
return new MapReduceTaskChannel();
}
}
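A minimal wiring sketch (not part of this commit) of how a worker could go from this factory to a runnable task. It assumes init() is declared on AbstractTask (as the @Override annotations in the task classes suggest) and that, in practice, the factory is resolved through the plugin registry rather than instantiated directly:
import org.apache.dolphinscheduler.plugin.task.mr.MapReduceTaskChannelFactory;
import org.apache.dolphinscheduler.spi.task.AbstractTask;
import org.apache.dolphinscheduler.spi.task.TaskChannel;
import org.apache.dolphinscheduler.spi.task.TaskChannelFactory;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
public class TaskChannelWiringSketch {
public static AbstractTask buildTask(TaskRequest taskRequest) {
TaskChannelFactory factory = new MapReduceTaskChannelFactory(); // keyed as "MR" by getName()
TaskChannel channel = factory.create();
AbstractTask task = channel.createTask(taskRequest);
task.init(); // parses taskRequest.getTaskParams() into MapReduceParameters and validates it
return task;
}
}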
