
[Improvement][API] Create Datasource #5083 (#5116)

* [Improvement][API] Create Datasource #5083

  * Refactor create/update datasource API

* fix Code Smells

* fix code smell

* resolve code smell

* use ConnectionParam to replace BaseDatasource

* solve license

* add ut

* code check

* add ut

* fix ut coverage

* fix ut
ruanwenjun authored 4 years ago, committed by GitHub
commit e6d8da484f
  1. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java (157)
  2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (37)
  3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java (360)
  4. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java (28)
  5. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java (169)
  6. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/AbstractDatasourceProcessor.java (79)
  7. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseConnectionParam.java (98)
  8. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseDataSourceParamDTO.java (161)
  9. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsConnectionParam.java (57)
  10. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsDatasourceParamDTO.java (61)
  11. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/ConnectionParam.java (23)
  12. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceProcessor.java (81)
  13. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtil.java (121)
  14. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceParamDTO.java (29)
  15. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessor.java (125)
  16. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickhouseConnectionParam.java (34)
  17. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2ConnectionParam.java (33)
  18. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceParamDTO.java (43)
  19. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessor.java (126)
  20. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveConnectionParam.java (38)
  21. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDataSourceParamDTO.java (45)
  22. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessor.java (185)
  23. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlConnectionParam.java (33)
  24. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceParamDTO.java (43)
  25. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessor.java (170)
  26. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleConnectionParam.java (46)
  27. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceParamDTO.java (46)
  28. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessor.java (141)
  29. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlConnectionParam.java (34)
  30. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceParamDTO.java (41)
  31. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessor.java (126)
  32. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoConnectionParam.java (34)
  33. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceParamDTO.java (43)
  34. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessor.java (128)
  35. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkConnectionParam.java (38)
  36. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceParamDTO.java (47)
  37. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessor.java (154)
  38. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerConnectionParam.java (34)
  39. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceParamDTO.java (47)
  40. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessor.java (123)
  41. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbConnectType.java (1)
  42. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java (5)
  43. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtilTest.java (130)
  44. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java (83)
  45. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessorTest.java (83)
  46. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessorTest.java (79)
  47. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessorTest.java (81)
  48. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessorTest.java (83)
  49. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java (83)
  50. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessorTest.java (81)
  51. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessorTest.java (80)
  52. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java (81)
  53. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java (278)
  54. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java (108)
  55. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java (120)
  56. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java (120)
  57. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSourceTest.java (195)
  58. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSourceTest.java (89)
  59. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSourceTest.java (102)
  60. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSourceTest.java (74)
  61. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java (33)
  62. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java (160)
  63. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java (13)
  64. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java (14)
  65. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java (14)
  66. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java (9)
  67. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTaskTest.java (11)
  68. dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue (4)
  69. dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js (12)
  70. pom.xml (10)
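For orientation before the per-file diffs: the refactor replaces the long list of loose request parameters with one typed BaseDataSourceParamDTO subclass per database and a ConnectionParam built by DatasourceUtil. The following is a minimal sketch of the new create path pieced together from the diffs below; it is not part of the commit, and the concrete field values are made up for illustration (the MySQL setters are the ones used in DataSourceControllerTest):

    import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
    import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
    import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
    import org.apache.dolphinscheduler.common.utils.JSONUtils;

    import java.util.HashMap;

    public class CreateDatasourceFlowSketch {
        public static void main(String[] args) {
            // 1. The controller now binds the JSON request body to a typed DTO (one subclass per DbType).
            MysqlDatasourceParamDTO param = new MysqlDatasourceParamDTO();
            param.setName("mysql_test");            // illustrative values only
            param.setNote("mysql data source test");
            param.setHost("192.168.1.10");
            param.setPort(3306);
            param.setDatabase("dolphinscheduler");
            param.setUserName("root");
            param.setPassword("root@123");
            param.setOther(new HashMap<>());

            // 2. The service validates the DTO and turns it into a ConnectionParam ...
            DatasourceUtil.checkDatasourceParam(param);
            ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(param);

            // 3. ... whose JSON form is what ends up stored via dataSource.setConnectionParams(...).
            System.out.println(JSONUtils.toJsonString(connectionParam));
        }
    }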

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java (157)

@@ -31,9 +31,12 @@ import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_DATASOURCE_NAM
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
 import org.apache.dolphinscheduler.api.service.DataSourceService;
+import org.apache.dolphinscheduler.api.utils.RegexUtils;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.enums.DbConnectType;
+import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
+import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.utils.CommonUtils;
 import org.apache.dolphinscheduler.common.utils.ParameterUtils;
@@ -48,6 +51,7 @@ import org.springframework.http.HttpStatus;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PostMapping;
 import org.springframework.web.bind.annotation.RequestAttribute;
+import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.ResponseStatus;
@@ -57,6 +61,7 @@ import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;
 import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
 import springfox.documentation.annotations.ApiIgnore;

 /**
@@ -76,58 +81,19 @@ public class DataSourceController extends BaseController {
     * create data source
     *
     * @param loginUser login user
-     * @param name data source name
-     * @param note data source description
-     * @param type data source type
-     * @param host host
-     * @param port port
-     * @param database data base
-     * @param principal principal
-     * @param userName user name
-     * @param password password
-     * @param other other arguments
+     * @param dataSourceParam datasource param
     * @return create result code
     */
    @ApiOperation(value = "createDataSource", notes = "CREATE_DATA_SOURCE_NOTES")
-    @ApiImplicitParams({
-            @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"),
-            @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"),
-            @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "principal", value = "DATA_SOURCE_PRINCIPAL", dataType = "String"),
-            @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"),
-            @ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"),
-            @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String"),
-            @ApiImplicitParam(name = "javaSecurityKrb5Conf", value = "DATA_SOURCE_KERBEROS_KRB5_CONF", dataType = "String"),
-            @ApiImplicitParam(name = "loginUserKeytabUsername", value = "DATA_SOURCE_KERBEROS_KEYTAB_USERNAME", dataType = "String"),
-            @ApiImplicitParam(name = "loginUserKeytabPath", value = "DATA_SOURCE_KERBEROS_KEYTAB_PATH", dataType = "String")
-    })
    @PostMapping(value = "/create")
    @ResponseStatus(HttpStatus.CREATED)
    @ApiException(CREATE_DATASOURCE_ERROR)
    public Result createDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
-                                   @RequestParam("name") String name,
-                                   @RequestParam(value = "note", required = false) String note,
-                                   @RequestParam(value = "type") DbType type,
-                                   @RequestParam(value = "host") String host,
-                                   @RequestParam(value = "port") String port,
-                                   @RequestParam(value = "database") String database,
-                                   @RequestParam(value = "principal") String principal,
-                                   @RequestParam(value = "userName") String userName,
-                                   @RequestParam(value = "password") String password,
-                                   @RequestParam(value = "connectType") DbConnectType connectType,
-                                   @RequestParam(value = "other") String other,
-                                   @RequestParam(value = "javaSecurityKrb5Conf", required = false) String javaSecurityKrb5Conf,
-                                   @RequestParam(value = "loginUserKeytabUsername", required = false) String loginUserKeytabUsername,
-                                   @RequestParam(value = "loginUserKeytabPath", required = false) String loginUserKeytabPath) {
-        logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {}, port: {}, database : {}, principal: {}, userName : {}, connectType: {}, other: {}",
-                loginUser.getUserName(), name, note, type, host, port, database, principal, userName, connectType, other);
-        String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other,
-                javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath);
-        return dataSourceService.createDataSource(loginUser, name, note, type, parameter);
+                                   @ApiParam(name = "DATA_SOURCE_PARAM", required = true)
+                                   @RequestBody BaseDataSourceParamDTO dataSourceParam) {
+        String userName = RegexUtils.escapeNRT(loginUser.getUserName());
+        logger.info("login user {} create datasource : {}", userName, dataSourceParam);
+        return dataSourceService.createDataSource(loginUser, dataSourceParam);
    }

@@ -135,61 +101,21 @@ public class DataSourceController extends BaseController {
     * updateProcessInstance data source
     *
     * @param loginUser login user
-     * @param name data source name
-     * @param note description
-     * @param type data source type
-     * @param other other arguments
-     * @param id data source di
-     * @param host host
-     * @param port port
-     * @param database database
-     * @param principal principal
-     * @param userName user name
-     * @param password password
+     * @param dataSourceParam datasource param
     * @return update result code
     */
    @ApiOperation(value = "updateDataSource", notes = "UPDATE_DATA_SOURCE_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100"),
-            @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"),
-            @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"),
-            @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "principal", value = "DATA_SOURCE_PRINCIPAL", dataType = "String"),
-            @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"),
-            @ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"),
-            @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String"),
-            @ApiImplicitParam(name = "javaSecurityKrb5Conf", value = "DATA_SOURCE_KERBEROS_KRB5_CONF", dataType = "String"),
-            @ApiImplicitParam(name = "loginUserKeytabUsername", value = "DATA_SOURCE_KERBEROS_KEYTAB_USERNAME", dataType = "String"),
-            @ApiImplicitParam(name = "loginUserKeytabPath", value = "DATA_SOURCE_KERBEROS_KEYTAB_PATH", dataType = "String")
    })
    @PostMapping(value = "/update")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(UPDATE_DATASOURCE_ERROR)
    public Result updateDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
-                                   @RequestParam("id") int id,
-                                   @RequestParam("name") String name,
-                                   @RequestParam(value = "note", required = false) String note,
-                                   @RequestParam(value = "type") DbType type,
-                                   @RequestParam(value = "host") String host,
-                                   @RequestParam(value = "port") String port,
-                                   @RequestParam(value = "database") String database,
-                                   @RequestParam(value = "principal") String principal,
-                                   @RequestParam(value = "userName") String userName,
-                                   @RequestParam(value = "password") String password,
-                                   @RequestParam(value = "connectType") DbConnectType connectType,
-                                   @RequestParam(value = "other") String other,
-                                   @RequestParam(value = "javaSecurityKrb5Conf", required = false) String javaSecurityKrb5Conf,
-                                   @RequestParam(value = "loginUserKeytabUsername", required = false) String loginUserKeytabUsername,
-                                   @RequestParam(value = "loginUserKeytabPath", required = false) String loginUserKeytabPath) {
-        logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, connectType: {}, other: {}",
-                loginUser.getUserName(), name, note, type, connectType, other);
-        String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other,
-                javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath);
-        return dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
+                                   @RequestBody BaseDataSourceParamDTO dataSourceParam) {
+        String userName = RegexUtils.escapeNRT(loginUser.getUserName());
+        logger.info("login user {} updateProcessInstance datasource : {}", userName, dataSourceParam);
+        return dataSourceService.updateDataSource(dataSourceParam.getId(), loginUser, dataSourceParam);
    }

    /**
@@ -270,58 +196,23 @@ public class DataSourceController extends BaseController {
     * connect datasource
     *
     * @param loginUser login user
-     * @param name data source name
-     * @param note data soruce description
-     * @param type data source type
-     * @param other other parameters
-     * @param host host
-     * @param port port
-     * @param database data base
-     * @param principal principal
-     * @param userName user name
-     * @param password password
+     * @param dataSourceParam datasource param
     * @return connect result code
     */
    @ApiOperation(value = "connectDataSource", notes = "CONNECT_DATA_SOURCE_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"),
-            @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"),
-            @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "principal", value = "DATA_SOURCE_PRINCIPAL", dataType = "String"),
-            @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
-            @ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"),
-            @ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"),
-            @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String"),
-            @ApiImplicitParam(name = "javaSecurityKrb5Conf", value = "DATA_SOURCE_KERBEROS_KRB5_CONF", dataType = "String"),
-            @ApiImplicitParam(name = "loginUserKeytabUsername", value = "DATA_SOURCE_KERBEROS_KEYTAB_USERNAME", dataType = "String"),
-            @ApiImplicitParam(name = "loginUserKeytabPath", value = "DATA_SOURCE_KERBEROS_KEYTAB_PATH", dataType = "String")
    })
    @PostMapping(value = "/connect")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(CONNECT_DATASOURCE_FAILURE)
    public Result connectDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
-                                    @RequestParam("name") String name,
-                                    @RequestParam(value = "note", required = false) String note,
-                                    @RequestParam(value = "type") DbType type,
-                                    @RequestParam(value = "host") String host,
-                                    @RequestParam(value = "port") String port,
-                                    @RequestParam(value = "database") String database,
-                                    @RequestParam(value = "principal") String principal,
-                                    @RequestParam(value = "userName") String userName,
-                                    @RequestParam(value = "password") String password,
-                                    @RequestParam(value = "connectType") DbConnectType connectType,
-                                    @RequestParam(value = "other") String other,
-                                    @RequestParam(value = "javaSecurityKrb5Conf", required = false) String javaSecurityKrb5Conf,
-                                    @RequestParam(value = "loginUserKeytabUsername", required = false) String loginUserKeytabUsername,
-                                    @RequestParam(value = "loginUserKeytabPath", required = false) String loginUserKeytabPath) {
-        logger.info("login user {}, connect datasource: {}, note: {}, type: {}, connectType: {}, other: {}",
-                loginUser.getUserName(), name, note, type, connectType, other);
-        String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other,
-                javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath);
-        return dataSourceService.checkConnection(type, parameter);
+                                    @RequestBody BaseDataSourceParamDTO dataSourceParam) {
+        String userName = RegexUtils.escapeNRT(loginUser.getUserName());
+        logger.info("login user {}, connect datasource: {}", userName, dataSourceParam);
+        DatasourceUtil.checkDatasourceParam(dataSourceParam);
+        ConnectionParam connectionParams = DatasourceUtil.buildConnectionParams(dataSourceParam);
+        return dataSourceService.checkConnection(dataSourceParam.getType(), connectionParams);
    }

    /**
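Worth noting from the hunks above: all three endpoints now consume a JSON body bound to BaseDataSourceParamDTO, and /datasources/update reads the datasource id from that body (dataSourceParam.getId()) rather than from a separate id request parameter. A hedged sketch of how a caller might assemble the update payload follows; the field values and the choice of MysqlDatasourceParamDTO are illustrative, not taken from the commit:

    import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
    import org.apache.dolphinscheduler.common.utils.JSONUtils;

    public class UpdateDatasourcePayloadSketch {
        public static void main(String[] args) {
            MysqlDatasourceParamDTO param = new MysqlDatasourceParamDTO();
            param.setId(100);                   // the id now travels in the body, not as ?id=
            param.setName("mysql_test");
            param.setNote("renamed datasource");
            param.setHost("192.168.1.10");
            param.setPort(3306);
            param.setDatabase("dolphinscheduler");
            param.setUserName("root");
            param.setPassword("");              // a blank password keeps the stored one (see the service impl below)
            // POST this JSON to /datasources/update with Content-Type: application/json
            System.out.println(JSONUtils.toJsonString(param));
        }
    }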

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (37)

@@ -18,7 +18,8 @@
 package org.apache.dolphinscheduler.api.service;

 import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.enums.DbConnectType;
+import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
+import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.dao.entity.User;
@@ -33,26 +34,20 @@ public interface DataSourceService {
     * create data source
     *
     * @param loginUser login user
-     * @param name data source name
-     * @param desc data source description
-     * @param type data source type
-     * @param parameter datasource parameters
+     * @param datasourceParam datasource parameter
     * @return create result code
     */
-    Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter);
+    Result<Object> createDataSource(User loginUser, BaseDataSourceParamDTO datasourceParam);

    /**
     * updateProcessInstance datasource
     *
     * @param loginUser login user
-     * @param name data source name
-     * @param desc data source description
-     * @param type data source type
-     * @param parameter datasource parameters
     * @param id data source id
+     * @param dataSourceParam data source params
     * @return update result code
     */
-    Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter);
+    Result<Object> updateDataSource(int id, User loginUser, BaseDataSourceParamDTO dataSourceParam);

    /**
     * updateProcessInstance datasource
@@ -97,7 +92,7 @@
     * @param parameter data source parameters
     * @return true if connect successfully, otherwise false
     */
-    Result<Object> checkConnection(DbType type, String parameter);
+    Result<Object> checkConnection(DbType type, ConnectionParam parameter);

    /**
     * test connection
@@ -107,24 +102,6 @@
     */
    Result<Object> connectionTest(int id);

-    /**
-     * build paramters
-     *
-     * @param type data source type
-     * @param host data source host
-     * @param port data source port
-     * @param database data source database name
-     * @param userName user name
-     * @param password password
-     * @param other other parameters
-     * @param principal principal
-     * @return datasource parameter
-     */
-    String buildParameter(DbType type, String host,
-                          String port, String database, String principal, String userName,
-                          String password, DbConnectType connectType, String other,
-                          String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath);

    /**
     * delete datasource
     *
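With buildParameter gone from the interface, callers build the ConnectionParam themselves and pass it to checkConnection(DbType, ConnectionParam). A minimal caller-side sketch in the style of the new /connect endpoint; the wrapper class and injected dataSourceService are assumptions for illustration, only the DatasourceUtil and DataSourceService calls come from the diffs:

    import org.apache.dolphinscheduler.api.service.DataSourceService;
    import org.apache.dolphinscheduler.api.utils.Result;
    import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
    import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
    import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;

    public class ConnectionCheckSketch {

        private final DataSourceService dataSourceService;

        public ConnectionCheckSketch(DataSourceService dataSourceService) {
            this.dataSourceService = dataSourceService;
        }

        /** Validate a submitted form and test connectivity, mirroring the new /connect endpoint. */
        public Result<Object> tryConnect(BaseDataSourceParamDTO dataSourceParam) {
            DatasourceUtil.checkDatasourceParam(dataSourceParam);
            ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(dataSourceParam);
            return dataSourceService.checkConnection(dataSourceParam.getType(), connectionParam);
        }
    }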

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java (360)

@@ -18,37 +18,30 @@
 package org.apache.dolphinscheduler.api.service.impl;

 import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.exceptions.ServiceException;
 import org.apache.dolphinscheduler.api.service.DataSourceService;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.enums.DbConnectType;
+import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
+import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
 import org.apache.dolphinscheduler.common.enums.DbType;
-import org.apache.dolphinscheduler.common.utils.CommonUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
-import org.apache.dolphinscheduler.dao.datasource.MySQLDataSource;
-import org.apache.dolphinscheduler.dao.datasource.OracleDataSource;
 import org.apache.dolphinscheduler.dao.entity.DataSource;
 import org.apache.dolphinscheduler.dao.entity.User;
 import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
 import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
-import org.apache.commons.collections4.MapUtils;

 import java.sql.Connection;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.regex.Pattern;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -68,25 +61,6 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
    private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class);

-    public static final String NAME = "name";
-    public static final String NOTE = "note";
-    public static final String TYPE = "type";
-    public static final String HOST = "host";
-    public static final String PORT = "port";
-    public static final String PRINCIPAL = "principal";
-    public static final String DATABASE = "database";
-    public static final String USER_NAME = "userName";
-    public static final String OTHER = "other";
-
-    private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");
-    private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]]+$");
-    private static final Pattern DATABASE_PATTER = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");
-    private static final Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9]+$");

    @Autowired
    private DataSourceMapper dataSourceMapper;
@@ -97,23 +71,23 @@
     * create data source
     *
     * @param loginUser login user
-     * @param name data source name
-     * @param desc data source description
-     * @param type data source type
-     * @param parameter datasource parameters
+     * @param datasourceParam datasource parameters
     * @return create result code
     */
    @Override
-    public Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
+    public Result<Object> createDataSource(User loginUser, BaseDataSourceParamDTO datasourceParam) {
+        DatasourceUtil.checkDatasourceParam(datasourceParam);
        Result<Object> result = new Result<>();
        // check name can use or not
-        if (checkName(name)) {
+        if (checkName(datasourceParam.getName())) {
            putMsg(result, Status.DATASOURCE_EXIST);
            return result;
        }
-        Result<Object> isConnection = checkConnection(type, parameter);
+        // check connect
+        ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(datasourceParam);
+        Result<Object> isConnection = checkConnection(datasourceParam.getType(), connectionParam);
        if (Status.SUCCESS.getCode() != isConnection.getCode()) {
+            putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
            return result;
        }
@@ -121,12 +95,12 @@
        DataSource dataSource = new DataSource();
        Date now = new Date();
-        dataSource.setName(name.trim());
-        dataSource.setNote(desc);
+        dataSource.setName(datasourceParam.getName().trim());
+        dataSource.setNote(datasourceParam.getNote());
        dataSource.setUserId(loginUser.getId());
        dataSource.setUserName(loginUser.getUserName());
-        dataSource.setType(type);
-        dataSource.setConnectionParams(parameter);
+        dataSource.setType(datasourceParam.getType());
+        dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam));
        dataSource.setCreateTime(now);
        dataSource.setUpdateTime(now);
        dataSourceMapper.insert(dataSource);
@@ -148,8 +122,8 @@
     * @return update result code
     */
    @Override
-    public Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {
+    public Result<Object> updateDataSource(int id, User loginUser, BaseDataSourceParamDTO dataSourceParam) {
+        DatasourceUtil.checkDatasourceParam(dataSourceParam);
        Result<Object> result = new Result<>();
        // determine whether the data source exists
        DataSource dataSource = dataSourceMapper.selectById(id);
@@ -164,33 +138,31 @@
        }

        //check name can use or not
-        if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
+        if (!dataSource.getName().trim().equals(dataSource.getName()) && checkName(dataSource.getName())) {
            putMsg(result, Status.DATASOURCE_EXIST);
            return result;
        }
        //check password,if the password is not updated, set to the old password.
-        ObjectNode paramObject = JSONUtils.parseObject(parameter);
-        String password = paramObject.path(Constants.PASSWORD).asText();
+        BaseConnectionParam connectionParam = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(dataSourceParam);
+        String password = connectionParam.getPassword();
        if (StringUtils.isBlank(password)) {
            String oldConnectionParams = dataSource.getConnectionParams();
            ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
-            paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText());
+            connectionParam.setPassword(oldParams.path(Constants.PASSWORD).asText());
        }
-        // connectionParams json
-        String connectionParams = paramObject.toString();

-        Result<Object> isConnection = checkConnection(type, parameter);
+        Result<Object> isConnection = checkConnection(dataSource.getType(), connectionParam);
        if (Status.SUCCESS.getCode() != isConnection.getCode()) {
            return result;
        }

        Date now = new Date();
-        dataSource.setName(name.trim());
-        dataSource.setNote(desc);
+        dataSource.setName(dataSource.getName().trim());
+        dataSource.setNote(dataSource.getNote());
        dataSource.setUserName(loginUser.getUserName());
-        dataSource.setType(type);
-        dataSource.setConnectionParams(connectionParams);
+        dataSource.setType(dataSource.getType());
+        dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam));
        dataSource.setUpdateTime(now);
        dataSourceMapper.updateById(dataSource);
        putMsg(result, Status.SUCCESS);
@@ -218,79 +190,13 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
            return result;
        }
-        // type
-        String dataSourceType = dataSource.getType().toString();
-        // name
-        String dataSourceName = dataSource.getName();
-        // desc
-        String desc = dataSource.getNote();
-        // parameter
-        String parameter = dataSource.getConnectionParams();
-
-        BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
-        DbConnectType connectType = null;
-        String hostSeperator = Constants.DOUBLE_SLASH;
-        if (DbType.ORACLE.equals(dataSource.getType())) {
-            connectType = ((OracleDataSource) datasourceForm).getConnectType();
-            if (DbConnectType.ORACLE_SID.equals(connectType)) {
-                hostSeperator = Constants.AT_SIGN;
-            }
-        }
-        String database = datasourceForm.getDatabase();
-        // jdbc connection params
-        String other = datasourceForm.getOther();
-        String address = datasourceForm.getAddress();
-
-        String[] hostsPorts = getHostsAndPort(address, hostSeperator);
-        // ip host
-        String host = hostsPorts[0];
-        // prot
-        String port = hostsPorts[1];
-        String separator = "";
-
-        switch (dataSource.getType()) {
-            case HIVE:
-            case SQLSERVER:
-                separator = ";";
-                break;
-            case MYSQL:
-            case POSTGRESQL:
-            case CLICKHOUSE:
-            case ORACLE:
-            case PRESTO:
-                separator = "&";
-                break;
-            default:
-                separator = "&";
-                break;
-        }
-
-        Map<String, String> otherMap = new LinkedHashMap<>();
-        if (other != null) {
-            String[] configs = other.split(separator);
-            for (String config : configs) {
-                otherMap.put(config.split("=")[0], config.split("=")[1]);
-            }
-        }
-
-        Map<String, Object> map = new HashMap<>();
-        map.put(NAME, dataSourceName);
-        map.put(NOTE, desc);
-        map.put(TYPE, dataSourceType);
-        if (connectType != null) {
-            map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
-        }
-
-        map.put(HOST, host);
-        map.put(PORT, port);
-        map.put(PRINCIPAL, datasourceForm.getPrincipal());
-        map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf());
-        map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername());
-        map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath());
-        map.put(DATABASE, database);
-        map.put(USER_NAME, datasourceForm.getUser());
-        map.put(OTHER, otherMap);
-
-        result.put(Constants.DATA_LIST, map);
+        BaseDataSourceParamDTO baseDataSourceParamDTO = DatasourceUtil.buildDatasourceParamDTO(
+                dataSource.getType(), dataSource.getConnectionParams());
+        baseDataSourceParamDTO.setId(dataSource.getId());
+        baseDataSourceParamDTO.setName(dataSource.getName());
+        baseDataSourceParamDTO.setNote(dataSource.getNote());
+
+        result.put(Constants.DATA_LIST, baseDataSourceParamDTO);
        putMsg(result, Status.SUCCESS);
        return result;
    }
@@ -400,14 +306,9 @@
     * @return true if connect successfully, otherwise false
     */
    @Override
-    public Result<Object> checkConnection(DbType type, String parameter) {
+    public Result<Object> checkConnection(DbType type, ConnectionParam connectionParam) {
        Result<Object> result = new Result<>();
-        BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter);
-        if (datasource == null) {
-            putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type);
-            return result;
-        }
-        try (Connection connection = datasource.getConnection()) {
+        try (Connection connection = DatasourceUtil.getConnection(type, connectionParam)) {
            if (connection == null) {
                putMsg(result, Status.CONNECTION_TEST_FAILURE);
                return result;
@@ -415,7 +316,7 @@
            putMsg(result, Status.SUCCESS);
            return result;
        } catch (Exception e) {
-            logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage());
+            logger.error("datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type, connectionParam, e.getMessage());
            return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), e.getMessage());
        }
    }
@@ -434,135 +335,7 @@
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
-        return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
+        return checkConnection(dataSource.getType(), DatasourceUtil.buildConnectionParams(dataSource.getType(), dataSource.getConnectionParams()));
    }

-    /**
-     * build paramters
-     *
-     * @param type data source type
-     * @param host data source host
-     * @param port data source port
-     * @param database data source database name
-     * @param userName user name
-     * @param password password
-     * @param other other parameters
-     * @param principal principal
-     * @return datasource parameter
-     */
-    @Override
-    public String buildParameter(DbType type, String host,
-                                 String port, String database, String principal, String userName,
-                                 String password, DbConnectType connectType, String other,
-                                 String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) {
-        checkParams(type, port, host, database, other);
-        String address = buildAddress(type, host, port, connectType);
-        Map<String, Object> parameterMap = new LinkedHashMap<>();
-        String jdbcUrl;
-        if (DbType.SQLSERVER == type) {
-            jdbcUrl = address + ";databaseName=" + database;
-        } else {
-            jdbcUrl = address + "/" + database;
-        }
-
-        if (Constants.ORACLE.equals(type.name())) {
-            parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
-        }
-
-        if (CommonUtils.getKerberosStartupState()
-                && (type == DbType.HIVE || type == DbType.SPARK)) {
-            jdbcUrl += ";principal=" + principal;
-        }
-
-        String separator = "";
-        if (Constants.MYSQL.equals(type.name())
-                || Constants.POSTGRESQL.equals(type.name())
-                || Constants.CLICKHOUSE.equals(type.name())
-                || Constants.ORACLE.equals(type.name())
-                || Constants.PRESTO.equals(type.name())) {
-            separator = "&";
-        } else if (Constants.HIVE.equals(type.name())
-                || Constants.SPARK.equals(type.name())
-                || Constants.DB2.equals(type.name())
-                || Constants.SQLSERVER.equals(type.name())) {
-            separator = ";";
-        }
-
-        parameterMap.put(Constants.ADDRESS, address);
-        parameterMap.put(Constants.DATABASE, database);
-        parameterMap.put(Constants.JDBC_URL, jdbcUrl);
-        parameterMap.put(Constants.USER, userName);
-        parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
-        if (CommonUtils.getKerberosStartupState()
-                && (type == DbType.HIVE || type == DbType.SPARK)) {
-            parameterMap.put(Constants.PRINCIPAL, principal);
-            parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf);
-            parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername);
-            parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath);
-        }
-
-        Map<String, String> map = JSONUtils.toMap(other);
-        if (type == DbType.MYSQL) {
-            map = MySQLDataSource.buildOtherParams(other);
-        }
-
-        if (MapUtils.isNotEmpty(map)) {
-            StringBuilder otherSb = new StringBuilder();
-            for (Map.Entry<String, String> entry : map.entrySet()) {
-                otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
-            }
-            if (!Constants.DB2.equals(type.name())) {
-                otherSb.deleteCharAt(otherSb.length() - 1);
-            }
-            parameterMap.put(Constants.OTHER, otherSb);
-        }
-
-        if (logger.isDebugEnabled()) {
-            logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap));
-        }
-        return JSONUtils.toJsonString(parameterMap);
-    }
-
-    private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
-        StringBuilder sb = new StringBuilder();
-        if (Constants.MYSQL.equals(type.name())) {
-            sb.append(Constants.JDBC_MYSQL);
-            sb.append(host).append(":").append(port);
-        } else if (Constants.POSTGRESQL.equals(type.name())) {
-            sb.append(Constants.JDBC_POSTGRESQL);
-            sb.append(host).append(":").append(port);
-        } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
-            sb.append(Constants.JDBC_HIVE_2);
-            String[] hostArray = host.split(",");
-            if (hostArray.length > 0) {
-                for (String zkHost : hostArray) {
-                    sb.append(String.format("%s:%s,", zkHost, port));
-                }
-                sb.deleteCharAt(sb.length() - 1);
-            }
-        } else if (Constants.CLICKHOUSE.equals(type.name())) {
-            sb.append(Constants.JDBC_CLICKHOUSE);
-            sb.append(host).append(":").append(port);
-        } else if (Constants.ORACLE.equals(type.name())) {
-            if (connectType == DbConnectType.ORACLE_SID) {
-                sb.append(Constants.JDBC_ORACLE_SID);
-            } else {
-                sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
-            }
-            sb.append(host).append(":").append(port);
-        } else if (Constants.SQLSERVER.equals(type.name())) {
-            sb.append(Constants.JDBC_SQLSERVER);
-            sb.append(host).append(":").append(port);
-        } else if (Constants.DB2.equals(type.name())) {
-            sb.append(Constants.JDBC_DB2);
-            sb.append(host).append(":").append(port);
-        } else if (Constants.PRESTO.equals(type.name())) {
-            sb.append(Constants.JDBC_PRESTO);
-            sb.append(host).append(":").append(port);
-        }
-        return sb.toString();
-    }

    /**
@@ -661,61 +434,4 @@
        return result;
    }

-    /**
-     * get host and port by address
-     *
-     * @param address address
-     * @param separator separator
-     * @return sting array: [host,port]
-     */
-    private String[] getHostsAndPort(String address, String separator) {
-        String[] result = new String[2];
-        String[] tmpArray = address.split(separator);
-        String hostsAndPorts = tmpArray[tmpArray.length - 1];
-
-        StringBuilder hosts = new StringBuilder();
-        String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);
-        String port = hostPortArray[0].split(Constants.COLON)[1];
-        for (String hostPort : hostPortArray) {
-            hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
-        }
-        hosts.deleteCharAt(hosts.length() - 1);
-
-        result[0] = hosts.toString();
-        result[1] = port;
-        return result;
-    }
-
-    private void checkParams(DbType type, String port, String host, String database, String other) {
-        if (null == DbType.of(type.getCode())) {
-            throw new ServiceException(Status.DATASOURCE_DB_TYPE_ILLEGAL);
-        }
-        if (!isNumeric(port)) {
-            throw new ServiceException(Status.DATASOURCE_PORT_ILLEGAL);
-        }
-        if (!IPV4_PATTERN.matcher(host).matches() || !IPV6_PATTERN.matcher(host).matches()) {
-            throw new ServiceException(Status.DATASOURCE_HOST_ILLEGAL);
-        }
-        if (!DATABASE_PATTER.matcher(database).matches()) {
-            throw new ServiceException(Status.DATASOURCE_NAME_ILLEGAL);
-        }
-        if (StringUtils.isBlank(other)) {
-            return;
-        }
-        Map<String, String> map = JSONUtils.toMap(other);
-        if (MapUtils.isEmpty(map)) {
-            return;
-        }
-        boolean paramsCheck = map.entrySet().stream().allMatch(p -> PARAMS_PATTER.matcher(p.getValue()).matches());
-        if (!paramsCheck) {
-            throw new ServiceException(Status.DATASOURCE_OTHER_PARAMS_ILLEGAL);
-        }
-    }
-
-    private static boolean isNumeric(String str) {
-        for (int i = str.length(); --i >= 0; ) {
-            if (!Character.isDigit(str.charAt(i))) {
-                return false;
-            }
-        }
-        return true;
-    }
 }
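On the storage side, connection parameters are now written as the JSON form of a ConnectionParam (JSONUtils.toJsonString(connectionParam)) and rebuilt from that JSON when a stored datasource is tested, as the checkConnection/connectionTest hunks above show. A rough sketch of that round trip, assuming a DataSource entity already loaded from the mapper (the helper class itself is illustrative, not part of the commit):

    import java.sql.Connection;

    import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
    import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
    import org.apache.dolphinscheduler.dao.entity.DataSource;

    public class StoredParamsRoundTripSketch {

        /** Rebuild the ConnectionParam stored as JSON and try to open a JDBC connection with it. */
        public static boolean canConnect(DataSource dataSource) {
            ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(
                    dataSource.getType(), dataSource.getConnectionParams());
            try (Connection connection = DatasourceUtil.getConnection(dataSource.getType(), connectionParam)) {
                return connection != null;
            } catch (Exception e) {
                // getConnection may fail if the driver cannot reach the database
                return false;
            }
        }
    }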

dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java (28)

@@ -24,8 +24,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.Result;
+import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;

+import java.util.HashMap;
+
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -46,20 +49,19 @@ public class DataSourceControllerTest extends AbstractControllerTest{
    @Ignore
    @Test
    public void testCreateDataSource() throws Exception {
-        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
-        paramsMap.add("name","mysql");
-        paramsMap.add("node","mysql data source test");
-        paramsMap.add("type","MYSQL");
-        paramsMap.add("host","192.168.xxxx.xx");
-        paramsMap.add("port","3306");
-        paramsMap.add("principal","");
-        paramsMap.add("database","dolphinscheduler");
-        paramsMap.add("userName","root");
-        paramsMap.add("password","root@123");
-        paramsMap.add("other","");
+        MysqlDatasourceParamDTO mysqlDatasourceParam = new MysqlDatasourceParamDTO();
+        mysqlDatasourceParam.setName("mysql");
+        mysqlDatasourceParam.setNote("mysql data source test");
+        mysqlDatasourceParam.setHost("192.168.xxxx.xx");
+        mysqlDatasourceParam.setPort(3306);
+        mysqlDatasourceParam.setDatabase("dolphinscheduler");
+        mysqlDatasourceParam.setUserName("root");
+        mysqlDatasourceParam.setPassword("root@123");
+        mysqlDatasourceParam.setOther(new HashMap<>());
        MvcResult mvcResult = mockMvc.perform(post("/datasources/create")
                .header("sessionId", sessionId)
-                .params(paramsMap))
+                .contentType(MediaType.APPLICATION_JSON_UTF8)
+                .content(JSONUtils.toJsonString(mysqlDatasourceParam)))
                .andExpect(status().isCreated())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();
@@ -91,7 +93,7 @@
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
-        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
+        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

169
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java

@ -21,15 +21,18 @@ import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.DataSourceServiceImpl; import org.apache.dolphinscheduler.api.service.impl.DataSourceServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
import org.apache.dolphinscheduler.common.datasource.hive.HiveDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbConnectType; import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.MySQLDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
@ -37,6 +40,7 @@ import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import java.sql.Connection; import java.sql.Connection;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -56,7 +60,7 @@ import org.powermock.modules.junit4.PowerMockRunner;
*/ */
@RunWith(PowerMockRunner.class) @RunWith(PowerMockRunner.class)
@PowerMockIgnore({"sun.security.*", "javax.net.*"}) @PowerMockIgnore({"sun.security.*", "javax.net.*"})
@PrepareForTest({DataSourceFactory.class, CommonUtils.class}) @PrepareForTest({DatasourceUtil.class, CommonUtils.class})
public class DataSourceServiceTest { public class DataSourceServiceTest {
@InjectMocks @InjectMocks
@ -70,11 +74,17 @@ public class DataSourceServiceTest {
public void createDataSourceTest() { public void createDataSourceTest() {
User loginUser = getAdminUser(); User loginUser = getAdminUser();
String dataSourceName = "dataSource01"; String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource"; String dataSourceDesc = "test dataSource";
DbType dataSourceType = DbType.POSTGRESQL;
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null, null, null, null); PostgreSqlDatasourceParamDTO postgreSqlDatasourceParam = new PostgreSqlDatasourceParamDTO();
postgreSqlDatasourceParam.setDatabase(dataSourceName);
postgreSqlDatasourceParam.setNote(dataSourceDesc);
postgreSqlDatasourceParam.setHost("172.16.133.200");
postgreSqlDatasourceParam.setPort(5432);
postgreSqlDatasourceParam.setDatabase("dolphinscheduler");
postgreSqlDatasourceParam.setUserName("postgres");
postgreSqlDatasourceParam.setPassword("");
// data source exits // data source exits
List<DataSource> dataSourceList = new ArrayList<>(); List<DataSource> dataSourceList = new ArrayList<>();
@ -82,30 +92,30 @@ public class DataSourceServiceTest {
dataSource.setName(dataSourceName);
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList);
Result dataSourceExitsResult = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceExitsResult.getCode().intValue());
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(postgreSqlDatasourceParam);
DbType dataSourceType = postgreSqlDatasourceParam.getType();
// data source connect failed
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
Result connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(), Status.DATASOURCE_CONNECT_FAILED.getMsg());
//PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
PowerMockito.doReturn(connectionResult).when(dataSourceService).checkConnection(dataSourceType, connectionParam);
Result connectFailedResult = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailedResult.getCode().intValue());
// datasource param not valid
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
connectionResult = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result notValidError = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode(), notValidError.getCode().intValue());
// success
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result success = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue());
}
@ -115,18 +125,25 @@ public class DataSourceServiceTest {
int dataSourceId = 12;
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
PostgreSqlDatasourceParamDTO postgreSqlDatasourceParam = new PostgreSqlDatasourceParamDTO();
postgreSqlDatasourceParam.setName(dataSourceName);
postgreSqlDatasourceParam.setNote(dataSourceDesc);
postgreSqlDatasourceParam.setHost("172.16.133.200");
postgreSqlDatasourceParam.setPort(5432);
postgreSqlDatasourceParam.setDatabase("dolphinscheduler");
postgreSqlDatasourceParam.setUserName("postgres");
postgreSqlDatasourceParam.setPassword("");
// data source not exists
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Result resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(), resourceNotExits.getCode().intValue());
// user no operation perm
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Result userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), userNoOperationPerm.getCode().intValue());
// data source name exists
@ -135,23 +152,25 @@ public class DataSourceServiceTest {
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(dataSourceList);
Result dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceNameExist.getCode().intValue());
// data source connect failed
DbType dataSourceType = postgreSqlDatasourceParam.getType();
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(postgreSqlDatasourceParam);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
Result connectionResult = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailed.getCode().intValue());
//success
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(), Status.DATASOURCE_CONNECT_FAILED.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result success = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue());
}
@ -279,40 +298,69 @@ public class DataSourceServiceTest {
@Test
public void buildParameter() {
OracleDatasourceParamDTO oracleDatasourceParamDTO = new OracleDatasourceParamDTO();
oracleDatasourceParamDTO.setHost("192.168.9.1");
oracleDatasourceParamDTO.setPort(1521);
oracleDatasourceParamDTO.setDatabase("im");
oracleDatasourceParamDTO.setUserName("test");
oracleDatasourceParamDTO.setPassword("test");
oracleDatasourceParamDTO.setConnectType(DbConnectType.ORACLE_SERVICE_NAME);
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(oracleDatasourceParamDTO);
String expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\","
+ "\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"connectType\":\"ORACLE_SERVICE_NAME\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
PowerMockito.mockStatic(CommonUtils.class);
PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(true);
PowerMockito.when(CommonUtils.encodePassword(Mockito.anyString())).thenReturn("test");
HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO();
hiveDataSourceParamDTO.setHost("192.168.9.1");
hiveDataSourceParamDTO.setPort(10000);
hiveDataSourceParamDTO.setDatabase("im");
hiveDataSourceParamDTO.setPrincipal("hive/hdfs-mycluster@ESZ.COM");
hiveDataSourceParamDTO.setUserName("test");
hiveDataSourceParamDTO.setPassword("test");
hiveDataSourceParamDTO.setJavaSecurityKrb5Conf("/opt/krb5.conf");
hiveDataSourceParamDTO.setLoginUserKeytabPath("/opt/hdfs.headless.keytab");
hiveDataSourceParamDTO.setLoginUserKeytabUsername("test2/hdfs-mycluster@ESZ.COM");
connectionParam = DatasourceUtil.buildConnectionParams(hiveDataSourceParamDTO);
expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:hive2://192.168.9.1:10000\","
+ "\"database\":\"im\",\"jdbcUrl\":\"jdbc:hive2://192.168.9.1:10000/im;principal=hive/hdfs-mycluster@ESZ.COM\",\"principal\":\"hive/hdfs-mycluster@ESZ.COM\","
+ "\"javaSecurityKrb5Conf\":\"/opt/krb5.conf\",\"loginUserKeytabUsername\":\"test2/hdfs-mycluster@ESZ.COM\",\"loginUserKeytabPath\":\"/opt/hdfs.headless.keytab\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
}
@Test
public void buildParameterWithDecodePassword() {
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
Map<String, String> other = new HashMap<>();
other.put("autoDeserialize", "yes");
other.put("allowUrlInLocalInfile", "true");
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setHost("192.168.9.1");
mysqlDatasourceParamDTO.setPort(1521);
mysqlDatasourceParamDTO.setDatabase("im");
mysqlDatasourceParamDTO.setUserName("test");
mysqlDatasourceParamDTO.setPassword("123456");
mysqlDatasourceParamDTO.setOther(other);
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO);
String expected = "{\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\",\"address\":\"jdbc:mysql://192.168.9.1:1521\","
+ "\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setHost("192.168.9.1");
mysqlDatasourceParamDTO.setPort(1521);
mysqlDatasourceParamDTO.setDatabase("im");
mysqlDatasourceParamDTO.setUserName("test");
mysqlDatasourceParamDTO.setPassword("123456");
connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO);
expected = "{\"user\":\"test\",\"password\":\"123456\",\"address\":\"jdbc:mysql://192.168.9.1:1521\","
+ "\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
}
/**
@ -334,22 +382,27 @@ public class DataSourceServiceTest {
@Test
public void testCheckConnection() throws Exception {
DbType dataSourceType = DbType.POSTGRESQL;
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
PostgreSqlDatasourceParamDTO postgreSqlDatasourceParam = new PostgreSqlDatasourceParamDTO();
postgreSqlDatasourceParam.setName(dataSourceName);
postgreSqlDatasourceParam.setNote(dataSourceDesc);
postgreSqlDatasourceParam.setHost("172.16.133.200");
postgreSqlDatasourceParam.setPort(5432);
postgreSqlDatasourceParam.setDatabase("dolphinscheduler");
postgreSqlDatasourceParam.setUserName("postgres");
postgreSqlDatasourceParam.setPassword("");
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(postgreSqlDatasourceParam);
PowerMockito.mockStatic(DatasourceUtil.class);
Result result = dataSourceService.checkConnection(dataSourceType, connectionParam);
Assert.assertEquals(Status.CONNECTION_TEST_FAILURE.getCode(), result.getCode().intValue());
Connection connection = PowerMockito.mock(Connection.class);
PowerMockito.when(DatasourceUtil.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection);
result = dataSourceService.checkConnection(dataSourceType, connectionParam);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}

79
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/AbstractDatasourceProcessor.java

@ -0,0 +1,79 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import org.apache.commons.collections4.MapUtils;
import java.util.Map;
import java.util.regex.Pattern;
public abstract class AbstractDatasourceProcessor implements DatasourceProcessor {
private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");
private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]]+$");
private static final Pattern DATABASE_PATTER = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");
private static final Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9]+$");
@Override
public void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) {
checkHost(baseDataSourceParamDTO.getHost());
checkDatasourcePatter(baseDataSourceParamDTO.getDatabase());
checkOther(baseDataSourceParamDTO.getOther());
}
/**
* Check the host is valid
*
* @param host datasource host
*/
protected void checkHost(String host) {
if (!IPV4_PATTERN.matcher(host).matches() && !IPV6_PATTERN.matcher(host).matches()) {
throw new IllegalArgumentException("datasource host illegal");
}
}
/**
* check database name is valid
*
* @param database database name
*/
protected void checkDatasourcePatter(String database) {
if (!DATABASE_PATTER.matcher(database).matches()) {
throw new IllegalArgumentException("datasource name illegal");
}
}
/**
* check other is valid
*
* @param other other
*/
protected void checkOther(Map<String, String> other) {
if (MapUtils.isEmpty(other)) {
return;
}
boolean paramsCheck = other.entrySet().stream().allMatch(p -> PARAMS_PATTER.matcher(p.getValue()).matches());
if (!paramsCheck) {
throw new IllegalArgumentException("datasource other params illegal");
}
}
}
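A brief illustrative sketch (not part of this change set) of how these inherited checks surface to a caller; it assumes a processor that does not override checkDatasourceParam, for example the ClickHouse processor added later in this commit, and uses made-up values:
ClickHouseDatasourceParamDTO param = new ClickHouseDatasourceParamDTO();
param.setHost("192.168.9.1;badHost"); // ';' matches neither host pattern above
param.setDatabase("im");
try {
new ClickHouseDatasourceProcessor().checkDatasourceParam(param);
} catch (IllegalArgumentException e) {
// expected: "datasource host illegal", thrown by checkHost before any JDBC work happens
}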

98
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseConnectionParam.java

@ -0,0 +1,98 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
* The base model of connection param
* <p>
* {@link org.apache.dolphinscheduler.common.datasource.clickhouse.ClickhouseConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.db2.Db2ConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.hive.HiveConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.oracle.OracleConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.presto.PrestoConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.spark.SparkConnectionParam}
* {@link org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerConnectionParam}
*/
@JsonInclude(Include.NON_NULL)
public abstract class BaseConnectionParam implements ConnectionParam {
protected String user;
protected String password;
protected String address;
protected String database;
protected String jdbcUrl;
protected String other;
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public void setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
}
public String getOther() {
return other;
}
public void setOther(String other) {
this.other = other;
}
}
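A hedged serialization sketch (illustrative values, assuming JSONUtils delegates to a standard Jackson ObjectMapper): because of @JsonInclude(Include.NON_NULL), fields that are never set are omitted from the stored connectionParams JSON, which is why the expected strings in DataSourceServiceTest carry no "other" key.
MysqlConnectionParam param = new MysqlConnectionParam();
param.setUser("test");
param.setPassword("test");
param.setAddress("jdbc:mysql://192.168.9.1:3306");
param.setDatabase("im");
param.setJdbcUrl("jdbc:mysql://192.168.9.1:3306/im");
// "other" was never set, so the resulting JSON contains only the five populated fields
String json = JSONUtils.toJsonString(param);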

161
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseDataSourceParamDTO.java

@ -0,0 +1,161 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import org.apache.dolphinscheduler.common.datasource.clickhouse.ClickHouseDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.db2.Db2DatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.hive.HiveDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.presto.PrestoDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.spark.SparkDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerDatasourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.io.Serializable;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Basic datasource params submitted to api.
* <p>
* see {@link MysqlDatasourceParamDTO}
* see {@link PostgreSqlDatasourceParamDTO}
* see {@link HiveDataSourceParamDTO}
* see {@link SparkDatasourceParamDTO}
* see {@link ClickHouseDatasourceParamDTO}
* see {@link OracleDatasourceParamDTO}
* see {@link SqlServerDatasourceParamDTO}
* see {@link Db2DatasourceParamDTO}
* see {@link PrestoDatasourceParamDTO}
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes(value = {
@JsonSubTypes.Type(value = MysqlDatasourceParamDTO.class, name = "MYSQL"),
@JsonSubTypes.Type(value = PostgreSqlDatasourceParamDTO.class, name = "POSTGRESQL"),
@JsonSubTypes.Type(value = HiveDataSourceParamDTO.class, name = "HIVE"),
@JsonSubTypes.Type(value = SparkDatasourceParamDTO.class, name = "SPARK"),
@JsonSubTypes.Type(value = ClickHouseDatasourceParamDTO.class, name = "CLICKHOUSE"),
@JsonSubTypes.Type(value = OracleDatasourceParamDTO.class, name = "ORACLE"),
@JsonSubTypes.Type(value = SqlServerDatasourceParamDTO.class, name = "SQLSERVER"),
@JsonSubTypes.Type(value = Db2DatasourceParamDTO.class, name = "DB2"),
@JsonSubTypes.Type(value = PrestoDatasourceParamDTO.class, name = "PRESTO"),
})
public abstract class BaseDataSourceParamDTO implements Serializable {
protected Integer id;
protected String name;
protected String note;
protected String host;
protected Integer port;
protected String database;
protected String userName;
protected String password;
protected Map<String, String> other;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getNote() {
return note;
}
public void setNote(String note) {
this.note = note;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Map<String, String> getOther() {
return other;
}
public void setOther(Map<String, String> other) {
this.other = other;
}
/**
* Get the datasource type
* see {@link DbType}
*
* @return datasource type code
*/
public abstract DbType getType();
}
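A small sketch of the polymorphic mapping declared above (hypothetical payload; it assumes JSONUtils parses through a Jackson ObjectMapper, as the processors in this commit do when reading connection JSON): the "type" property selects the concrete DTO subclass.
String payload = "{\"type\":\"MYSQL\",\"name\":\"mysql01\",\"host\":\"192.168.9.1\","
+ "\"port\":3306,\"database\":\"im\",\"userName\":\"test\",\"password\":\"test\"}";
BaseDataSourceParamDTO dto = JSONUtils.parseObject(payload, BaseDataSourceParamDTO.class);
// dto is expected to be a MysqlDatasourceParamDTO here, so dto.getType() yields DbType.MYSQL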

57
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsConnectionParam.java

@ -0,0 +1,57 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
public class BaseHdfsConnectionParam extends BaseConnectionParam {
protected String principal;
protected String javaSecurityKrb5Conf;
protected String loginUserKeytabUsername;
protected String loginUserKeytabPath;
public String getPrincipal() {
return principal;
}
public void setPrincipal(String principal) {
this.principal = principal;
}
public String getJavaSecurityKrb5Conf() {
return javaSecurityKrb5Conf;
}
public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) {
this.javaSecurityKrb5Conf = javaSecurityKrb5Conf;
}
public String getLoginUserKeytabUsername() {
return loginUserKeytabUsername;
}
public void setLoginUserKeytabUsername(String loginUserKeytabUsername) {
this.loginUserKeytabUsername = loginUserKeytabUsername;
}
public String getLoginUserKeytabPath() {
return loginUserKeytabPath;
}
public void setLoginUserKeytabPath(String loginUserKeytabPath) {
this.loginUserKeytabPath = loginUserKeytabPath;
}
}

61
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsDatasourceParamDTO.java

@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
public abstract class BaseHdfsDatasourceParamDTO extends BaseDataSourceParamDTO {
protected String principal;
protected String javaSecurityKrb5Conf;
protected String loginUserKeytabUsername;
protected String loginUserKeytabPath;
public String getPrincipal() {
return principal;
}
public void setPrincipal(String principal) {
this.principal = principal;
}
public String getLoginUserKeytabUsername() {
return loginUserKeytabUsername;
}
public void setLoginUserKeytabUsername(String loginUserKeytabUsername) {
this.loginUserKeytabUsername = loginUserKeytabUsername;
}
public String getLoginUserKeytabPath() {
return loginUserKeytabPath;
}
public void setLoginUserKeytabPath(String loginUserKeytabPath) {
this.loginUserKeytabPath = loginUserKeytabPath;
}
public String getJavaSecurityKrb5Conf() {
return javaSecurityKrb5Conf;
}
public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) {
this.javaSecurityKrb5Conf = javaSecurityKrb5Conf;
}
}

23
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PrestoDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/ConnectionParam.java

@ -14,26 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import java.io.Serializable;
/**
* The model of Datasource Connection param
*/
public interface ConnectionParam extends Serializable {
}

81
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceProcessor.java

@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
public interface DatasourceProcessor {
/**
* check datasource param is valid
*/
void checkDatasourceParam(BaseDataSourceParamDTO datasourceParam);
/**
* create BaseDataSourceParamDTO by connectionJson
*
* @param connectionJson see{@link org.apache.dolphinscheduler.dao.entity.Datasource}
* @return {@link BaseDataSourceParamDTO}
*/
BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson);
/**
* create datasource connection parameter which will be stored at DataSource
* <p>
* see {@code org.apache.dolphinscheduler.dao.entity.DataSource.connectionParams}
*/
ConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam);
/**
* deserialize json to datasource connection param
*
* @param connectionJson {@code org.apache.dolphinscheduler.dao.entity.DataSource.connectionParams}
* @return {@link BaseConnectionParam}
*/
ConnectionParam createConnectionParams(String connectionJson);
/**
* get datasource Driver
*/
String getDatasourceDriver();
/**
* get jdbcUrl by connection param, the jdbcUrl is different with ConnectionParam.jdbcUrl, this method will inject
* other to jdbcUrl
*
* @param connectionParam connection param
*/
String getJdbcUrl(ConnectionParam connectionParam);
/**
* get connection by connectionParam
*
* @param connectionParam connectionParam
* @return {@link Connection}
*/
Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException, IOException;
/**
* @return {@link DbType}
*/
DbType getDbType();
}

121
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtil.java

@ -0,0 +1,121 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import org.apache.dolphinscheduler.common.datasource.clickhouse.ClickHouseDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.db2.Db2DatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.hive.HiveDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.presto.PrestoDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.spark.SparkDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerDatasourceProcessor;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.Connection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DatasourceUtil {
private DatasourceUtil() {
}
private static final Logger logger = LoggerFactory.getLogger(DatasourceUtil.class);
private static final DatasourceProcessor mysqlProcessor = new MysqlDatasourceProcessor();
private static final DatasourceProcessor postgreSqlProcessor = new PostgreSqlDatasourceProcessor();
private static final DatasourceProcessor hiveProcessor = new HiveDatasourceProcessor();
private static final DatasourceProcessor sparkProcessor = new SparkDatasourceProcessor();
private static final DatasourceProcessor clickhouseProcessor = new ClickHouseDatasourceProcessor();
private static final DatasourceProcessor oracleProcessor = new OracleDatasourceProcessor();
private static final DatasourceProcessor sqlServerProcessor = new SqlServerDatasourceProcessor();
private static final DatasourceProcessor db2Processor = new Db2DatasourceProcessor();
private static final DatasourceProcessor prestoProcessor = new PrestoDatasourceProcessor();
/**
* check datasource param
*
* @param baseDataSourceParamDTO datasource param
*/
public static void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) {
getDatasourceProcessor(baseDataSourceParamDTO.getType()).checkDatasourceParam(baseDataSourceParamDTO);
}
/**
* build connection url
*
* @param baseDataSourceParamDTO datasourceParam
*/
public static ConnectionParam buildConnectionParams(BaseDataSourceParamDTO baseDataSourceParamDTO) {
ConnectionParam connectionParams = getDatasourceProcessor(baseDataSourceParamDTO.getType())
.createConnectionParams(baseDataSourceParamDTO);
if (logger.isDebugEnabled()) {
logger.info("parameters map:{}", connectionParams);
}
return connectionParams;
}
public static ConnectionParam buildConnectionParams(DbType dbType, String connectionJson) {
return getDatasourceProcessor(dbType).createConnectionParams(connectionJson);
}
public static Connection getConnection(DbType dbType, ConnectionParam connectionParam) {
try {
return getDatasourceProcessor(dbType).getConnection(connectionParam);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static String getJdbcUrl(DbType dbType, ConnectionParam baseConnectionParam) {
return getDatasourceProcessor(dbType).getJdbcUrl(baseConnectionParam);
}
public static BaseDataSourceParamDTO buildDatasourceParamDTO(DbType dbType, String connectionParams) {
return getDatasourceProcessor(dbType).createDatasourceParamDTO(connectionParams);
}
public static DatasourceProcessor getDatasourceProcessor(DbType dbType) {
switch (dbType) {
case MYSQL:
return mysqlProcessor;
case POSTGRESQL:
return postgreSqlProcessor;
case HIVE:
return hiveProcessor;
case SPARK:
return sparkProcessor;
case CLICKHOUSE:
return clickhouseProcessor;
case ORACLE:
return oracleProcessor;
case SQLSERVER:
return sqlServerProcessor;
case DB2:
return db2Processor;
case PRESTO:
return prestoProcessor;
default:
throw new IllegalArgumentException("datasource type illegal:" + dbType);
}
}
}
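A minimal end-to-end sketch of the facade above (illustrative values only, not taken from this change): validate the submitted DTO, build the ConnectionParam that the service persists on the DataSource entity, then open a connection with it.
PostgreSqlDatasourceParamDTO dto = new PostgreSqlDatasourceParamDTO();
dto.setHost("172.16.133.200");
dto.setPort(5432);
dto.setDatabase("dolphinscheduler");
dto.setUserName("postgres");
dto.setPassword("");
DatasourceUtil.checkDatasourceParam(dto);
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(dto);
try (Connection connection = DatasourceUtil.getConnection(dto.getType(), connectionParam)) {
// obtaining a usable connection here is what the service's checkConnection treats as success
} catch (SQLException e) {
// only close() throws a checked exception; getConnection wraps lookup failures in a RuntimeException
}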

29
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceParamDTO.java

@ -14,29 +14,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.clickhouse;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class ClickHouseDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "ClickHouseDatasourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.CLICKHOUSE;
}
}

125
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessor.java

@ -0,0 +1,125 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.clickhouse;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class ClickHouseDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
ClickhouseConnectionParam connectionParams = (ClickhouseConnectionParam) createConnectionParams(connectionJson);
ClickHouseDatasourceParamDTO clickHouseDatasourceParamDTO = new ClickHouseDatasourceParamDTO();
clickHouseDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
clickHouseDatasourceParamDTO.setUserName(connectionParams.getUser());
clickHouseDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String[] hostSeparator = connectionParams.getAddress().split(Constants.DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(Constants.COMMA);
clickHouseDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
clickHouseDatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
return clickHouseDatasourceParamDTO;
}
@Override
public ConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
ClickHouseDatasourceParamDTO clickHouseParam = (ClickHouseDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", Constants.JDBC_CLICKHOUSE, clickHouseParam.getHost(), clickHouseParam.getPort());
String jdbcUrl = address + "/" + clickHouseParam.getDatabase();
ClickhouseConnectionParam clickhouseConnectionParam = new ClickhouseConnectionParam();
clickhouseConnectionParam.setDatabase(clickHouseParam.getDatabase());
clickhouseConnectionParam.setAddress(address);
clickhouseConnectionParam.setJdbcUrl(jdbcUrl);
clickhouseConnectionParam.setUser(clickHouseParam.getUserName());
clickhouseConnectionParam.setPassword(CommonUtils.encodePassword(clickHouseParam.getPassword()));
clickhouseConnectionParam.setOther(transformOther(clickHouseParam.getOther()));
return clickhouseConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, ClickhouseConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_CLICKHOUSE_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam;
String jdbcUrl = clickhouseConnectionParam.getJdbcUrl();
if (StringUtils.isNotEmpty(clickhouseConnectionParam.getOther())) {
jdbcUrl = String.format("%s?%s", jdbcUrl, clickhouseConnectionParam.getOther());
}
return jdbcUrl;
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(clickhouseConnectionParam),
clickhouseConnectionParam.getUser(), CommonUtils.decodePassword(clickhouseConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.CLICKHOUSE;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s%s", key, value, "&")));
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (other == null) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split("&");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
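A hedged sketch of how the extra properties travel into the ClickHouse URL (hypothetical key, host and port; the address prefix comes from Constants.JDBC_CLICKHOUSE, assumed here to be "jdbc:clickhouse://"):
Map<String, String> other = new HashMap<>();
other.put("socketTimeout", "60000");
ClickHouseDatasourceParamDTO dto = new ClickHouseDatasourceParamDTO();
dto.setHost("192.168.9.1");
dto.setPort(8123);
dto.setDatabase("im");
dto.setUserName("default");
dto.setPassword("");
dto.setOther(other);
ClickHouseDatasourceProcessor processor = new ClickHouseDatasourceProcessor();
ConnectionParam connectionParam = processor.createConnectionParams(dto);
// getJdbcUrl appends the transformed "other" string as a query string, so the result is
// expected to look like "jdbc:clickhouse://192.168.9.1:8123/im?socketTimeout=60000&"
// (transformOther leaves a trailing '&' as written above)
String jdbcUrl = processor.getJdbcUrl(connectionParam);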

34
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickhouseConnectionParam.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.clickhouse;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
public class ClickhouseConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "ClickhouseConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

33
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2ConnectionParam.java

@ -14,30 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.db2;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
public class Db2ConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "Db2ConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceParamDTO.java

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.db2;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class Db2DatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "Db2DatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.DB2;
}
}

126
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessor.java

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.db2;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class Db2DatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
Db2ConnectionParam connectionParams = (Db2ConnectionParam) createConnectionParams(connectionJson);
Db2DatasourceParamDTO db2DatasourceParamDTO = new Db2DatasourceParamDTO();
db2DatasourceParamDTO.setDatabase(connectionParams.getDatabase());
db2DatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
db2DatasourceParamDTO.setUserName(connectionParams.getUser());
String[] hostSeparator = connectionParams.getAddress().split(Constants.DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(Constants.COMMA);
db2DatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
db2DatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
return db2DatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
Db2DatasourceParamDTO db2Param = (Db2DatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", Constants.JDBC_DB2, db2Param.getHost(), db2Param.getPort());
String jdbcUrl = String.format("%s/%s", address, db2Param.getDatabase());
Db2ConnectionParam db2ConnectionParam = new Db2ConnectionParam();
db2ConnectionParam.setAddress(address);
db2ConnectionParam.setDatabase(db2Param.getDatabase());
db2ConnectionParam.setJdbcUrl(jdbcUrl);
db2ConnectionParam.setUser(db2Param.getUserName());
db2ConnectionParam.setPassword(CommonUtils.encodePassword(db2Param.getPassword()));
db2ConnectionParam.setOther(transformOther(db2Param.getOther()));
return db2ConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, Db2ConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_DB2_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(db2ConnectionParam.getOther())) {
return String.format("%s;%s", db2ConnectionParam.getJdbcUrl(), db2ConnectionParam.getOther());
}
return db2ConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(db2ConnectionParam),
db2ConnectionParam.getUser(), CommonUtils.decodePassword(db2ConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.DB2;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s%s", key, value, ";")));
stringBuilder.deleteCharAt(stringBuilder.length() - 1);
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (other == null) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split("&")) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
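A minimal usage sketch of the processor above, not part of the patch: it builds a Db2DatasourceParamDTO by hand, converts it to connection params, and prints the effective JDBC URL. The host, port, database and credentials are made-up values, and the setPassword setter is assumed to exist on the DTO; CommonUtils.encodePassword is invoked internally, so the common module configuration must be on the classpath.

import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.db2.Db2DatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.db2.Db2DatasourceProcessor;

public class Db2ProcessorSketch {
    public static void main(String[] args) {
        Db2DatasourceParamDTO dto = new Db2DatasourceParamDTO();
        dto.setHost("127.0.0.1");      // sample values only
        dto.setPort(50000);
        dto.setDatabase("testdb");
        dto.setUserName("db2inst1");
        dto.setPassword("secret");     // assumes a setPassword setter on the DTO

        Db2DatasourceProcessor processor = new Db2DatasourceProcessor();
        BaseConnectionParam param = processor.createConnectionParams(dto);
        // Expected shape: <Constants.JDBC_DB2 prefix>127.0.0.1:50000/testdb
        System.out.println(processor.getJdbcUrl(param));
    }
}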

38
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveConnectionParam.java

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.hive;
import org.apache.dolphinscheduler.common.datasource.BaseHdfsConnectionParam;
public class HiveConnectionParam extends BaseHdfsConnectionParam {
@Override
public String toString() {
return "HiveConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ ", principal='" + principal + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
}

45
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDataSourceParamDTO.java

@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.hive;
import org.apache.dolphinscheduler.common.datasource.BaseHdfsDatasourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class HiveDataSourceParamDTO extends BaseHdfsDatasourceParamDTO {
@Override
public String toString() {
return "HiveDataSourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", principal='" + principal + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.HIVE;
}
}

185
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessor.java

@ -0,0 +1,185 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.hive;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.HiveConfUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class HiveDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO();
HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) createConnectionParams(connectionJson);
hiveDataSourceParamDTO.setDatabase(hiveConnectionParam.getDatabase());
hiveDataSourceParamDTO.setUserName(hiveConnectionParam.getUser());
hiveDataSourceParamDTO.setOther(parseOther(hiveConnectionParam.getOther()));
hiveDataSourceParamDTO.setLoginUserKeytabUsername(hiveConnectionParam.getLoginUserKeytabUsername());
hiveDataSourceParamDTO.setLoginUserKeytabPath(hiveConnectionParam.getLoginUserKeytabPath());
hiveDataSourceParamDTO.setJavaSecurityKrb5Conf(hiveConnectionParam.getJavaSecurityKrb5Conf());
String[] tmpArray = hiveConnectionParam.getAddress().split(Constants.DOUBLE_SLASH);
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = tmpArray[tmpArray.length - 1].split(Constants.COMMA);
for (String hostPort : hostPortArray) {
hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
}
hosts.deleteCharAt(hosts.length() - 1);
hiveDataSourceParamDTO.setHost(hosts.toString());
hiveDataSourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
return hiveDataSourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
HiveDataSourceParamDTO hiveParam = (HiveDataSourceParamDTO) datasourceParam;
StringBuilder address = new StringBuilder();
address.append(Constants.JDBC_HIVE_2);
for (String zkHost : hiveParam.getHost().split(",")) {
address.append(String.format("%s:%s,", zkHost, hiveParam.getPort()));
}
address.deleteCharAt(address.length() - 1);
String jdbcUrl = address.toString() + "/" + hiveParam.getDatabase();
if (CommonUtils.getKerberosStartupState()) {
jdbcUrl += ";principal=" + hiveParam.getPrincipal();
}
HiveConnectionParam hiveConnectionParam = new HiveConnectionParam();
hiveConnectionParam.setDatabase(hiveParam.getDatabase());
hiveConnectionParam.setAddress(address.toString());
hiveConnectionParam.setJdbcUrl(jdbcUrl);
hiveConnectionParam.setUser(hiveParam.getUserName());
hiveConnectionParam.setPassword(CommonUtils.encodePassword(hiveParam.getPassword()));
if (CommonUtils.getKerberosStartupState()) {
hiveConnectionParam.setPrincipal(hiveParam.getPrincipal());
hiveConnectionParam.setJavaSecurityKrb5Conf(hiveParam.getJavaSecurityKrb5Conf());
hiveConnectionParam.setLoginUserKeytabPath(hiveParam.getLoginUserKeytabPath());
hiveConnectionParam.setLoginUserKeytabUsername(hiveParam.getLoginUserKeytabUsername());
}
hiveConnectionParam.setOther(transformOther(hiveParam.getOther()));
return hiveConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, HiveConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam;
String jdbcUrl = hiveConnectionParam.getJdbcUrl();
String otherParams = filterOther(hiveConnectionParam.getOther());
if (StringUtils.isNotEmpty(otherParams) && !otherParams.startsWith("?")) {
jdbcUrl += ";";
}
return jdbcUrl + otherParams;
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws IOException, ClassNotFoundException, SQLException {
HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam;
CommonUtils.loadKerberosConf(hiveConnectionParam.getJavaSecurityKrb5Conf(),
hiveConnectionParam.getLoginUserKeytabUsername(), hiveConnectionParam.getLoginUserKeytabPath());
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam),
hiveConnectionParam.getUser(), CommonUtils.decodePassword(hiveConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.HIVE;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s;", key, value)));
return stringBuilder.toString();
}
private String filterOther(String otherParams) {
if (StringUtils.isBlank(otherParams)) {
return "";
}
StringBuilder hiveConfListSb = new StringBuilder();
hiveConfListSb.append("?");
StringBuilder sessionVarListSb = new StringBuilder();
String[] otherArray = otherParams.split(";", -1);
for (String conf : otherArray) {
if (HiveConfUtils.isHiveConfVar(conf)) {
hiveConfListSb.append(conf).append(";");
} else {
sessionVarListSb.append(conf).append(";");
}
}
// remove the last ";"
if (sessionVarListSb.length() > 0) {
sessionVarListSb.deleteCharAt(sessionVarListSb.length() - 1);
}
if (hiveConfListSb.length() > 0) {
hiveConfListSb.deleteCharAt(hiveConfListSb.length() - 1);
}
return sessionVarListSb.toString() + hiveConfListSb.toString();
}
private Map<String, String> parseOther(String other) {
if (other == null) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split(";");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
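A rough sketch, not part of the change, of how getJdbcUrl above splits the "other" string: entries that HiveConfUtils recognises as Hive configuration variables end up after "?", everything else stays ";"-separated session state. The URL and parameter keys are invented, and which bucket each key lands in depends on HiveConfUtils.isHiveConfVar.

import org.apache.dolphinscheduler.common.datasource.hive.HiveConnectionParam;
import org.apache.dolphinscheduler.common.datasource.hive.HiveDatasourceProcessor;

public class HiveJdbcUrlSketch {
    public static void main(String[] args) {
        HiveConnectionParam param = new HiveConnectionParam();
        param.setJdbcUrl("jdbc:hive2://host1:10000,host2:10000/default");  // sample URL
        // one session-style entry and one Hive-conf-style entry (illustrative keys)
        param.setOther("use:database=default;hive.exec.dynamic.partition=true");

        HiveDatasourceProcessor processor = new HiveDatasourceProcessor();
        // session variables are appended with ';', recognised Hive conf variables after '?'
        System.out.println(processor.getJdbcUrl(param));
    }
}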

33
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlConnectionParam.java

@ -14,30 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.mysql;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
public class MysqlConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "MysqlConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceParamDTO.java

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.mysql;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class MysqlDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "MysqlDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.MYSQL;
}
}

170
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessor.java

@ -0,0 +1,170 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.mysql;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MysqlDatasourceProcessor extends AbstractDatasourceProcessor {
private final Logger logger = LoggerFactory.getLogger(MysqlDatasourceProcessor.class);
private static final String ALLOW_LOAD_LOCAL_IN_FILE_NAME = "allowLoadLocalInfile";
private static final String AUTO_DESERIALIZE = "autoDeserialize";
private static final String ALLOW_LOCAL_IN_FILE_NAME = "allowLocalInfile";
private static final String ALLOW_URL_IN_LOCAL_IN_FILE_NAME = "allowUrlInLocalInfile";
private static final String APPEND_PARAMS = "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false";
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
MysqlConnectionParam connectionParams = (MysqlConnectionParam) createConnectionParams(connectionJson);
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setUserName(connectionParams.getUser());
mysqlDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
mysqlDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String address = connectionParams.getAddress();
String[] hostSeparator = address.split(Constants.DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(Constants.COMMA);
mysqlDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
mysqlDatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
return mysqlDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO dataSourceParam) {
MysqlDatasourceParamDTO mysqlDatasourceParam = (MysqlDatasourceParamDTO) dataSourceParam;
String address = String.format("%s%s:%s", Constants.JDBC_MYSQL, mysqlDatasourceParam.getHost(), mysqlDatasourceParam.getPort());
String jdbcUrl = String.format("%s/%s", address, mysqlDatasourceParam.getDatabase());
MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam();
mysqlConnectionParam.setJdbcUrl(jdbcUrl);
mysqlConnectionParam.setDatabase(mysqlDatasourceParam.getDatabase());
mysqlConnectionParam.setAddress(address);
mysqlConnectionParam.setUser(mysqlDatasourceParam.getUserName());
mysqlConnectionParam.setPassword(CommonUtils.encodePassword(mysqlDatasourceParam.getPassword()));
mysqlConnectionParam.setOther(transformOther(mysqlDatasourceParam.getOther()));
return mysqlConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, MysqlConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_MYSQL_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
MysqlConnectionParam mysqlConnectionParam = (MysqlConnectionParam) connectionParam;
String jdbcUrl = mysqlConnectionParam.getJdbcUrl();
if (StringUtils.isNotEmpty(mysqlConnectionParam.getOther())) {
return String.format("%s?%s&%s", jdbcUrl, mysqlConnectionParam.getOther(), APPEND_PARAMS);
}
return String.format("%s?%s", jdbcUrl, APPEND_PARAMS);
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
MysqlConnectionParam mysqlConnectionParam = (MysqlConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
String user = mysqlConnectionParam.getUser();
if (user.contains(AUTO_DESERIALIZE)) {
logger.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE);
user = user.replace(AUTO_DESERIALIZE, "");
}
String password = CommonUtils.decodePassword(mysqlConnectionParam.getPassword());
if (password.contains(AUTO_DESERIALIZE)) {
logger.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE);
password = password.replace(AUTO_DESERIALIZE, "");
}
return DriverManager.getConnection(getJdbcUrl(connectionParam), user, password);
}
@Override
public DbType getDbType() {
return DbType.MYSQL;
}
private String transformOther(Map<String, String> paramMap) {
if (MapUtils.isEmpty(paramMap)) {
return null;
}
Map<String, String> otherMap = new HashMap<>();
paramMap.forEach((k, v) -> {
if (!checkKeyIsLegitimate(k)) {
return;
}
otherMap.put(k, v);
});
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s&", key, value)));
return stringBuilder.toString();
}
private static boolean checkKeyIsLegitimate(String key) {
return !key.contains(ALLOW_LOAD_LOCAL_IN_FILE_NAME)
&& !key.contains(AUTO_DESERIALIZE)
&& !key.contains(ALLOW_LOCAL_IN_FILE_NAME)
&& !key.contains(ALLOW_URL_IN_LOCAL_IN_FILE_NAME);
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split("&")) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
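A small sketch (not part of the patch) of the hardening behaviour above: user-supplied "other" keys matching the blocked parameters are dropped by transformOther, and getJdbcUrl always appends the forced allowLoadLocalInfile/autoDeserialize/allowLocalInfile/allowUrlInLocalInfile=false suffix. All connection values are invented, and setPassword is assumed to exist on the DTO.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor;

public class MysqlProcessorSketch {
    public static void main(String[] args) {
        Map<String, String> other = new LinkedHashMap<>();
        other.put("useSSL", "false");           // kept
        other.put("autoDeserialize", "true");   // filtered out as sensitive

        MysqlDatasourceParamDTO dto = new MysqlDatasourceParamDTO();
        dto.setHost("127.0.0.1");               // sample values only
        dto.setPort(3306);
        dto.setDatabase("ds");
        dto.setUserName("root");
        dto.setPassword("secret");              // assumes a setPassword setter on the DTO
        dto.setOther(other);

        MysqlDatasourceProcessor processor = new MysqlDatasourceProcessor();
        BaseConnectionParam param = processor.createConnectionParams(dto);
        // URL keeps useSSL=false, drops the user-supplied autoDeserialize, and ends with the *=false suffix
        System.out.println(processor.getJdbcUrl(param));
    }
}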

46
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleConnectionParam.java

@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.oracle;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
public class OracleConnectionParam extends BaseConnectionParam {
protected DbConnectType connectType;
public DbConnectType getConnectType() {
return connectType;
}
public void setConnectType(DbConnectType connectType) {
this.connectType = connectType;
}
@Override
public String toString() {
return "OracleConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ ", connectType=" + connectType
+ '}';
}
}

46
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceParamDTO.java

@ -14,16 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.oracle;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
public class OracleDatasourceParamDTO extends BaseDataSourceParamDTO {
private DbConnectType connectType;
@ -35,33 +33,23 @@ public class OracleDataSource extends BaseDataSource {
this.connectType = connectType;
}
@Override
public String toString() {
return "OracleDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", connectType=" + connectType
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.ORACLE;
}
}

141
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessor.java

@ -0,0 +1,141 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.oracle;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class OracleDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
OracleConnectionParam connectionParams = (OracleConnectionParam) createConnectionParams(connectionJson);
OracleDatasourceParamDTO oracleDatasourceParamDTO = new OracleDatasourceParamDTO();
oracleDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
oracleDatasourceParamDTO.setUserName(connectionParams.getUser());
oracleDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String hostSeparator = Constants.DOUBLE_SLASH;
if (DbConnectType.ORACLE_SID.equals(connectionParams.getConnectType())) {
hostSeparator = Constants.AT_SIGN;
}
String[] hostPort = connectionParams.getAddress().split(hostSeparator);
String[] hostPortArray = hostPort[hostPort.length - 1].split(Constants.COMMA);
oracleDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
oracleDatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
return oracleDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
OracleDatasourceParamDTO oracleParam = (OracleDatasourceParamDTO) datasourceParam;
String address;
if (DbConnectType.ORACLE_SID.equals(oracleParam.getConnectType())) {
address = String.format("%s%s:%s",
Constants.JDBC_ORACLE_SID, oracleParam.getHost(), oracleParam.getPort());
} else {
address = String.format("%s%s:%s",
Constants.JDBC_ORACLE_SERVICE_NAME, oracleParam.getHost(), oracleParam.getPort());
}
String jdbcUrl = address + "/" + oracleParam.getDatabase();
OracleConnectionParam oracleConnectionParam = new OracleConnectionParam();
oracleConnectionParam.setUser(oracleParam.getUserName());
oracleConnectionParam.setPassword(CommonUtils.encodePassword(oracleParam.getPassword()));
oracleConnectionParam.setAddress(address);
oracleConnectionParam.setJdbcUrl(jdbcUrl);
oracleConnectionParam.setDatabase(oracleParam.getDatabase());
oracleConnectionParam.setConnectType(oracleParam.getConnectType());
oracleConnectionParam.setOther(transformOther(oracleParam.getOther()));
return oracleConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, OracleConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_ORACLE_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(oracleConnectionParam.getOther())) {
return String.format("%s?%s", oracleConnectionParam.getJdbcUrl(), oracleConnectionParam.getOther());
}
return oracleConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam),
oracleConnectionParam.getUser(), CommonUtils.decodePassword(oracleConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.ORACLE;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
List<String> list = new ArrayList<>();
otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value)));
return String.join("&", list);
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split("&");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
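A sketch of the two address shapes the Oracle processor can produce depending on DbConnectType; the connection values are invented, the exact prefixes come from Constants.JDBC_ORACLE_SID and Constants.JDBC_ORACLE_SERVICE_NAME rather than from this example, and setPassword is assumed to exist on the DTO.

import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceProcessor;
import org.apache.dolphinscheduler.common.enums.DbConnectType;

public class OracleProcessorSketch {
    public static void main(String[] args) {
        OracleDatasourceParamDTO dto = new OracleDatasourceParamDTO();
        dto.setHost("127.0.0.1");      // sample values only
        dto.setPort(1521);
        dto.setDatabase("ORCL");
        dto.setUserName("scott");
        dto.setPassword("tiger");      // assumes a setPassword setter on the DTO
        dto.setConnectType(DbConnectType.ORACLE_SID);  // the service-name enum value drives the other branch

        OracleDatasourceProcessor processor = new OracleDatasourceProcessor();
        // SID connections use the Constants.JDBC_ORACLE_SID prefix; service names use JDBC_ORACLE_SERVICE_NAME
        System.out.println(processor.getJdbcUrl(processor.createConnectionParams(dto)));
    }
}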

34
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlConnectionParam.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.postgresql;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
public class PostgreSqlConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "PostgreSqlConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

41
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceParamDTO.java

@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.postgresql;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class PostgreSqlDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "PostgreSqlDatasourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.POSTGRESQL;
}
}

126
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessor.java

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.postgresql;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class PostgreSqlDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
PostgreSqlConnectionParam connectionParams = (PostgreSqlConnectionParam) createConnectionParams(connectionJson);
PostgreSqlDatasourceParamDTO postgreSqlDatasourceParamDTO = new PostgreSqlDatasourceParamDTO();
postgreSqlDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
postgreSqlDatasourceParamDTO.setUserName(connectionParams.getUser());
postgreSqlDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
String address = connectionParams.getAddress();
String[] hostSeparator = address.split(Constants.DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(Constants.COMMA);
postgreSqlDatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
postgreSqlDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
return postgreSqlDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
PostgreSqlDatasourceParamDTO postgreSqlParam = (PostgreSqlDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", Constants.JDBC_POSTGRESQL, postgreSqlParam.getHost(), postgreSqlParam.getPort());
String jdbcUrl = String.format("%s/%s", address, postgreSqlParam.getDatabase());
PostgreSqlConnectionParam postgreSqlConnectionParam = new PostgreSqlConnectionParam();
postgreSqlConnectionParam.setJdbcUrl(jdbcUrl);
postgreSqlConnectionParam.setAddress(address);
postgreSqlConnectionParam.setDatabase(postgreSqlParam.getDatabase());
postgreSqlConnectionParam.setUser(postgreSqlParam.getUserName());
postgreSqlConnectionParam.setPassword(CommonUtils.encodePassword(postgreSqlParam.getPassword()));
postgreSqlConnectionParam.setOther(transformOther(postgreSqlParam.getOther()));
return postgreSqlConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, PostgreSqlConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.ORG_POSTGRESQL_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(postgreSqlConnectionParam.getOther())) {
return String.format("%s?%s", postgreSqlConnectionParam.getJdbcUrl(), postgreSqlConnectionParam.getOther());
}
return postgreSqlConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(postgreSqlConnectionParam),
postgreSqlConnectionParam.getUser(), CommonUtils.decodePassword(postgreSqlConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.POSTGRESQL;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s&", key, value)));
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split("&")) {
String[] split = config.split("=");
otherMap.put(split[0], split[1]);
}
return otherMap;
}
}
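A minimal sketch, with assumed values and not part of the patch, of the PostgreSQL round trip: DTO to connection params to JDBC URL, with the "other" map appended after "?". setPassword is assumed to exist on the DTO.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceProcessor;

public class PostgreSqlProcessorSketch {
    public static void main(String[] args) {
        Map<String, String> other = new LinkedHashMap<>();
        other.put("stringtype", "unspecified");   // illustrative driver property

        PostgreSqlDatasourceParamDTO dto = new PostgreSqlDatasourceParamDTO();
        dto.setHost("127.0.0.1");                 // sample values only
        dto.setPort(5432);
        dto.setDatabase("dolphinscheduler");
        dto.setUserName("postgres");
        dto.setPassword("postgres");              // assumes a setPassword setter on the DTO
        dto.setOther(other);

        PostgreSqlDatasourceProcessor processor = new PostgreSqlDatasourceProcessor();
        // Expected shape: <JDBC_POSTGRESQL prefix>127.0.0.1:5432/dolphinscheduler?stringtype=unspecified&
        // (transformOther leaves a trailing '&' on the appended properties)
        System.out.println(processor.getJdbcUrl(processor.createConnectionParams(dto)));
    }
}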

34
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoConnectionParam.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.presto;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
public class PrestoConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "PrestoConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

43
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceParamDTO.java

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.presto;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class PrestoDatasourceParamDTO extends BaseDataSourceParamDTO {
@Override
public String toString() {
return "PrestoDatasourceParamDTO{"
+ "name='" + name + '\''
+ ", note='" + note + '\''
+ ", host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.PRESTO;
}
}

128
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessor.java

@ -0,0 +1,128 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.presto;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class PrestoDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
PrestoConnectionParam connectionParams = (PrestoConnectionParam) createConnectionParams(connectionJson);
String[] hostSeparator = connectionParams.getAddress().split(Constants.DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(Constants.COMMA);
PrestoDatasourceParamDTO prestoDatasourceParamDTO = new PrestoDatasourceParamDTO();
prestoDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
prestoDatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
prestoDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
prestoDatasourceParamDTO.setUserName(connectionParams.getUser());
prestoDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
return prestoDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
PrestoDatasourceParamDTO prestoParam = (PrestoDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", Constants.JDBC_PRESTO, prestoParam.getHost(), prestoParam.getPort());
String jdbcUrl = address + "/" + prestoParam.getDatabase();
PrestoConnectionParam prestoConnectionParam = new PrestoConnectionParam();
prestoConnectionParam.setUser(prestoParam.getUserName());
prestoConnectionParam.setPassword(CommonUtils.encodePassword(prestoParam.getPassword()));
prestoConnectionParam.setOther(transformOther(prestoParam.getOther()));
prestoConnectionParam.setAddress(address);
prestoConnectionParam.setJdbcUrl(jdbcUrl);
prestoConnectionParam.setDatabase(prestoParam.getDatabase());
return prestoConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, PrestoConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_PRESTO_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(prestoConnectionParam.getOther())) {
return String.format("%s?%s", prestoConnectionParam.getJdbcUrl(), prestoConnectionParam.getOther());
}
return prestoConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam),
prestoConnectionParam.getUser(), CommonUtils.decodePassword(prestoConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.PRESTO;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isNotEmpty(otherMap)) {
List<String> list = new ArrayList<>();
otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value)));
return String.join("&", list);
}
return null;
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split("&");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}

38
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkConnectionParam.java

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.spark;
import org.apache.dolphinscheduler.common.datasource.BaseHdfsConnectionParam;
public class SparkConnectionParam extends BaseHdfsConnectionParam {
@Override
public String toString() {
return "SparkConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ ", principal='" + principal + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
}

47
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceParamDTO.java

@ -15,44 +15,31 @@
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.spark;
import org.apache.dolphinscheduler.common.datasource.BaseHdfsDatasourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class SparkDatasourceParamDTO extends BaseHdfsDatasourceParamDTO {
@Override
public String toString() {
return "SparkDatasourceParamDTO{"
+ "host='" + host + '\''
+ ", port=" + port
+ ", database='" + database + '\''
+ ", principal='" + principal + '\''
+ ", userName='" + userName + '\''
+ ", password='" + password + '\''
+ ", other='" + other + '\''
+ ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\''
+ ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\''
+ ", loginUserKeytabPath='" + loginUserKeytabPath + '\''
+ '}';
}
@Override
public DbType getType() {
return DbType.SPARK;
}
}

154
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessor.java

@ -0,0 +1,154 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.spark;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public class SparkDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
SparkConnectionParam connectionParams = (SparkConnectionParam) createConnectionParams(connectionJson);
SparkDatasourceParamDTO sparkDatasourceParamDTO = new SparkDatasourceParamDTO();
sparkDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
sparkDatasourceParamDTO.setUserName(connectionParams.getUser());
sparkDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
sparkDatasourceParamDTO.setJavaSecurityKrb5Conf(connectionParams.getJavaSecurityKrb5Conf());
sparkDatasourceParamDTO.setLoginUserKeytabPath(connectionParams.getLoginUserKeytabPath());
sparkDatasourceParamDTO.setLoginUserKeytabUsername(connectionParams.getLoginUserKeytabUsername());
StringBuilder hosts = new StringBuilder();
String[] tmpArray = connectionParams.getAddress().split(Constants.DOUBLE_SLASH);
String[] hostPortArray = tmpArray[tmpArray.length - 1].split(Constants.COMMA);
Arrays.stream(hostPortArray).forEach(hostPort -> hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA));
hosts.deleteCharAt(hosts.length() - 1);
sparkDatasourceParamDTO.setHost(hosts.toString());
sparkDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
return sparkDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO dataSourceParam) {
StringBuilder address = new StringBuilder();
SparkDatasourceParamDTO sparkDatasourceParam = (SparkDatasourceParamDTO) dataSourceParam;
address.append(Constants.JDBC_HIVE_2);
for (String zkHost : sparkDatasourceParam.getHost().split(",")) {
address.append(String.format("%s:%s,", zkHost, sparkDatasourceParam.getPort()));
}
address.deleteCharAt(address.length() - 1);
String jdbcUrl = address + "/" + sparkDatasourceParam.getDatabase();
if (CommonUtils.getKerberosStartupState()) {
jdbcUrl += ";principal=" + sparkDatasourceParam.getPrincipal();
}
SparkConnectionParam sparkConnectionParam = new SparkConnectionParam();
sparkConnectionParam.setPassword(CommonUtils.encodePassword(sparkDatasourceParam.getPassword()));
sparkConnectionParam.setUser(sparkDatasourceParam.getUserName());
sparkConnectionParam.setOther(transformOther(sparkDatasourceParam.getOther()));
sparkConnectionParam.setDatabase(sparkDatasourceParam.getDatabase());
sparkConnectionParam.setAddress(address.toString());
sparkConnectionParam.setJdbcUrl(jdbcUrl);
if (CommonUtils.getKerberosStartupState()) {
sparkConnectionParam.setPrincipal(sparkDatasourceParam.getPrincipal());
sparkConnectionParam.setJavaSecurityKrb5Conf(sparkDatasourceParam.getJavaSecurityKrb5Conf());
sparkConnectionParam.setLoginUserKeytabPath(sparkDatasourceParam.getLoginUserKeytabPath());
sparkConnectionParam.setLoginUserKeytabUsername(sparkDatasourceParam.getLoginUserKeytabUsername());
}
return sparkConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, SparkConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(sparkConnectionParam.getOther())) {
return String.format("%s;%s", sparkConnectionParam.getJdbcUrl(), sparkConnectionParam.getOther());
}
return sparkConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws IOException, ClassNotFoundException, SQLException {
SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam;
CommonUtils.loadKerberosConf(sparkConnectionParam.getJavaSecurityKrb5Conf(),
sparkConnectionParam.getLoginUserKeytabUsername(), sparkConnectionParam.getLoginUserKeytabPath());
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(sparkConnectionParam),
sparkConnectionParam.getUser(), CommonUtils.decodePassword(sparkConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.SPARK;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
List<String> otherList = otherMap.entrySet().stream()
.map(entry -> String.format("%s=%s", entry.getKey(), entry.getValue())).collect(Collectors.toList());
return String.join(";", otherList);
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
String[] configs = other.split(";");
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
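For orientation, a minimal sketch of how the new processor is meant to be driven: a SparkDatasourceParamDTO is converted into a SparkConnectionParam and then into a JDBC URL. The hosts, port and credentials below are placeholders, Kerberos is assumed to be disabled, and the snippet is illustrative only, not part of this change set.
import org.apache.dolphinscheduler.common.datasource.spark.SparkConnectionParam;
import org.apache.dolphinscheduler.common.datasource.spark.SparkDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.spark.SparkDatasourceProcessor;
public class SparkProcessorExample {
    public static void main(String[] args) {
        // Placeholder hosts and credentials; Kerberos assumed disabled.
        SparkDatasourceParamDTO param = new SparkDatasourceParamDTO();
        param.setHost("node1,node2"); // comma-separated HiveServer2 hosts
        param.setPort(10000);
        param.setDatabase("default");
        param.setUserName("hadoop");
        param.setPassword("hadoop");
        SparkDatasourceProcessor processor = new SparkDatasourceProcessor();
        SparkConnectionParam connectionParam =
                (SparkConnectionParam) processor.createConnectionParams(param);
        // Expected output: jdbc:hive2://node1:10000,node2:10000/default
        System.out.println(processor.getJdbcUrl(connectionParam));
    }
}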

34
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerConnectionParam.java

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.sqlserver;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
public class SqlServerConnectionParam extends BaseConnectionParam {
@Override
public String toString() {
return "SqlServerConnectionParam{"
+ "user='" + user + '\''
+ ", password='" + password + '\''
+ ", address='" + address + '\''
+ ", database='" + database + '\''
+ ", jdbcUrl='" + jdbcUrl + '\''
+ ", other='" + other + '\''
+ '}';
}
}

47
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceParamDTO.java

@ -15,46 +15,29 @@
* limitations under the License.
*/
-package org.apache.dolphinscheduler.dao.datasource;
+package org.apache.dolphinscheduler.common.datasource.sqlserver;
-import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.enums.DbType;
+import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
-/**
- * data source of SQL Server
- */
-public class SQLServerDataSource extends BaseDataSource {
-    /**
-     * gets the JDBC url for the data source connection
-     * @return jdbc url
-     */
-    @Override
-    public String getJdbcUrl() {
-        String jdbcUrl = getAddress();
-        jdbcUrl += ";databaseName=" + getDatabase();
-        if (StringUtils.isNotEmpty(getOther())) {
-            jdbcUrl += ";" + getOther();
-        }
-        return jdbcUrl;
-    }
-    /**
-     * @return driver class
-     */
-    @Override
-    public String driverClassSelector() {
-        return Constants.COM_SQLSERVER_JDBC_DRIVER;
-    }
-    /**
-     * @return db type
-     */
-    @Override
-    public DbType dbTypeSelector() {
-        return DbType.SQLSERVER;
-    }
-}
+public class SqlServerDatasourceParamDTO extends BaseDataSourceParamDTO {
+    @Override
+    public String toString() {
+        return "SqlServerDatasourceParamDTO{"
+                + "name='" + name + '\''
+                + ", note='" + note + '\''
+                + ", host='" + host + '\''
+                + ", port=" + port
+                + ", database='" + database + '\''
+                + ", userName='" + userName + '\''
+                + ", password='" + password + '\''
+                + ", other='" + other + '\''
+                + '}';
+    }
+    @Override
+    public DbType getType() {
+        return DbType.SQLSERVER;
+    }
+}

123
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessor.java

@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.sqlserver;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor;
import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
public class SqlServerDatasourceProcessor extends AbstractDatasourceProcessor {
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
SqlServerConnectionParam connectionParams = (SqlServerConnectionParam) createConnectionParams(connectionJson);
String[] hostSeparator = connectionParams.getAddress().split(Constants.DOUBLE_SLASH);
String[] hostPortArray = hostSeparator[hostSeparator.length - 1].split(Constants.COMMA);
SqlServerDatasourceParamDTO sqlServerDatasourceParamDTO = new SqlServerDatasourceParamDTO();
sqlServerDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
sqlServerDatasourceParamDTO.setUserName(connectionParams.getUser());
sqlServerDatasourceParamDTO.setOther(parseOther(connectionParams.getOther()));
sqlServerDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1]));
sqlServerDatasourceParamDTO.setHost(hostPortArray[0].split(Constants.COLON)[0]);
return sqlServerDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
SqlServerDatasourceParamDTO sqlServerParam = (SqlServerDatasourceParamDTO) datasourceParam;
String address = String.format("%s%s:%s", Constants.JDBC_SQLSERVER, sqlServerParam.getHost(), sqlServerParam.getPort());
String jdbcUrl = address + ";databaseName=" + sqlServerParam.getDatabase();
SqlServerConnectionParam sqlServerConnectionParam = new SqlServerConnectionParam();
sqlServerConnectionParam.setAddress(address);
sqlServerConnectionParam.setDatabase(sqlServerParam.getDatabase());
sqlServerConnectionParam.setJdbcUrl(jdbcUrl);
sqlServerConnectionParam.setOther(transformOther(sqlServerParam.getOther()));
sqlServerConnectionParam.setUser(sqlServerParam.getUserName());
sqlServerConnectionParam.setPassword(CommonUtils.encodePassword(sqlServerParam.getPassword()));
return sqlServerConnectionParam;
}
@Override
public BaseConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, SqlServerConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_SQLSERVER_JDBC_DRIVER;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam;
if (StringUtils.isNotEmpty(sqlServerConnectionParam.getOther())) {
return String.format("%s;%s", sqlServerConnectionParam.getJdbcUrl(), sqlServerConnectionParam.getOther());
}
return sqlServerConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam;
Class.forName(getDatasourceDriver());
return DriverManager.getConnection(getJdbcUrl(connectionParam), sqlServerConnectionParam.getUser(),
CommonUtils.decodePassword(sqlServerConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.SQLSERVER;
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isEmpty(otherMap)) {
return null;
}
StringBuilder stringBuilder = new StringBuilder();
otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s;", key, value)));
return stringBuilder.toString();
}
private Map<String, String> parseOther(String other) {
if (StringUtils.isEmpty(other)) {
return null;
}
Map<String, String> otherMap = new LinkedHashMap<>();
for (String config : other.split(";")) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
return otherMap;
}
}
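In practice callers are expected to go through DatasourceUtil rather than instantiate the processors directly. A rough, illustrative sketch with placeholder connection details (not part of this change set):
import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
import org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerDatasourceParamDTO;
import org.apache.dolphinscheduler.common.enums.DbType;
public class SqlServerDatasourceExample {
    public static void main(String[] args) {
        // Placeholder connection details for illustration only.
        SqlServerDatasourceParamDTO param = new SqlServerDatasourceParamDTO();
        param.setHost("localhost");
        param.setPort(1433);
        param.setDatabase("master");
        param.setUserName("sa");
        param.setPassword("secret");
        ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(param);
        // Expected output: jdbc:sqlserver://localhost:1433;databaseName=master
        System.out.println(DatasourceUtil.getJdbcUrl(DbType.SQLSERVER, connectionParam));
    }
}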

1
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbConnectType.java

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;

5
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java

@ -24,6 +24,7 @@ import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
+import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
@ -103,9 +104,9 @@ public class CommonUtils {
* @param javaSecurityKrb5Conf javaSecurityKrb5Conf
* @param loginUserKeytabUsername loginUserKeytabUsername
* @param loginUserKeytabPath loginUserKeytabPath
- * @throws Exception errors
+ * @throws IOException errors
*/
-public static void loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) throws Exception {
+public static void loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) throws IOException {
if (CommonUtils.getKerberosStartupState()) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, StringUtils.defaultIfBlank(javaSecurityKrb5Conf, PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)));
Configuration configuration = new Configuration();
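The only behavioural change here is the narrowed signature: loadKerberosConf now declares IOException instead of Exception. A hypothetical caller, with placeholder paths and principal (illustrative only, not from this PR), might look like this:
import java.io.IOException;
import java.io.UncheckedIOException;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
public class KerberosConfExample {
    public static void main(String[] args) {
        try {
            // Placeholder krb5.conf path, principal, and keytab path.
            CommonUtils.loadKerberosConf(
                    "/etc/krb5.conf",
                    "hive/node1@EXAMPLE.COM",
                    "/etc/security/keytabs/hive.keytab");
        } catch (IOException e) {
            // Callers now only need to handle IOException instead of a bare Exception.
            throw new UncheckedIOException(e);
        }
    }
}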

130
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtilTest.java

@ -0,0 +1,130 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO;
import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class, MysqlDatasourceProcessor.class})
public class DatasourceUtilTest {
@Test
public void testCheckDatasourceParam() {
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setHost("localhost");
mysqlDatasourceParamDTO.setDatabase("default");
mysqlDatasourceParamDTO.setOther(null);
DatasourceUtil.checkDatasourceParam(mysqlDatasourceParamDTO);
Assert.assertTrue(true);
}
@Test
public void testBuildConnectionParams() {
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setHost("localhost");
mysqlDatasourceParamDTO.setDatabase("default");
mysqlDatasourceParamDTO.setUserName("root");
mysqlDatasourceParamDTO.setPort(3306);
mysqlDatasourceParamDTO.setPassword("123456");
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO);
Assert.assertNotNull(connectionParam);
}
@Test
public void testBuildConnectionParams2() {
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setHost("localhost");
mysqlDatasourceParamDTO.setDatabase("default");
mysqlDatasourceParamDTO.setUserName("root");
mysqlDatasourceParamDTO.setPort(3306);
mysqlDatasourceParamDTO.setPassword("123456");
ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(DbType.MYSQL, JSONUtils.toJsonString(mysqlDatasourceParamDTO));
Assert.assertNotNull(connectionParam);
}
@Test
public void testGetConnection() throws ClassNotFoundException, SQLException {
PowerMockito.mockStatic(Class.class);
PowerMockito.when(Class.forName(Mockito.any())).thenReturn(null);
PowerMockito.mockStatic(DriverManager.class);
PowerMockito.when(DriverManager.getConnection(Mockito.any(), Mockito.any(), Mockito.any())).thenReturn(null);
MysqlConnectionParam connectionParam = new MysqlConnectionParam();
connectionParam.setUser("root");
connectionParam.setPassword("123456");
Connection connection = DatasourceUtil.getConnection(DbType.MYSQL, connectionParam);
Assert.assertNull(connection);
}
@Test
public void testGetJdbcUrl() {
MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam();
mysqlConnectionParam.setJdbcUrl("jdbc:mysql://localhost:3308");
String jdbcUrl = DatasourceUtil.getJdbcUrl(DbType.MYSQL, mysqlConnectionParam);
Assert.assertEquals("jdbc:mysql://localhost:3308?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false",
jdbcUrl);
}
@Test
public void testBuildDatasourceParamDTO() {
MysqlConnectionParam connectionParam = new MysqlConnectionParam();
connectionParam.setJdbcUrl("jdbc:mysql://localhost:3308?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false");
connectionParam.setAddress("jdbc:mysql://localhost:3308");
connectionParam.setUser("root");
connectionParam.setPassword("123456");
Assert.assertNotNull(DatasourceUtil.buildDatasourceParamDTO(DbType.MYSQL, JSONUtils.toJsonString(connectionParam)));
}
@Test
public void testGetDatasourceProcessor() {
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.MYSQL));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.POSTGRESQL));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.HIVE));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.SPARK));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.CLICKHOUSE));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.ORACLE));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.SQLSERVER));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.DB2));
Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.PRESTO));
}
@Test(expected = Exception.class)
public void testGetDatasourceProcessorError() {
DatasourceUtil.getDatasourceProcessor(null);
}
}

83
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java

@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.clickhouse;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class ClickHouseDatasourceProcessorTest {
private ClickHouseDatasourceProcessor clickHouseDatasourceProcessor = new ClickHouseDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
ClickHouseDatasourceParamDTO clickHouseDatasourceParamDTO = new ClickHouseDatasourceParamDTO();
clickHouseDatasourceParamDTO.setUserName("user");
clickHouseDatasourceParamDTO.setPassword("password");
clickHouseDatasourceParamDTO.setHost("localhost");
clickHouseDatasourceParamDTO.setPort(8123);
clickHouseDatasourceParamDTO.setDatabase("default");
ClickhouseConnectionParam connectionParams = (ClickhouseConnectionParam) clickHouseDatasourceProcessor
.createConnectionParams(clickHouseDatasourceParamDTO);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("jdbc:clickhouse://localhost:8123", connectionParams.getAddress());
Assert.assertEquals("jdbc:clickhouse://localhost:8123/default", connectionParams.getJdbcUrl());
}
@Test
public void testCreateConnectionParams2() {
String connectionParamJson = "{\"address\":\"jdbc:clickhouse://localhost:8123\",\"database\":\"default\","
+ "\"jdbcUrl\":\"jdbc:clickhouse://localhost:8123/default\",\"user\":\"default\",\"password\":\"123456\"}";
ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) clickHouseDatasourceProcessor
.createConnectionParams(connectionParamJson);
Assert.assertNotNull(clickhouseConnectionParam);
Assert.assertEquals("default", clickhouseConnectionParam.getUser());
Assert.assertEquals("123456", clickhouseConnectionParam.getPassword());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertNotNull(clickHouseDatasourceProcessor.getDatasourceDriver());
Assert.assertEquals(Constants.COM_CLICKHOUSE_JDBC_DRIVER, clickHouseDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
ClickhouseConnectionParam connectionParam = new ClickhouseConnectionParam();
connectionParam.setUser("default");
connectionParam.setJdbcUrl("jdbc:clickhouse://localhost:8123/default");
connectionParam.setOther("other=other1");
String jdbcUrl = clickHouseDatasourceProcessor.getJdbcUrl(connectionParam);
Assert.assertEquals("jdbc:clickhouse://localhost:8123/default?other=other1", jdbcUrl);
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.CLICKHOUSE, clickHouseDatasourceProcessor.getDbType());
}
}

83
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessorTest.java

@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.db2;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class Db2DatasourceProcessorTest {
private Db2DatasourceProcessor db2DatasourceProcessor = new Db2DatasourceProcessor();
@Test
public void testCreateConnectionParams() {
Db2DatasourceParamDTO db2DatasourceParamDTO = new Db2DatasourceParamDTO();
db2DatasourceParamDTO.setUserName("root");
db2DatasourceParamDTO.setPassword("123456");
db2DatasourceParamDTO.setHost("localhost");
db2DatasourceParamDTO.setPort(5142);
db2DatasourceParamDTO.setDatabase("default");
Db2ConnectionParam connectionParams = (Db2ConnectionParam) db2DatasourceProcessor
.createConnectionParams(db2DatasourceParamDTO);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("jdbc:db2://localhost:5142", connectionParams.getAddress());
Assert.assertEquals("jdbc:db2://localhost:5142/default", connectionParams.getJdbcUrl());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:db2://localhost:5142\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:db2://localhost:5142/default\"}";
Db2ConnectionParam connectionParams = (Db2ConnectionParam) db2DatasourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.COM_DB2_JDBC_DRIVER, db2DatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
Db2ConnectionParam db2ConnectionParam = new Db2ConnectionParam();
db2ConnectionParam.setJdbcUrl("jdbc:db2://localhost:5142/default");
db2ConnectionParam.setOther("other=other");
String jdbcUrl = db2DatasourceProcessor.getJdbcUrl(db2ConnectionParam);
Assert.assertEquals("jdbc:db2://localhost:5142/default;other=other", jdbcUrl);
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.DB2, db2DatasourceProcessor.getDbType());
}
}

79
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessorTest.java

@ -0,0 +1,79 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.hive;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class HiveDatasourceProcessorTest {
private HiveDatasourceProcessor hiveDatasourceProcessor = new HiveDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO();
hiveDataSourceParamDTO.setHost("localhost1,localhost2");
hiveDataSourceParamDTO.setPort(5142);
hiveDataSourceParamDTO.setUserName("default");
hiveDataSourceParamDTO.setDatabase("default");
HiveConnectionParam connectionParams = (HiveConnectionParam) hiveDatasourceProcessor
.createConnectionParams(hiveDataSourceParamDTO);
System.out.println(JSONUtils.toJsonString(connectionParams));
Assert.assertNotNull(connectionParams);
Assert.assertEquals("jdbc:hive2://localhost1:5142,localhost2:5142", connectionParams.getAddress());
}
@Test
public void testCreateConnectionParams2() {
String connectionParam = "{\"user\":\"default\",\"address\":\"jdbc:hive2://localhost1:5142,localhost2:5142\""
+ ",\"jdbcUrl\":\"jdbc:hive2://localhost1:5142,localhost2:5142/default\"}";
HiveConnectionParam connectionParams = (HiveConnectionParam) hiveDatasourceProcessor
.createConnectionParams(connectionParam);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("default", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER, hiveDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
HiveConnectionParam connectionParam = new HiveConnectionParam();
connectionParam.setJdbcUrl("jdbc:hive2://localhost1:5142,localhost2:5142/default");
Assert.assertEquals("jdbc:hive2://localhost1:5142,localhost2:5142/default",
hiveDatasourceProcessor.getJdbcUrl(connectionParam));
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.HIVE, hiveDatasourceProcessor.getDbType());
}
}

81
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessorTest.java

@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.mysql;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class MysqlDatasourceProcessorTest {
private MysqlDatasourceProcessor mysqlDatasourceProcessor = new MysqlDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO();
mysqlDatasourceParamDTO.setUserName("root");
mysqlDatasourceParamDTO.setPassword("123456");
mysqlDatasourceParamDTO.setHost("localhost");
mysqlDatasourceParamDTO.setPort(3306);
mysqlDatasourceParamDTO.setDatabase("default");
MysqlConnectionParam connectionParams = (MysqlConnectionParam) mysqlDatasourceProcessor
.createConnectionParams(mysqlDatasourceParamDTO);
System.out.println(JSONUtils.toJsonString(connectionParams));
Assert.assertEquals("jdbc:mysql://localhost:3306", connectionParams.getAddress());
Assert.assertEquals("jdbc:mysql://localhost:3306/default", connectionParams.getJdbcUrl());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://localhost:3306\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:mysql://localhost:3306/default\"}";
MysqlConnectionParam connectionParams = (MysqlConnectionParam) mysqlDatasourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.COM_MYSQL_JDBC_DRIVER, mysqlDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam();
mysqlConnectionParam.setJdbcUrl("jdbc:mysql://localhost:3306/default");
Assert.assertEquals("jdbc:mysql://localhost:3306/default?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false",
mysqlDatasourceProcessor.getJdbcUrl(mysqlConnectionParam));
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.MYSQL, mysqlDatasourceProcessor.getDbType());
}
}

83
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessorTest.java

@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.oracle;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class OracleDatasourceProcessorTest {
private OracleDatasourceProcessor oracleDatasourceProcessor = new OracleDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
OracleDatasourceParamDTO oracleDatasourceParamDTO = new OracleDatasourceParamDTO();
oracleDatasourceParamDTO.setConnectType(DbConnectType.ORACLE_SID);
oracleDatasourceParamDTO.setHost("localhost");
oracleDatasourceParamDTO.setPort(3308);
oracleDatasourceParamDTO.setUserName("root");
oracleDatasourceParamDTO.setPassword("123456");
oracleDatasourceParamDTO.setDatabase("default");
OracleConnectionParam connectionParams = (OracleConnectionParam) oracleDatasourceProcessor
.createConnectionParams(oracleDatasourceParamDTO);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("jdbc:oracle:thin:@localhost:3308", connectionParams.getAddress());
Assert.assertEquals("jdbc:oracle:thin:@localhost:3308/default", connectionParams.getJdbcUrl());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:oracle:thin:@localhost:3308\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:oracle:thin:@localhost:3308/default\",\"connectType\":\"ORACLE_SID\"}";
OracleConnectionParam connectionParams = (OracleConnectionParam) oracleDatasourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.COM_ORACLE_JDBC_DRIVER, oracleDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
OracleConnectionParam oracleConnectionParam = new OracleConnectionParam();
oracleConnectionParam.setJdbcUrl("jdbc:oracle:thin:@localhost:3308/default");
oracleConnectionParam.setOther("other=other");
Assert.assertEquals("jdbc:oracle:thin:@localhost:3308/default?other=other",
oracleDatasourceProcessor.getJdbcUrl(oracleConnectionParam));
}
@Test
public void getDbType() {
Assert.assertEquals(DbType.ORACLE, oracleDatasourceProcessor.getDbType());
}
}

83
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java

@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.postgresql;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class PostgreSqlDatasourceProcessorTest {
private PostgreSqlDatasourceProcessor postgreSqlDatasourceProcessor = new PostgreSqlDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
PostgreSqlDatasourceParamDTO postgreSqlDatasourceParamDTO = new PostgreSqlDatasourceParamDTO();
postgreSqlDatasourceParamDTO.setUserName("root");
postgreSqlDatasourceParamDTO.setPassword("123456");
postgreSqlDatasourceParamDTO.setHost("localhost");
postgreSqlDatasourceParamDTO.setPort(3308);
postgreSqlDatasourceParamDTO.setDatabase("default");
PostgreSqlConnectionParam connectionParams = (PostgreSqlConnectionParam) postgreSqlDatasourceProcessor
.createConnectionParams(postgreSqlDatasourceParamDTO);
Assert.assertEquals("jdbc:postgresql://localhost:3308", connectionParams.getAddress());
Assert.assertEquals("jdbc:postgresql://localhost:3308/default", connectionParams.getJdbcUrl());
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:postgresql://localhost:3308\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:postgresql://localhost:3308/default\"}";
PostgreSqlConnectionParam connectionParams = (PostgreSqlConnectionParam) postgreSqlDatasourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.ORG_POSTGRESQL_DRIVER, postgreSqlDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
PostgreSqlConnectionParam postgreSqlConnectionParam = new PostgreSqlConnectionParam();
postgreSqlConnectionParam.setJdbcUrl("jdbc:postgresql://localhost:3308/default");
postgreSqlConnectionParam.setOther("other");
String jdbcUrl = postgreSqlDatasourceProcessor.getJdbcUrl(postgreSqlConnectionParam);
Assert.assertEquals("jdbc:postgresql://localhost:3308/default?other", jdbcUrl);
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.POSTGRESQL, postgreSqlDatasourceProcessor.getDbType());
}
}

81
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessorTest.java

@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.presto;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class PrestoDatasourceProcessorTest {
private PrestoDatasourceProcessor prestoDatasourceProcessor = new PrestoDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
PrestoDatasourceParamDTO prestoDatasourceParamDTO = new PrestoDatasourceParamDTO();
prestoDatasourceParamDTO.setHost("localhost");
prestoDatasourceParamDTO.setPort(1234);
prestoDatasourceParamDTO.setDatabase("default");
prestoDatasourceParamDTO.setUserName("root");
prestoDatasourceParamDTO.setPassword("123456");
PrestoConnectionParam connectionParams = (PrestoConnectionParam) prestoDatasourceProcessor
.createConnectionParams(prestoDatasourceParamDTO);
Assert.assertEquals("jdbc:presto://localhost:1234", connectionParams.getAddress());
Assert.assertEquals("jdbc:presto://localhost:1234/default", connectionParams.getJdbcUrl());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:presto://localhost:1234\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:presto://localhost:1234/default\"}";
PrestoConnectionParam connectionParams = (PrestoConnectionParam) prestoDatasourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.COM_PRESTO_JDBC_DRIVER, prestoDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
PrestoConnectionParam prestoConnectionParam = new PrestoConnectionParam();
prestoConnectionParam.setJdbcUrl("jdbc:postgresql://localhost:1234/default");
prestoConnectionParam.setOther("other");
Assert.assertEquals("jdbc:postgresql://localhost:1234/default?other",
prestoDatasourceProcessor.getJdbcUrl(prestoConnectionParam));
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.PRESTO, prestoDatasourceProcessor.getDbType());
}
}

80
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessorTest.java

@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.spark;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class SparkDatasourceProcessorTest {
private SparkDatasourceProcessor sparkDatasourceProcessor = new SparkDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
SparkDatasourceParamDTO sparkDatasourceParamDTO = new SparkDatasourceParamDTO();
sparkDatasourceParamDTO.setUserName("root");
sparkDatasourceParamDTO.setPassword("12345");
sparkDatasourceParamDTO.setHost("localhost1,localhost2");
sparkDatasourceParamDTO.setPort(1234);
sparkDatasourceParamDTO.setDatabase("default");
SparkConnectionParam connectionParams = (SparkConnectionParam) sparkDatasourceProcessor
.createConnectionParams(sparkDatasourceParamDTO);
Assert.assertEquals("jdbc:hive2://localhost1:1234,localhost2:1234", connectionParams.getAddress());
Assert.assertEquals("jdbc:hive2://localhost1:1234,localhost2:1234/default", connectionParams.getJdbcUrl());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"12345\",\"address\":\"jdbc:hive2://localhost1:1234,localhost2:1234\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:hive2://localhost1:1234,localhost2:1234/default\"}";
SparkConnectionParam connectionParams = (SparkConnectionParam) sparkDatasourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER, sparkDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
SparkConnectionParam sparkConnectionParam = new SparkConnectionParam();
sparkConnectionParam.setJdbcUrl("jdbc:hive2://localhost1:1234,localhost2:1234/default");
sparkConnectionParam.setOther("other");
Assert.assertEquals("jdbc:hive2://localhost1:1234,localhost2:1234/default;other",
sparkDatasourceProcessor.getJdbcUrl(sparkConnectionParam));
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.SPARK, sparkDatasourceProcessor.getDbType());
}
}

81
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java

@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.datasource.sqlserver;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.sql.DriverManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class})
public class SqlServerDatasourceProcessorTest {
private SqlServerDatasourceProcessor sqlServerDatasourceProcessor = new SqlServerDatasourceProcessor();
@Test
public void testCreateConnectionParams() {
SqlServerDatasourceParamDTO sqlServerDatasourceParamDTO = new SqlServerDatasourceParamDTO();
sqlServerDatasourceParamDTO.setUserName("root");
sqlServerDatasourceParamDTO.setPassword("123456");
sqlServerDatasourceParamDTO.setDatabase("default");
sqlServerDatasourceParamDTO.setHost("localhost");
sqlServerDatasourceParamDTO.setPort(1234);
SqlServerConnectionParam connectionParams = (SqlServerConnectionParam) sqlServerDatasourceProcessor
.createConnectionParams(sqlServerDatasourceParamDTO);
Assert.assertEquals("jdbc:sqlserver://localhost:1234", connectionParams.getAddress());
Assert.assertEquals("jdbc:sqlserver://localhost:1234;databaseName=default", connectionParams.getJdbcUrl());
Assert.assertEquals("root", connectionParams.getUser());
}
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:sqlserver://localhost:1234\""
+ ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:sqlserver://localhost:1234;databaseName=default\"}";
SqlServerConnectionParam sqlServerConnectionParam = JSONUtils.parseObject(connectionJson, SqlServerConnectionParam.class);
Assert.assertNotNull(sqlServerConnectionParam);
Assert.assertEquals("root", sqlServerConnectionParam.getUser());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.COM_SQLSERVER_JDBC_DRIVER, sqlServerDatasourceProcessor.getDatasourceDriver());
}
@Test
public void testGetJdbcUrl() {
SqlServerConnectionParam sqlServerConnectionParam = new SqlServerConnectionParam();
sqlServerConnectionParam.setJdbcUrl("jdbc:sqlserver://localhost:1234;databaseName=default");
sqlServerConnectionParam.setOther("other");
Assert.assertEquals("jdbc:sqlserver://localhost:1234;databaseName=default;other",
sqlServerDatasourceProcessor.getJdbcUrl(sqlServerConnectionParam));
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.SQLSERVER, sqlServerDatasourceProcessor.getDbType());
}
}

278
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java

@ -1,278 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import static org.apache.dolphinscheduler.common.Constants.PASSWORD;
import static org.apache.dolphinscheduler.common.Constants.USER;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* data source base class
*/
public abstract class BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(BaseDataSource.class);
/**
* user name
*/
protected String user;
/**
* user password
*/
protected String password;
/**
* data source address
*/
private String address;
/**
* database name
*/
private String database;
/**
* other connection parameters for the data source
*/
private String other;
/**
* principal
*/
private String principal;
/**
* java.security.krb5.conf
*/
private String javaSecurityKrb5Conf;
/**
* login.user.keytab.username
*/
private String loginUserKeytabUsername;
/**
* login.user.keytab.path
*/
private String loginUserKeytabPath;
public String getPrincipal() {
return principal;
}
public void setPrincipal(String principal) {
this.principal = principal;
}
/**
* @return driver class
*/
public abstract String driverClassSelector();
/**
* @return db type
*/
public abstract DbType dbTypeSelector();
/**
* gets the JDBC url for the data source connection
* @return getJdbcUrl
*/
public String getJdbcUrl() {
StringBuilder jdbcUrl = new StringBuilder(getAddress());
appendDatabase(jdbcUrl);
appendPrincipal(jdbcUrl);
appendOther(jdbcUrl);
return jdbcUrl.toString();
}
/**
* append database
* @param jdbcUrl jdbc url
*/
protected void appendDatabase(StringBuilder jdbcUrl) {
if (dbTypeSelector() == DbType.SQLSERVER) {
jdbcUrl.append(";databaseName=").append(getDatabase());
} else {
if (getAddress().lastIndexOf('/') != (jdbcUrl.length() - 1)) {
jdbcUrl.append("/");
}
jdbcUrl.append(getDatabase());
}
}
/**
* append principal
* @param jdbcUrl jdbc url
*/
private void appendPrincipal(StringBuilder jdbcUrl) {
boolean tag = dbTypeSelector() == DbType.HIVE || dbTypeSelector() == DbType.SPARK;
if (tag && StringUtils.isNotEmpty(getPrincipal())) {
jdbcUrl.append(";principal=").append(getPrincipal());
}
}
/**
* append other
* @param jdbcUrl jdbc url
*/
private void appendOther(StringBuilder jdbcUrl) {
String otherParams = filterOther(getOther());
if (StringUtils.isNotEmpty(otherParams)) {
String separator = "";
switch (dbTypeSelector()) {
case CLICKHOUSE:
case MYSQL:
case ORACLE:
case POSTGRESQL:
case PRESTO:
separator = "?";
break;
case DB2:
separator = ":";
break;
case HIVE:
if ("?".equals(otherParams.substring(0, 1))) {
break;
}
separator = ";";
break;
case SPARK:
case SQLSERVER:
separator = ";";
break;
default:
logger.error("Db type mismatch!");
}
jdbcUrl.append(separator).append(otherParams);
}
}
/**
* the data source test connection
* @return Connection Connection
* @throws Exception Exception
*/
public Connection getConnection() throws Exception {
Class.forName(driverClassSelector());
return DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword());
}
/**
* the data source test connection
* @param info Properties
* @return Connection Connection
* @throws Exception Exception
*/
public Connection getConnection(Properties info) throws Exception {
Class.forName(driverClassSelector());
info.setProperty(USER, getUser());
info.setProperty(PASSWORD, getPassword());
return DriverManager.getConnection(getJdbcUrl(), info);
}
protected String filterOther(String otherParams) {
return otherParams;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
/**
* password need decode
* @return
*/
public String getPassword() {
return CommonUtils.decodePassword(password);
}
public void setPassword(String password) {
this.password = password;
}
public void setAddress(String address) {
this.address = address;
}
public String getAddress() {
return address;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getOther() {
return other;
}
public void setOther(String other) {
this.other = other;
}
public void setConnParams(String connParams) {
}
public String getJavaSecurityKrb5Conf() {
return javaSecurityKrb5Conf;
}
public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) {
this.javaSecurityKrb5Conf = javaSecurityKrb5Conf;
}
public String getLoginUserKeytabUsername() {
return loginUserKeytabUsername;
}
public void setLoginUserKeytabUsername(String loginUserKeytabUsername) {
this.loginUserKeytabUsername = loginUserKeytabUsername;
}
public String getLoginUserKeytabPath() {
return loginUserKeytabPath;
}
public void setLoginUserKeytabPath(String loginUserKeytabPath) {
this.loginUserKeytabPath = loginUserKeytabPath;
}
}

108
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java

@ -1,108 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* produce datasource in this custom defined datasource factory.
*/
public class DataSourceFactory {
private static final Logger logger = LoggerFactory.getLogger(DataSourceFactory.class);
/**
* getDatasource
* @param dbType dbType
* @param parameter parameter
* @return getDatasource
*/
public static BaseDataSource getDatasource(DbType dbType, String parameter) {
try {
switch (dbType) {
case MYSQL:
return JSONUtils.parseObject(parameter, MySQLDataSource.class);
case POSTGRESQL:
return JSONUtils.parseObject(parameter, PostgreDataSource.class);
case HIVE:
return JSONUtils.parseObject(parameter, HiveDataSource.class);
case SPARK:
return JSONUtils.parseObject(parameter, SparkDataSource.class);
case CLICKHOUSE:
return JSONUtils.parseObject(parameter, ClickHouseDataSource.class);
case ORACLE:
return JSONUtils.parseObject(parameter, OracleDataSource.class);
case SQLSERVER:
return JSONUtils.parseObject(parameter, SQLServerDataSource.class);
case DB2:
return JSONUtils.parseObject(parameter, DB2ServerDataSource.class);
case PRESTO:
return JSONUtils.parseObject(parameter, PrestoDataSource.class);
default:
return null;
}
} catch (Exception e) {
logger.error("get datasource object error", e);
return null;
}
}
/**
* load the JDBC driver class for the given db type
* @param dbType db type
* @throws Exception if the db type is unsupported or the driver class cannot be found
*/
public static void loadClass(DbType dbType) throws Exception{
switch (dbType){
case MYSQL :
Class.forName(Constants.COM_MYSQL_JDBC_DRIVER);
break;
case POSTGRESQL :
Class.forName(Constants.ORG_POSTGRESQL_DRIVER);
break;
case HIVE :
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case SPARK :
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case CLICKHOUSE :
Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
break;
case ORACLE :
Class.forName(Constants.COM_ORACLE_JDBC_DRIVER);
break;
case SQLSERVER:
Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER);
break;
case DB2:
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
case PRESTO:
Class.forName(Constants.COM_PRESTO_JDBC_DRIVER);
break;
default:
logger.error("not support sql type: {},can't load class", dbType);
throw new IllegalArgumentException("not support sql type,can't load class");
}
}
}
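A short, hedged sketch of how this now-removed factory was typically called before the refactor. The JSON shape is an assumption based on the setters of BaseDataSource above; only the two factory methods themselves are taken from this file.
// hypothetical connection-parameter JSON; field names mirror BaseDataSource's setters
String parameter = "{\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\","
+ "\"user\":\"test\",\"password\":\"123456\"}";
DataSourceFactory.loadClass(DbType.MYSQL);                        // registers the JDBC driver class
BaseDataSource dataSource = DataSourceFactory.getDatasource(DbType.MYSQL, parameter);
if (dataSource != null) {
System.out.println(dataSource.getJdbcUrl());
}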

120
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java

@ -1,120 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import static org.apache.dolphinscheduler.common.Constants.SEMICOLON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.HiveConfUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import java.sql.Connection;
import java.util.Map;
/**
* data source of hive
*/
public class HiveDataSource extends BaseDataSource {
/**
* gets the JDBC driver class used for the data source connection
* @return driver class name
*/
@Override
public String driverClassSelector() {
return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
}
/**
* @return db type
*/
@Override
public DbType dbTypeSelector() {
return DbType.HIVE;
}
/**
* build hive jdbc params,append : ?hive_conf_list
*
* hive jdbc url template:
*
* jdbc:hive2://<host1>:<port1>,<host2>:<port2>/dbName;initFile=<file>;sess_var_list?hive_conf_list#hive_var_list
*
* @param otherParams otherParams
* @return filter otherParams
*/
@Override
protected String filterOther(String otherParams) {
if (StringUtils.isBlank(otherParams)) {
return "";
}
StringBuilder hiveConfListSb = new StringBuilder();
hiveConfListSb.append("?");
StringBuilder sessionVarListSb = new StringBuilder();
String[] otherArray = otherParams.split(";", -1);
for (String conf : otherArray) {
if (HiveConfUtils.isHiveConfVar(conf)) {
hiveConfListSb.append(conf).append(";");
} else {
sessionVarListSb.append(conf).append(";");
}
}
// remove the last ";"
if (sessionVarListSb.length() > 0) {
sessionVarListSb.deleteCharAt(sessionVarListSb.length() - 1);
}
if (hiveConfListSb.length() > 0) {
hiveConfListSb.deleteCharAt(hiveConfListSb.length() - 1);
}
return sessionVarListSb.toString() + hiveConfListSb.toString();
}
/**
* the data source test connection
* @return Connection Connection
* @throws Exception Exception
*/
@Override
public Connection getConnection() throws Exception {
CommonUtils.loadKerberosConf(getJavaSecurityKrb5Conf(), getLoginUserKeytabUsername(), getLoginUserKeytabPath());
return super.getConnection();
}
@Override
public void setConnParams(String connParams) {
// Verification parameters
Map<String, String> connParamMap = CollectionUtils.stringToMap(connParams, SEMICOLON);
if (connParamMap.isEmpty()) {
return;
}
StringBuilder otherSb = new StringBuilder();
connParamMap.forEach((k, v) -> otherSb.append(String.format("%s=%s%s", k, v, SEMICOLON)));
StringBuilder otherAppend = StringUtils.isNotBlank(getOther()) ? otherSb.append(getOther()) : otherSb.deleteCharAt(otherSb.length() - 1);
super.setOther(otherAppend.toString());
}
}
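The splitting rule in filterOther and the merge done by setConnParams are easiest to see with concrete values. The expected strings below are copied from HiveDataSourceTest and BaseDataSourceTest further down in this diff; the fragment assumes same-package access, as in those tests.
HiveDataSource hive = new HiveDataSource();
// session variables stay in front of '?', hive conf variables move behind it
String other = hive.filterOther("hive.mapred.mode=strict;charset=UTF-8");
// other == "charset=UTF-8?hive.mapred.mode=strict"
hive.setAddress("jdbc:hive2://127.0.0.1:10000");
hive.setDatabase("test");
hive.setConnParams("hive.tez.container.size=20000");
// getJdbcUrl() == "jdbc:hive2://127.0.0.1:10000/test?hive.tez.container.size=20000"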

120
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java

@ -1,120 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.commons.collections4.MapUtils;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* data source of mySQL
*/
public class MySQLDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(MySQLDataSource.class);
private static final String ALLOW_LOAD_LOCAL_IN_FILE_NAME = "allowLoadLocalInfile";
private static final String AUTO_DESERIALIZE = "autoDeserialize";
private static final String ALLOW_LOCAL_IN_FILE_NAME = "allowLocalInfile";
private static final String ALLOW_URL_IN_LOCAL_IN_FILE_NAME = "allowUrlInLocalInfile";
private static final String APPEND_PARAMS = "allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false";
private static boolean checkKeyIsLegitimate(String key) {
return !key.contains(ALLOW_LOAD_LOCAL_IN_FILE_NAME) && !key.contains(AUTO_DESERIALIZE) && !key.contains(ALLOW_LOCAL_IN_FILE_NAME) && !key.contains(ALLOW_URL_IN_LOCAL_IN_FILE_NAME);
}
/**
* gets the JDBC driver class used for the data source connection
*
* @return driver class name
*/
@Override
public String driverClassSelector() {
return Constants.COM_MYSQL_JDBC_DRIVER;
}
/**
* @return db type
*/
@Override
public DbType dbTypeSelector() {
return DbType.MYSQL;
}
public static Map<String, String> buildOtherParams(String other) {
if (StringUtils.isBlank(other)) {
return null;
}
Map<String, String> paramMap = JSONUtils.toMap(other);
if (MapUtils.isEmpty(paramMap)) {
return null;
}
Map<String, String> newParamMap = new HashMap<>();
paramMap.forEach((k, v) -> {
if (!checkKeyIsLegitimate(k)) {
return;
}
newParamMap.put(k, v);
});
return newParamMap;
}
@Override
public String getUser() {
if (user.contains(AUTO_DESERIALIZE)) {
logger.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE);
user = user.replace(AUTO_DESERIALIZE, "");
}
logger.debug("username : {}", user);
return user;
}
@Override
protected String filterOther(String otherParams) {
if (StringUtils.isBlank(otherParams)) {
return APPEND_PARAMS;
}
char symbol = '&';
return otherParams + symbol + APPEND_PARAMS;
}
@Override
public String getPassword() {
// password need decode
password = super.getPassword();
if (password.contains(AUTO_DESERIALIZE)) {
logger.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE);
password = password.replace(AUTO_DESERIALIZE, "");
}
return password;
}
}
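The effect of the sensitive-key handling above is spelled out in MySQLDataSourceTest below; as a quick summary, with the expected values taken from that test and same-package access assumed for the protected method:
MySQLDataSource mysql = new MySQLDataSource();
// extra url params always get the four load-local/deserialize switches forced to false
String other = mysql.filterOther("serverTimezone=Asia/Shanghai&characterEncoding=utf8");
// other == "serverTimezone=Asia/Shanghai&characterEncoding=utf8&allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false"
mysql.setUser("test123?autoDeserialize=true");
// getUser() strips the sensitive token and returns "test123?=true"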

195
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSourceTest.java

@ -1,195 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.junit.Assert;
import org.junit.Test;
public class BaseDataSourceTest {
@Test
public void testDriverClassSelector() {
String mysqlDriverClass = new MySQLDataSource().driverClassSelector();
Assert.assertEquals(Constants.COM_MYSQL_JDBC_DRIVER, mysqlDriverClass);
String clickHouseDriverClass = new ClickHouseDataSource().driverClassSelector();
Assert.assertEquals(Constants.COM_CLICKHOUSE_JDBC_DRIVER, clickHouseDriverClass);
String db2ServerDriverClass = new DB2ServerDataSource().driverClassSelector();
Assert.assertEquals(Constants.COM_DB2_JDBC_DRIVER, db2ServerDriverClass);
String oracleDriverClass = new OracleDataSource().driverClassSelector();
Assert.assertEquals(Constants.COM_ORACLE_JDBC_DRIVER, oracleDriverClass);
String postgreDriverClass = new PostgreDataSource().driverClassSelector();
Assert.assertEquals(Constants.ORG_POSTGRESQL_DRIVER, postgreDriverClass);
String sqlServerDriverClass = new SQLServerDataSource().driverClassSelector();
Assert.assertEquals(Constants.COM_SQLSERVER_JDBC_DRIVER, sqlServerDriverClass);
String hiveDriverClass = new HiveDataSource().driverClassSelector();
Assert.assertEquals(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER, hiveDriverClass);
String sparkDriverClass = new SparkDataSource().driverClassSelector();
Assert.assertEquals(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER, sparkDriverClass);
}
@Test
public void testGetJdbcUrl() {
BaseDataSource hiveDataSource = new HiveDataSource();
hiveDataSource.setAddress("jdbc:hive2://127.0.0.1:10000");
hiveDataSource.setDatabase("test");
hiveDataSource.setPassword("123456");
hiveDataSource.setUser("test");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test", hiveDataSource.getJdbcUrl());
//set principal
hiveDataSource.setPrincipal("hive/test.com@TEST.COM");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test;principal=hive/test.com@TEST.COM",
hiveDataSource.getJdbcUrl());
//set fake other
hiveDataSource.setOther("charset=UTF-8");
Assert.assertEquals(
"jdbc:hive2://127.0.0.1:10000/test;principal=hive/test.com@TEST.COM;charset=UTF-8",
hiveDataSource.getJdbcUrl());
BaseDataSource clickHouseDataSource = new ClickHouseDataSource();
clickHouseDataSource.setAddress("jdbc:clickhouse://127.0.0.1:8123");
clickHouseDataSource.setDatabase("test");
clickHouseDataSource.setPassword("123456");
clickHouseDataSource.setUser("test");
Assert.assertEquals("jdbc:clickhouse://127.0.0.1:8123/test", clickHouseDataSource.getJdbcUrl());
//set fake principal
clickHouseDataSource.setPrincipal("fake principal");
Assert.assertEquals("jdbc:clickhouse://127.0.0.1:8123/test", clickHouseDataSource.getJdbcUrl());
//set fake other
clickHouseDataSource.setOther("charset=UTF-8");
Assert.assertEquals("jdbc:clickhouse://127.0.0.1:8123/test?charset=UTF-8",
clickHouseDataSource.getJdbcUrl());
BaseDataSource sqlServerDataSource = new SQLServerDataSource();
sqlServerDataSource.setAddress("jdbc:sqlserver://127.0.0.1:1433");
sqlServerDataSource.setDatabase("test");
sqlServerDataSource.setPassword("123456");
sqlServerDataSource.setUser("test");
Assert.assertEquals("jdbc:sqlserver://127.0.0.1:1433;databaseName=test",
sqlServerDataSource.getJdbcUrl());
//set fake principal
sqlServerDataSource.setPrincipal("fake principal");
Assert.assertEquals("jdbc:sqlserver://127.0.0.1:1433;databaseName=test",
sqlServerDataSource.getJdbcUrl());
//set fake other
sqlServerDataSource.setOther("charset=UTF-8");
Assert.assertEquals("jdbc:sqlserver://127.0.0.1:1433;databaseName=test;charset=UTF-8",
sqlServerDataSource.getJdbcUrl());
BaseDataSource db2DataSource = new DB2ServerDataSource();
db2DataSource.setAddress("jdbc:db2://127.0.0.1:50000");
db2DataSource.setDatabase("test");
db2DataSource.setPassword("123456");
db2DataSource.setUser("test");
Assert.assertEquals("jdbc:db2://127.0.0.1:50000/test", db2DataSource.getJdbcUrl());
//set fake principal
db2DataSource.setPrincipal("fake principal");
Assert.assertEquals("jdbc:db2://127.0.0.1:50000/test", db2DataSource.getJdbcUrl());
//set fake other
db2DataSource.setOther("charset=UTF-8");
Assert.assertEquals("jdbc:db2://127.0.0.1:50000/test:charset=UTF-8", db2DataSource.getJdbcUrl());
}
@Test
public void testGetPassword() {
BaseDataSource dataSource = new BaseDataSource() {
@Override
public String driverClassSelector() {
return null;
}
@Override
public DbType dbTypeSelector() {
return null;
}
};
String password = "";
dataSource.setPassword(password);
Assert.assertEquals("", dataSource.getPassword());
password = "IUAjJCVeJipNVEl6TkRVMg==";
dataSource.setPassword(password);
Assert.assertNotNull(dataSource.getPassword());
Assert.assertNotNull(dataSource.getPassword());
dataSource.setPassword(password);
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"true");
Assert.assertEquals("123456", dataSource.getPassword());
dataSource.setPassword(password);
Assert.assertEquals("123456", dataSource.getPassword());
Assert.assertEquals("123456", dataSource.getPassword());
Assert.assertEquals("123456", dataSource.getPassword());
dataSource.setPassword(password);
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"false");
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
dataSource.setPassword(password);
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
}
@Test
public void testSetConnParams() {
BaseDataSource hiveDataSource = new HiveDataSource();
hiveDataSource.setAddress("jdbc:hive2://127.0.0.1:10000");
hiveDataSource.setDatabase("test");
hiveDataSource.setPassword("123456");
hiveDataSource.setUser("test");
hiveDataSource.setConnParams("");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test", hiveDataSource.getJdbcUrl());
//set fake other
hiveDataSource.setConnParams("hive.tez.container.size=20000;");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test?hive.tez.container.size=20000", hiveDataSource.getJdbcUrl());
hiveDataSource.setOther(null);
hiveDataSource.setConnParams("hive.tez.container.size=20000");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test?hive.tez.container.size=20000", hiveDataSource.getJdbcUrl());
hiveDataSource.setOther(null);
hiveDataSource.setConnParams("hive.tez.container.size=20000;hive.zzz=100");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test;hive.zzz=100?hive.tez.container.size=20000", hiveDataSource.getJdbcUrl());
hiveDataSource.setOther("charset=UTF-8");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test;charset=UTF-8", hiveDataSource.getJdbcUrl());
hiveDataSource.setConnParams("hive.tez.container.size=20000;hive.zzz=100");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test;hive.zzz=100;charset=UTF-8?hive.tez.container.size=20000", hiveDataSource.getJdbcUrl());
hiveDataSource.setOther("charset=UTF-8;hive.exec.stagingdir=/tmp");
hiveDataSource.setConnParams("hive.tez.container.size=20000;hive.zzz=100");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test;hive.zzz=100;charset=UTF-8?hive.tez.container.size=20000;hive.exec.stagingdir=/tmp", hiveDataSource.getJdbcUrl());
}
}
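testGetPassword above hinges on one switch: when Constants.DATASOURCE_ENCRYPTION_ENABLE is "true" the stored token is decoded, otherwise getPassword() hands the stored value back unchanged. A compact restatement of what the test asserts, with the values copied from the test:
MySQLDataSource dataSource = new MySQLDataSource();
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
dataSource.setPassword("IUAjJCVeJipNVEl6TkRVMg==");
System.out.println(dataSource.getPassword());   // 123456
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
dataSource.setPassword("IUAjJCVeJipNVEl6TkRVMg==");
System.out.println(dataSource.getPassword());   // IUAjJCVeJipNVEl6TkRVMg==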

89
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSourceTest.java

@ -1,89 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.junit.Assert;
import org.junit.Test;
/**
* test data source of hive
*/
public class HiveDataSourceTest {
@Test
public void testFilterOther() {
BaseDataSource hiveDataSource = new HiveDataSource();
// not contain hive_site_conf
String other = hiveDataSource.filterOther("charset=UTF-8");
Assert.assertEquals("charset=UTF-8", other);
// not contain
other = hiveDataSource.filterOther("");
Assert.assertEquals("", other);
// only contain hive_site_conf
other = hiveDataSource.filterOther("hive.mapred.mode=strict");
Assert.assertEquals("?hive.mapred.mode=strict", other);
// contain hive_site_conf at the first
other = hiveDataSource.filterOther("hive.mapred.mode=strict;charset=UTF-8");
Assert.assertEquals("charset=UTF-8?hive.mapred.mode=strict", other);
// contain hive_site_conf in the middle
other = hiveDataSource.filterOther("charset=UTF-8;hive.mapred.mode=strict;foo=bar");
Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict", other);
// contain hive_site_conf at the end
other = hiveDataSource.filterOther("charset=UTF-8;foo=bar;hive.mapred.mode=strict");
Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict", other);
// contain multi hive_site_conf
other = hiveDataSource.filterOther("charset=UTF-8;foo=bar;hive.mapred.mode=strict;hive.exec.parallel=true");
Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict;hive.exec.parallel=true", other);
// the security authorization hive conf var
other = hiveDataSource.filterOther("tez.queue.name=tezTest");
Assert.assertEquals("?tez.queue.name=tezTest", other);
}
@Test
public void testGetHiveJdbcUrlOther() {
BaseDataSource hiveDataSource = new HiveDataSource();
hiveDataSource.setAddress("jdbc:hive2://127.0.0.1:10000");
hiveDataSource.setDatabase("test");
hiveDataSource.setPassword("123456");
hiveDataSource.setUser("test");
Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test", hiveDataSource.getJdbcUrl());
hiveDataSource.setOther("charset=UTF-8;hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2");
Assert.assertEquals(
"jdbc:hive2://127.0.0.1:10000/test;charset=UTF-8?hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2",
hiveDataSource.getJdbcUrl());
hiveDataSource.setOther("hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2");
Assert.assertEquals(
"jdbc:hive2://127.0.0.1:10000/test?hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2",
hiveDataSource.getJdbcUrl());
}
}

102
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSourceTest.java

@ -1,102 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.junit.Assert;
import org.junit.Test;
/**
* test data source of mySQL
*/
public class MySQLDataSourceTest {
@Test
public void testGetUser() {
MySQLDataSource dataSource = new MySQLDataSource();
String safeUsername = "test123";
dataSource.setUser(safeUsername);
Assert.assertEquals("test123", dataSource.getUser());
String sensitiveUsername = "test123?autoDeserialize=true";
dataSource.setUser(sensitiveUsername);
Assert.assertEquals("test123?=true", dataSource.getUser());
}
@Test
public void testGetPassword() {
MySQLDataSource dataSource = new MySQLDataSource();
String safePwd = "test_pwd";
dataSource.setPassword(safePwd);
Assert.assertEquals("test_pwd", dataSource.getPassword());
String sensitivePwd = "test_pwd?autoDeserialize=true";
dataSource.setPassword(sensitivePwd);
Assert.assertEquals("test_pwd?=true", dataSource.getPassword());
}
@Test
public void testFilterOther() {
MySQLDataSource dataSource = new MySQLDataSource();
String other = dataSource.filterOther("serverTimezone=Asia/Shanghai&characterEncoding=utf8");
Assert.assertEquals("serverTimezone=Asia/Shanghai&characterEncoding=utf8&allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", other);
//at the first
other = dataSource.filterOther("serverTimezone=Asia/Shanghai&characterEncoding=utf8");
Assert.assertEquals("serverTimezone=Asia/Shanghai&characterEncoding=utf8&allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", other);
//at the end
other = dataSource.filterOther("serverTimezone=Asia/Shanghai&characterEncoding=utf8");
Assert.assertEquals("serverTimezone=Asia/Shanghai&characterEncoding=utf8&allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", other);
//in the middle
other = dataSource.filterOther("serverTimezone=Asia/Shanghai&characterEncoding=utf8");
Assert.assertEquals("serverTimezone=Asia/Shanghai&characterEncoding=utf8&allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", other);
other = dataSource.filterOther(null);
Assert.assertEquals("allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", other);
}
@Test
public void testGetPasswordWithDecodePassword() {
MySQLDataSource dataSource = new MySQLDataSource();
String password = "";
dataSource.setPassword(password);
Assert.assertEquals("", dataSource.getPassword());
password = "IUAjJCVeJipNVEl6TkRVMg==";
dataSource.setPassword(password);
Assert.assertNotNull(dataSource.getPassword());
Assert.assertNotNull(dataSource.getPassword());
dataSource.setPassword(password);
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
Assert.assertEquals("123456", dataSource.getPassword());
dataSource.setPassword(password);
Assert.assertEquals("123456", dataSource.getPassword());
Assert.assertEquals("123456", dataSource.getPassword());
Assert.assertEquals("123456", dataSource.getPassword());
dataSource.setPassword(password);
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
dataSource.setPassword(password);
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==", dataSource.getPassword());
}
}

74
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSourceTest.java

@ -1,74 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.junit.Assert;
import org.junit.Test;
public class OracleDataSourceTest {
@Test
public void testGetOracleJdbcUrl() {
OracleDataSource oracleDataSource = new OracleDataSource();
oracleDataSource.setConnectType(DbConnectType.ORACLE_SERVICE_NAME);
oracleDataSource.setAddress("jdbc:oracle:thin:@//127.0.0.1:1521");
oracleDataSource.setDatabase("test");
oracleDataSource.setPassword("123456");
oracleDataSource.setUser("test");
Assert.assertEquals("jdbc:oracle:thin:@//127.0.0.1:1521/test", oracleDataSource.getJdbcUrl());
//set fake principal
oracleDataSource.setPrincipal("fake principal");
Assert.assertEquals("jdbc:oracle:thin:@//127.0.0.1:1521/test", oracleDataSource.getJdbcUrl());
//set fake other
oracleDataSource.setOther("charset=UTF-8");
Assert.assertEquals("jdbc:oracle:thin:@//127.0.0.1:1521/test?charset=UTF-8", oracleDataSource.getJdbcUrl());
OracleDataSource oracleDataSource2 = new OracleDataSource();
oracleDataSource2.setAddress("jdbc:oracle:thin:@127.0.0.1:1521");
oracleDataSource2.setDatabase("orcl");
oracleDataSource2.setPassword("123456");
oracleDataSource2.setUser("test");
oracleDataSource2.setConnectType(DbConnectType.ORACLE_SID);
Assert.assertEquals("jdbc:oracle:thin:@127.0.0.1:1521:orcl", oracleDataSource2.getJdbcUrl());
//set fake principal
oracleDataSource2.setPrincipal("fake principal");
Assert.assertEquals("jdbc:oracle:thin:@127.0.0.1:1521:orcl", oracleDataSource2.getJdbcUrl());
//set fake other
oracleDataSource2.setOther("charset=UTF-8");
Assert.assertEquals("jdbc:oracle:thin:@127.0.0.1:1521:orcl?charset=UTF-8", oracleDataSource2.getJdbcUrl());
}
@Test
public void testAppendDatabase() {
OracleDataSource oracleDataSource = new OracleDataSource();
oracleDataSource.setAddress("jdbc:oracle:thin:@//127.0.0.1:1521");
oracleDataSource.setDatabase("test");
oracleDataSource.setConnectType(DbConnectType.ORACLE_SERVICE_NAME);
StringBuilder jdbcUrl = new StringBuilder(oracleDataSource.getAddress());
oracleDataSource.appendDatabase(jdbcUrl);
Assert.assertEquals("jdbc:oracle:thin:@//127.0.0.1:1521/test", jdbcUrl.toString());
OracleDataSource oracleDataSource2 = new OracleDataSource();
oracleDataSource2.setAddress("jdbc:oracle:thin:@127.0.0.1:1521");
oracleDataSource2.setDatabase("orcl");
oracleDataSource2.setConnectType(DbConnectType.ORACLE_SID);
StringBuilder jdbcUrl2 = new StringBuilder(oracleDataSource2.getAddress());
oracleDataSource2.appendDatabase(jdbcUrl2);
Assert.assertEquals("jdbc:oracle:thin:@127.0.0.1:1521:orcl", jdbcUrl2.toString());
}
}

33
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java

@ -18,6 +18,8 @@
package org.apache.dolphinscheduler.server.worker.task.datax;
import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.Flag;
@ -25,11 +27,10 @@ import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.datax.DataxParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
+import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.DataxUtils;
@ -49,7 +50,6 @@ import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.Connection;
-import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
@ -235,10 +235,12 @@ public class DataxTask extends AbstractTask {
DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext();
-BaseDataSource dataSourceCfg = DataSourceFactory.getDatasource(DbType.of(dataxTaskExecutionContext.getSourcetype()),
+BaseConnectionParam dataSourceCfg = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
+DbType.of(dataxTaskExecutionContext.getSourcetype()),
dataxTaskExecutionContext.getSourceConnectionParams());
-BaseDataSource dataTargetCfg = DataSourceFactory.getDatasource(DbType.of(dataxTaskExecutionContext.getTargetType()),
+BaseConnectionParam dataTargetCfg = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
+DbType.of(dataxTaskExecutionContext.getTargetType()),
dataxTaskExecutionContext.getTargetConnectionParams());
List<ObjectNode> readerConnArr = new ArrayList<>();
@ -250,15 +252,13 @@
}
ArrayNode urlArr = readerConn.putArray("jdbcUrl");
-for (String url : new String[]{dataSourceCfg.getJdbcUrl()}) {
-urlArr.add(url);
-}
+urlArr.add(DatasourceUtil.getJdbcUrl(DbType.valueOf(dataXParameters.getDtType()), dataSourceCfg));
readerConnArr.add(readerConn);
ObjectNode readerParam = JSONUtils.createObjectNode();
readerParam.put("username", dataSourceCfg.getUser());
-readerParam.put("password", dataSourceCfg.getPassword());
+readerParam.put("password", CommonUtils.decodePassword(dataSourceCfg.getPassword()));
readerParam.putArray("connection").addAll(readerConnArr);
ObjectNode reader = JSONUtils.createObjectNode();
@ -268,16 +268,14 @@
List<ObjectNode> writerConnArr = new ArrayList<>();
ObjectNode writerConn = JSONUtils.createObjectNode();
ArrayNode tableArr = writerConn.putArray("table");
-for (String table : new String[]{dataXParameters.getTargetTable()}) {
-tableArr.add(table);
-}
+tableArr.add(dataXParameters.getTargetTable());
-writerConn.put("jdbcUrl", dataTargetCfg.getJdbcUrl());
+writerConn.put("jdbcUrl", DatasourceUtil.getJdbcUrl(DbType.valueOf(dataXParameters.getDsType()), dataTargetCfg));
writerConnArr.add(writerConn);
ObjectNode writerParam = JSONUtils.createObjectNode();
writerParam.put("username", dataTargetCfg.getUser());
-writerParam.put("password", dataTargetCfg.getPassword());
+writerParam.put("password", CommonUtils.decodePassword(dataTargetCfg.getPassword()));
String[] columns = parsingSqlColumnNames(DbType.of(dataxTaskExecutionContext.getSourcetype()),
DbType.of(dataxTaskExecutionContext.getTargetType()),
@ -436,7 +434,7 @@
* @param sql sql for data synchronization
* @return Keyword converted column names
*/
-private String[] parsingSqlColumnNames(DbType dsType, DbType dtType, BaseDataSource dataSourceCfg, String sql) {
+private String[] parsingSqlColumnNames(DbType dsType, DbType dtType, BaseConnectionParam dataSourceCfg, String sql) {
String[] columnNames = tryGrammaticalAnalysisSqlColumnNames(dsType, sql);
if (columnNames == null || columnNames.length == 0) {
@ -527,14 +525,13 @@
* @param sql sql for data synchronization
* @return column name array
*/
-public String[] tryExecuteSqlResolveColumnNames(BaseDataSource baseDataSource, String sql) {
+public String[] tryExecuteSqlResolveColumnNames(BaseConnectionParam baseDataSource, String sql) {
String[] columnNames;
sql = String.format("SELECT t.* FROM ( %s ) t WHERE 0 = 1", sql);
sql = sql.replace(";", "");
try (
-Connection connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), baseDataSource.getUser(),
-baseDataSource.getPassword());
+Connection connection = DatasourceUtil.getConnection(DbType.valueOf(dataXParameters.getDtType()), baseDataSource);
PreparedStatement stmt = connection.prepareStatement(sql);
ResultSet resultSet = stmt.executeQuery()) {
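Put together, the DataX changes above swap the dao-layer BaseDataSource for the new common-layer API. A rough sketch of the new flow, assuming a MySQL source and a made-up connection JSON string from the task execution context:
// sourceConnectionParamsJson is whatever the execution context carries; the calls below are the ones used in the diff
BaseConnectionParam source = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
DbType.MYSQL, sourceConnectionParamsJson);
String jdbcUrl = DatasourceUtil.getJdbcUrl(DbType.MYSQL, source);
String user = source.getUser();
String password = CommonUtils.decodePassword(source.getPassword());   // passwords stay encoded until actually needed
Connection connection = DatasourceUtil.getConnection(DbType.MYSQL, source);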

160
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java

@ -14,31 +14,39 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task.processdure;
-import com.cronutils.utils.StringUtils;
import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.enums.*;
+import org.apache.dolphinscheduler.common.datasource.ConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
+import org.apache.dolphinscheduler.common.enums.CommandType;
+import org.apache.dolphinscheduler.common.enums.DataType;
+import org.apache.dolphinscheduler.common.enums.DbType;
+import org.apache.dolphinscheduler.common.enums.Direct;
+import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
-import org.slf4j.Logger;
-import java.sql.*;
+import java.sql.CallableStatement;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Types;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
-import static org.apache.dolphinscheduler.common.enums.DataType.*;
+import org.slf4j.Logger;
/**
* procedure task
@ -50,12 +58,6 @@ public class ProcedureTask extends AbstractTask {
*/
private ProcedureParameters procedureParameters;
-/**
- * base datasource
- */
-private BaseDataSource baseDataSource;
/**
* taskExecutionContext
*/
@ -98,17 +100,13 @@
CallableStatement stmt = null;
try {
// load class
-DataSourceFactory.loadClass(DbType.valueOf(procedureParameters.getType()));
+DbType dbType = DbType.valueOf(procedureParameters.getType());
// get datasource
-baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(procedureParameters.getType()),
+ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(DbType.valueOf(procedureParameters.getType()),
taskExecutionContext.getProcedureTaskExecutionContext().getConnectionParams());
// get jdbc connection
-connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(),
-baseDataSource.getUser(),
-baseDataSource.getPassword());
+connection = DatasourceUtil.getConnection(dbType, connectionParam);
@ -119,10 +117,9 @@
CommandType.of(taskExecutionContext.getCmdTypeIfComplement()),
taskExecutionContext.getScheduleTime());
Collection<Property> userDefParamsList = null;
-if (procedureParameters.getLocalParametersMap() != null){
+if (procedureParameters.getLocalParametersMap() != null) {
userDefParamsList = procedureParameters.getLocalParametersMap().values();
}
@ -139,7 +136,6 @@
// outParameterMap
Map<Integer, Property> outParameterMap = getOutParameterMap(stmt, paramsMap, userDefParamsList);
stmt.executeUpdate();
/**
@ -148,13 +144,12 @@
printOutParameter(stmt, outParameterMap);
setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
-}catch (Exception e){
+} catch (Exception e) {
setExitStatusCode(Constants.EXIT_CODE_FAILURE);
-logger.error("procedure task error",e);
+logger.error("procedure task error", e);
throw e;
-}
-finally {
-close(stmt,connection);
+} finally {
+close(stmt, connection);
}
}
@ -165,17 +160,17 @@
*/
private String getCallMethod(Collection<Property> userDefParamsList) {
String method;// no parameters
-if (CollectionUtils.isEmpty(userDefParamsList)){
+if (CollectionUtils.isEmpty(userDefParamsList)) {
method = "{call " + procedureParameters.getMethod() + "}";
-}else { // exists parameters
+} else { // exists parameters
int size = userDefParamsList.size();
StringBuilder parameter = new StringBuilder();
parameter.append("(");
-for (int i = 0 ;i < size - 1; i++){
+for (int i = 0; i < size - 1; i++) {
parameter.append("?,");
}
parameter.append("?)");
-method = "{call " + procedureParameters.getMethod() + parameter.toString()+ "}";
+method = "{call " + procedureParameters.getMethod() + parameter.toString() + "}";
}
return method;
}
@ -189,7 +184,7 @@
private void printOutParameter(CallableStatement stmt,
Map<Integer, Property> outParameterMap) throws SQLException {
Iterator<Map.Entry<Integer, Property>> iter = outParameterMap.entrySet().iterator();
-while (iter.hasNext()){
+while (iter.hasNext()) {
Map.Entry<Integer, Property> en = iter.next();
int index = en.getKey();
@ -214,21 +209,21 @@
Map<String, Property> paramsMap,
Collection<Property> userDefParamsList) throws Exception {
Map<Integer,Property> outParameterMap = new HashMap<>();
-if (userDefParamsList != null && userDefParamsList.size() > 0){
+if (userDefParamsList != null && userDefParamsList.size() > 0) {
int index = 1;
-for (Property property : userDefParamsList){
+for (Property property : userDefParamsList) {
logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}"
-,property.getProp(),
+, property.getProp(),
property.getDirect(),
property.getType(),
property.getValue());
// set parameters
-if (property.getDirect().equals(Direct.IN)){
+if (property.getDirect().equals(Direct.IN)) {
ParameterUtils.setInParameter(index, stmt, property.getType(), paramsMap.get(property.getProp()).getValue());
-}else if (property.getDirect().equals(Direct.OUT)){
+} else if (property.getDirect().equals(Direct.OUT)) {
-setOutParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue());
+setOutParameter(index, stmt, property.getType(), paramsMap.get(property.getProp()).getValue());
property.setValue(paramsMap.get(property.getProp()).getValue());
-outParameterMap.put(index,property);
+outParameterMap.put(index, property);
}
index++;
}
@ -244,7 +239,7 @@
private void setTimeout(CallableStatement stmt) throws SQLException {
Boolean failed = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.FAILED;
Boolean warnfailed = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.WARNFAILED;
-if(failed || warnfailed){
+if (failed || warnfailed) {
stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout());
}
}
@ -256,7 +251,7 @@
* @param connection
*/
private void close(PreparedStatement stmt,
-Connection connection){
+Connection connection) {
if (stmt != null) {
try {
stmt.close();
@ -282,15 +277,15 @@
* @throws SQLException
*/
private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException {
-switch (dataType){
+switch (dataType) {
case VARCHAR:
-logger.info("out prameter varchar key : {} , value : {}",prop,stmt.getString(index));
+logger.info("out prameter varchar key : {} , value : {}", prop, stmt.getString(index));
break;
case INTEGER:
logger.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index));
break;
case LONG:
-logger.info("out prameter long key : {} , value : {}",prop,stmt.getLong(index));
+logger.info("out prameter long key : {} , value : {}", prop, stmt.getLong(index));
break;
case FLOAT:
logger.info("out prameter float key : {} , value : {}",prop,stmt.getFloat(index));
@ -322,72 +317,73 @@
/**
* set out parameter
+*
* @param index index
* @param stmt stmt
* @param dataType dataType
* @param value value
* @throws Exception exception
*/
-private void setOutParameter(int index,CallableStatement stmt,DataType dataType,String value)throws Exception{
+private void setOutParameter(int index, CallableStatement stmt, DataType dataType, String value) throws Exception {
-if (dataType.equals(VARCHAR)){
+if (dataType.equals(DataType.VARCHAR)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.VARCHAR);
-}else {
+} else {
stmt.registerOutParameter(index, Types.VARCHAR, value);
}
-}else if (dataType.equals(INTEGER)){
+} else if (dataType.equals(DataType.INTEGER)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.INTEGER);
-}else {
+} else {
stmt.registerOutParameter(index, Types.INTEGER, value);
}
-}else if (dataType.equals(LONG)){
+} else if (dataType.equals(DataType.LONG)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
-stmt.registerOutParameter(index,Types.INTEGER);
+stmt.registerOutParameter(index, Types.INTEGER);
-}else {
+} else {
-stmt.registerOutParameter(index,Types.INTEGER ,value);
+stmt.registerOutParameter(index, Types.INTEGER, value);
}
-}else if (dataType.equals(FLOAT)){
+} else if (dataType.equals(DataType.FLOAT)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.FLOAT);
-}else {
+} else {
-stmt.registerOutParameter(index, Types.FLOAT,value);
+stmt.registerOutParameter(index, Types.FLOAT, value);
}
-}else if (dataType.equals(DOUBLE)){
+} else if (dataType.equals(DataType.DOUBLE)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.DOUBLE);
-}else {
+} else {
-stmt.registerOutParameter(index, Types.DOUBLE , value);
+stmt.registerOutParameter(index, Types.DOUBLE, value);
}
-}else if (dataType.equals(DATE)){
+} else if (dataType.equals(DataType.DATE)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.DATE);
-}else {
+} else {
-stmt.registerOutParameter(index, Types.DATE , value);
+stmt.registerOutParameter(index, Types.DATE, value);
}
-}else if (dataType.equals(TIME)){
+} else if (dataType.equals(DataType.TIME)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.TIME);
-}else {
+} else {
-stmt.registerOutParameter(index, Types.TIME , value);
+stmt.registerOutParameter(index, Types.TIME, value);
}
-}else if (dataType.equals(TIMESTAMP)){
+} else if (dataType.equals(DataType.TIMESTAMP)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.TIMESTAMP);
-}else {
+} else {
-stmt.registerOutParameter(index, Types.TIMESTAMP , value);
+stmt.registerOutParameter(index, Types.TIMESTAMP, value);
}
-}else if (dataType.equals(BOOLEAN)){
+} else if (dataType.equals(DataType.BOOLEAN)) {
-if (StringUtils.isEmpty(value)){
+if (StringUtils.isEmpty(value)) {
stmt.registerOutParameter(index, Types.BOOLEAN);
-}else {
+} else {
-stmt.registerOutParameter(index, Types.BOOLEAN , value);
+stmt.registerOutParameter(index, Types.BOOLEAN, value);
}
}
}
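A side note on the getCallMethod helper shown in the diff above: for a procedure with two user-defined parameters it produces the JDBC escape string "{call myProcedure(?,?)}", and with no parameters simply "{call myProcedure}". A tiny sketch of how such a string is consumed (prepareCall is the standard JDBC call; the procedure name is illustrative):
// e.g. procedureParameters.getMethod() == "myProcedure" and two IN/OUT properties
String call = getCallMethod(userDefParamsList);        // -> "{call myProcedure(?,?)}"
CallableStatement stmt = connection.prepareCall(call);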

13
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java

@ -18,6 +18,8 @@
package org.apache.dolphinscheduler.server.worker.task.sql;
import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.Direct;
@ -32,8 +34,6 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand;
import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
@ -79,7 +79,7 @@ public class SqlTask extends AbstractTask {
/**
* base datasource
*/
-private BaseDataSource baseDataSource;
+private BaseConnectionParam baseConnectionParam;
/**
* taskExecutionContext
@ -129,7 +129,8 @@
SQLTaskExecutionContext sqlTaskExecutionContext = taskExecutionContext.getSqlTaskExecutionContext();
// get datasource
-baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(sqlParameters.getType()),
+baseConnectionParam = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
+DbType.valueOf(sqlParameters.getType()),
sqlTaskExecutionContext.getConnectionParams());
// ready to execute SQL and parameter entity Map
@ -244,10 +245,8 @@
ResultSet resultSet = null;
try {
-baseDataSource.setConnParams(sqlParameters.getConnParams());
// create connection
-connection = baseDataSource.getConnection();
+connection = DatasourceUtil.getConnection(DbType.valueOf(sqlParameters.getType()), baseConnectionParam);
// create temp function
if (CollectionUtils.isNotEmpty(createFuncs)) {
createTempFunction(connection, createFuncs);

14
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java

@@ -18,15 +18,16 @@
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources;
 import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.enums.SqoopQueryType;
 import org.apache.dolphinscheduler.common.process.Property;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter;
+import org.apache.dolphinscheduler.common.utils.CommonUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
 import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
@@ -54,17 +55,20 @@ public class MysqlSourceGenerator implements ISourceGenerator {
         SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext();
         if (null != sourceMysqlParameter) {
-            BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getSourcetype()),
-                    sqoopTaskExecutionContext.getSourceConnectionParams());
+            BaseConnectionParam baseDataSource = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
+                    DbType.of(sqoopTaskExecutionContext.getSourcetype()),
+                    sqoopTaskExecutionContext.getSourceConnectionParams());
             if (null != baseDataSource) {
                 mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT)
-                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getJdbcUrl()).append(Constants.DOUBLE_QUOTES)
+                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES)
+                        .append(DatasourceUtil.getJdbcUrl(DbType.MYSQL, baseDataSource)).append(Constants.DOUBLE_QUOTES)
                         .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME)
                         .append(Constants.SPACE).append(baseDataSource.getUser())
                         .append(Constants.SPACE).append(SqoopConstants.DB_PWD)
-                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES);
+                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES)
+                        .append(CommonUtils.decodePassword(baseDataSource.getPassword())).append(Constants.DOUBLE_QUOTES);
                 //sqoop table & sql query
                 if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
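Note on the Sqoop generator change above: the JDBC URL is now derived via DatasourceUtil.getJdbcUrl, and the password, which is kept encoded in storage, is decoded with CommonUtils.decodePassword only while the command line is assembled. A rough sketch of how such a connect fragment could be built, with literal " --connect", " --username" and " --password" standing in for the SqoopConstants values (class and method names here are illustrative):

import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;

public class SqoopConnectArgsSketch {

    // builds the connection part of a sqoop command from the stored datasource JSON
    public static String buildConnectArgs(String connectionParamsJson) {
        BaseConnectionParam param = (BaseConnectionParam) DatasourceUtil
                .buildConnectionParams(DbType.MYSQL, connectionParamsJson);

        StringBuilder sb = new StringBuilder();
        sb.append(" --connect \"")
                .append(DatasourceUtil.getJdbcUrl(DbType.MYSQL, param)).append('"')
                .append(" --username ").append(param.getUser())
                // the password stays encoded in storage and is only decoded here
                .append(" --password \"")
                .append(CommonUtils.decodePassword(param.getPassword())).append('"');
        return sb.toString();
    }
}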

14
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java

@@ -18,13 +18,14 @@
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets;
 import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter;
+import org.apache.dolphinscheduler.common.utils.CommonUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
 import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
@@ -54,17 +55,20 @@ public class MysqlTargetGenerator implements ITargetGenerator {
         if (null != targetMysqlParameter && targetMysqlParameter.getTargetDatasource() != 0) {
             // get datasource
-            BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getTargetType()),
-                    sqoopTaskExecutionContext.getTargetConnectionParams());
+            BaseConnectionParam baseDataSource = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
+                    DbType.of(sqoopTaskExecutionContext.getTargetType()),
+                    sqoopTaskExecutionContext.getTargetConnectionParams());
             if (null != baseDataSource) {
                 mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT)
-                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getJdbcUrl()).append(Constants.DOUBLE_QUOTES)
+                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES)
+                        .append(DatasourceUtil.getJdbcUrl(DbType.MYSQL, baseDataSource)).append(Constants.DOUBLE_QUOTES)
                         .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME)
                         .append(Constants.SPACE).append(baseDataSource.getUser())
                         .append(Constants.SPACE).append(SqoopConstants.DB_PWD)
-                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES)
+                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES)
+                        .append(CommonUtils.decodePassword(baseDataSource.getPassword())).append(Constants.DOUBLE_QUOTES)
                         .append(Constants.SPACE).append(SqoopConstants.TABLE)
                         .append(Constants.SPACE).append(targetMysqlParameter.getTargetTable());

9
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java

@@ -19,11 +19,11 @@ package org.apache.dolphinscheduler.server.worker.task.datax;
 import static org.apache.dolphinscheduler.common.enums.CommandType.START_PROCESS;
+import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.task.datax.DataxParameters;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
-import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
 import org.apache.dolphinscheduler.dao.entity.DataSource;
 import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
 import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext;
@@ -253,10 +253,11 @@ public class DataxTaskTest {
     public void testParsingSqlColumnNames()
             throws Exception {
         try {
-            BaseDataSource dataSource = DataSourceFactory.getDatasource(getDataSource().getType(),
-                    getDataSource().getConnectionParams());
+            BaseConnectionParam dataSource = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(
+                    getDataSource().getType(),
+                    getDataSource().getConnectionParams());
-            Method method = DataxTask.class.getDeclaredMethod("parsingSqlColumnNames", DbType.class, DbType.class, BaseDataSource.class, String.class);
+            Method method = DataxTask.class.getDeclaredMethod("parsingSqlColumnNames", DbType.class, DbType.class, BaseConnectionParam.class, String.class);
             method.setAccessible(true);
             String[] columns = (String[]) method.invoke(dataxTask, DbType.MYSQL, DbType.MYSQL, dataSource, "select 1 as a, 2 as `table` from dual");

11
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTaskTest.java

@@ -18,9 +18,9 @@
 package org.apache.dolphinscheduler.server.worker.task.sql;
 import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
 import org.apache.dolphinscheduler.common.utils.ParameterUtils;
 import org.apache.dolphinscheduler.dao.AlertDao;
-import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
 import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand;
 import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
@@ -29,7 +29,6 @@ import org.apache.dolphinscheduler.service.alert.AlertClientService;
 import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
@@ -51,8 +50,8 @@ import org.slf4j.LoggerFactory;
  * sql task test
  */
 @RunWith(PowerMockRunner.class)
-@PrepareForTest(value = {SqlTask.class, DriverManager.class, SpringApplicationContext.class,
-        ParameterUtils.class, AlertSendResponseCommand.class, BaseDataSource.class})
+@PrepareForTest(value = {SqlTask.class, DatasourceUtil.class, SpringApplicationContext.class,
+        ParameterUtils.class, AlertSendResponseCommand.class})
 public class SqlTaskTest {
     private static final Logger logger = LoggerFactory.getLogger(SqlTaskTest.class);
@@ -110,12 +109,12 @@ public class SqlTaskTest {
     @Test
     public void testHandle() throws Exception {
         Connection connection = PowerMockito.mock(Connection.class);
-        PowerMockito.mockStatic(DriverManager.class);
-        PowerMockito.when(DriverManager.getConnection(Mockito.any(), Mockito.any(), Mockito.any())).thenReturn(connection);
         PreparedStatement preparedStatement = PowerMockito.mock(PreparedStatement.class);
         PowerMockito.when(connection.prepareStatement(Mockito.any())).thenReturn(preparedStatement);
         PowerMockito.mockStatic(ParameterUtils.class);
         PowerMockito.when(ParameterUtils.replaceScheduleTime(Mockito.any(), Mockito.any())).thenReturn("insert into tb_1 values('1','2')");
+        PowerMockito.mockStatic(DatasourceUtil.class);
+        PowerMockito.when(DatasourceUtil.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection);
         sqlTask.handle();
         Assert.assertEquals(Constants.EXIT_CODE_SUCCESS, sqlTask.getExitStatusCode());
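Note on the SqlTaskTest change above: because connections are now obtained through a static DatasourceUtil call, the test stubs that class with PowerMock rather than DriverManager. A pared-down sketch of the same pattern in isolation (the test class name is illustrative; DatasourceUtil.getConnection is the method this commit introduces):

import java.sql.Connection;

import org.apache.dolphinscheduler.common.datasource.DatasourceUtil;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest(DatasourceUtil.class)
public class DatasourceUtilMockSketch {

    @Test
    public void getConnectionIsStubbed() throws Exception {
        Connection connection = PowerMockito.mock(Connection.class);

        // stub the static factory so no real database is needed
        PowerMockito.mockStatic(DatasourceUtil.class);
        PowerMockito.when(DatasourceUtil.getConnection(Mockito.any(), Mockito.any()))
                .thenReturn(connection);

        // any code under test calling DatasourceUtil.getConnection(...) now receives the mock
        Assert.assertSame(connection, DatasourceUtil.getConnection(null, null));
    }
}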

4
dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue

@@ -312,7 +312,7 @@
           userName: this.userName,
           password: this.password,
           connectType: this.connectType,
-          other: this.other
+          other: this.other === '' ? null : JSON.parse(this.other)
         }
       },
       /**
@@ -424,7 +424,7 @@
         this.userName = res.userName
         this.password = res.password
         this.connectType = res.connectType
-        this.other = JSON.stringify(res.other) === '{}' ? '' : JSON.stringify(res.other)
+        this.other = res.other === null ? '' : JSON.stringify(res.other)
       }).catch(e => {
         this.$message.error(e.msg || '')
       })

12
dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js

@@ -29,7 +29,9 @@ export default {
     return new Promise((resolve, reject) => {
       io.post('datasources/create', payload, res => {
         resolve(res)
-      }).catch(e => {
+      }, () => {
+        // do nothing
+      }, { emulateJSON: false }).catch(e => {
         reject(e)
       })
     })
@@ -42,7 +44,9 @@
     return new Promise((resolve, reject) => {
       io.post('datasources/connect', payload, res => {
         resolve(res)
-      }).catch(e => {
+      }, () => {
+        // do nothing
+      }, { emulateJSON: false }).catch(e => {
         reject(e)
       })
     })
@@ -94,7 +98,9 @@
     return new Promise((resolve, reject) => {
       io.post('datasources/update', payload, res => {
         resolve(res)
-      }).catch(e => {
+      }, () => {
+        // do nothing
+      }, { emulateJSON: false }).catch(e => {
         reject(e)
       })
     })

10
pom.xml

@@ -891,6 +891,16 @@
 <include>**/common/utils/HadoopUtils.java</include>
 <include>**/common/utils/RetryerUtilsTest.java</include>
 <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include>
+<include>**/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java</include>
+<include>**/common/datasource/db2/Db2DatasourceProcessorTest.java</include>
+<include>**/common/datasource/hive/HiveDatasourceProcessorTest.java</include>
+<include>**/common/datasource/mysql/MysqlDatasourceProcessorTest.java</include>
+<include>**/common/datasource/oracle/OracleDatasourceProcessorTest.java</include>
+<include>**/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java</include>
+<include>**/common/datasource/presto/PrestoDatasourceProcessorTest.java</include>
+<include>**/common/datasource/spark/SparkDatasourceProcessorTest.java</include>
+<include>**/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java</include>
+<include>**/common/datasource/DatasourceUtilTest.java</include>
 <include>**/common/enums/ExecutionStatusTest</include>
 <include>**/dao/mapper/AccessTokenMapperTest.java</include>
 <include>**/dao/mapper/AlertGroupMapperTest.java</include>
