
Merge branch 'dev' into removeUnnecessaryLock

Rubik-W 4 years ago committed by GitHub
commit c0ddadef4d
80 changed files; the number in parentheses after each path is its changed-line count:

1. .github/workflows/ci_e2e.yml (2)
2. .github/workflows/ci_ut.yml (2)
3. dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java (2)
4. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (2)
5. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (16)
6. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java (408)
7. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java (5)
8. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java (6)
9. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java (3)
10. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/exportprocess/DataSourceParam.java (21)
11. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/exportprocess/DependentParam.java (40)
12. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/exportprocess/ProcessAddTaskParam.java (6)
13. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java (5)
14. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (8)
15. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/exportprocess/DataSourceParamTest.java (19)
16. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/exportprocess/DependentParamTest.java (34)
17. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (5)
18. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java (20)
19. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java (16)
20. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java (17)
21. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopJobType.java (41)
22. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java (13)
23. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java (27)
24. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java (99)
25. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java (2)
26. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (59)
27. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java (388)
28. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java (2)
29. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StreamUtils.java (36)
30. dolphinscheduler-common/src/main/resources/common.properties (14)
31. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/HttpParametersTest.java (91)
32. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java (6)
33. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java (36)
34. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java (8)
35. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StreamUtilsTest.java (39)
36. dolphinscheduler-microbench/pom.xml (101)
37. dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java (123)
38. dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/EnumBenchMark.java (112)
39. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java (32)
40. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java (18)
41. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java (9)
42. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java (39)
43. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java (22)
44. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java (14)
45. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java (2)
46. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java (4)
47. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTaskTest.java (208)
48. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java (126)
49. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue (2)
50. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue (67)
51. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue (58)
52. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue (962)
53. dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js (16)
54. dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js (16)
55. e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java (34)
56. e2e/src/test/java/org/apache/dolphinscheduler/data/security/AlertManageData.java (36)
57. e2e/src/test/java/org/apache/dolphinscheduler/data/security/QueueManageData.java (42)
58. e2e/src/test/java/org/apache/dolphinscheduler/data/security/TokenManageData.java (24)
59. e2e/src/test/java/org/apache/dolphinscheduler/locator/security/AlertManageLocator.java (35)
60. e2e/src/test/java/org/apache/dolphinscheduler/locator/security/QueueManageLocator.java (31)
61. e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TokenManageLocator.java (41)
62. e2e/src/test/java/org/apache/dolphinscheduler/page/security/AlertManagePage.java (77)
63. e2e/src/test/java/org/apache/dolphinscheduler/page/security/QueueManagePage.java (85)
64. e2e/src/test/java/org/apache/dolphinscheduler/page/security/TokenManagePage.java (94)
65. e2e/src/test/java/org/apache/dolphinscheduler/testcase/TestLogin.java (4)
66. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteAlert.java (42)
67. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteProject.java (6)
68. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteTenant.java (6)
69. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteToken.java (42)
70. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteUser.java (6)
71. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteWorkflow.java (6)
72. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testProject/TestCreateProject.java (6)
73. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testProject/TestCreateWorkflow.java (6)
74. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestAlertManage.java (36)
75. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestQueueManage.java (42)
76. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTenantManage.java (6)
77. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTokenManage.java (43)
78. e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestUserManage.java (6)
79. e2e/testng.xml (28)
80. pom.xml (4)

.github/workflows/ci_e2e.yml (2 changed lines)

@@ -69,6 +69,6 @@ jobs:
uses: actions/upload-artifact@v1
with:
name: dslogs
path: /var/lib/docker/volumes/docker-swarm_dolphinscheduler-logs/_data
path: /var/lib/docker/volumes/dolphinscheduler-logs/_data

.github/workflows/ci_ut.yml (2 changed lines)

@@ -78,7 +78,7 @@ jobs:
-Dsonar.core.codeCoveragePlugin=jacoco
-Dsonar.projectKey=apache-dolphinscheduler
-Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682
-Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js
-Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java (2 changed lines)

@@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.alert.utils;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
@@ -109,4 +110,5 @@ public class JSONUtilsTest {
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (2 changed lines)

@@ -218,7 +218,7 @@ public enum Status {
DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid", "流程节点[%s]参数无效"),
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node {0} parameter invalid", "流程节点[{0}]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
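Note: the %s to {0} change above matters because these Status messages use positional placeholders in the {0} style (as the neighbouring entries show), which suggests they are expanded with java.text.MessageFormat or an equivalent formatter; a printf-style %s would pass through unreplaced. A minimal standalone sketch of the difference, plain JDK, nothing DolphinScheduler-specific:

    import java.text.MessageFormat;

    public class PlaceholderDemo {
        public static void main(String[] args) {
            // {0} is substituted by MessageFormat ...
            System.out.println(MessageFormat.format(
                    "process node {0} parameter invalid", "task-1"));
            // ... while %s is not, so the pre-fix message would print verbatim
            System.out.println(MessageFormat.format(
                    "process node %s parameter invalid", "task-1"));
        }
    }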

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (16 changed lines)

@@ -21,6 +21,7 @@ import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
@@ -159,8 +160,18 @@ public class DataSourceService extends BaseService{
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
//check password,if the password is not updated, set to the old password.
JSONObject paramObject = JSON.parseObject(parameter);
String password = paramObject.getString(Constants.PASSWORD);
if (StringUtils.isBlank(password)) {
String oldConnectionParams = dataSource.getConnectionParams();
JSONObject oldParams = JSON.parseObject(oldConnectionParams);
paramObject.put(Constants.PASSWORD, oldParams.getString(Constants.PASSWORD));
}
// connectionParams json
String connectionParams = paramObject.toJSONString();
Boolean isConnection = checkConnection(type, parameter);
Boolean isConnection = checkConnection(type, connectionParams);
if (!isConnection) {
logger.info("connect failed, type:{}, parameter:{}", type, parameter);
putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
@@ -172,7 +183,7 @@ public class DataSourceService extends BaseService{
dataSource.setNote(desc);
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(parameter);
dataSource.setConnectionParams(connectionParams);
dataSource.setUpdateTime(now);
dataSourceMapper.updateById(dataSource);
putMsg(result, Status.SUCCESS);
@@ -257,7 +268,6 @@ public class DataSourceService extends BaseService{
map.put(PRINCIPAL, datasourceForm.getPrincipal());
map.put(DATABASE, database);
map.put(USER_NAME, datasourceForm.getUser());
map.put(PASSWORD, datasourceForm.getPassword());
map.put(OTHER, otherMap);
result.put(Constants.DATA_LIST, map);
putMsg(result, Status.SUCCESS);
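Note: the new block above keeps the stored password when an update request leaves it blank. A self-contained sketch of that merge step, using the same fastjson and commons-lang calls as the diff; the literal "password" key here is an illustrative stand-in for Constants.PASSWORD:

    import com.alibaba.fastjson.JSON;
    import com.alibaba.fastjson.JSONObject;
    import org.apache.commons.lang.StringUtils;

    public class PasswordMergeDemo {
        public static void main(String[] args) {
            String submitted = "{\"user\":\"root\",\"password\":\"\"}";
            String stored = "{\"user\":\"root\",\"password\":\"secret\"}";

            JSONObject paramObject = JSON.parseObject(submitted);
            // a blank password means "unchanged": carry the old one over
            if (StringUtils.isBlank(paramObject.getString("password"))) {
                JSONObject oldParams = JSON.parseObject(stored);
                paramObject.put("password", oldParams.getString("password"));
            }
            // the merged string is what then gets connection-checked and persisted
            System.out.println(paramObject.toJSONString());
        }
    }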

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java (408 changed lines)

@@ -17,11 +17,12 @@
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
@@ -73,6 +74,11 @@ public class ProcessDefinitionService extends BaseDAGService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionService.class);
private static final String PROCESSDEFINITIONID = "processDefinitionId";
private static final String RELEASESTATE = "releaseState";
private static final String TASKS = "tasks";
@Autowired
private ProjectMapper projectMapper;
@@ -99,13 +105,13 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
* @throws JsonProcessingException JsonProcessingException
*/
@@ -159,30 +165,34 @@ public class ProcessDefinitionService extends BaseDAGService {
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
processDefineMapper.insert(processDefine);
// return processDefinition object with ID
result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId()));
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",processDefine.getId());
result.put("processDefinitionId", processDefine.getId());
return result;
}
/**
* get resource ids
*
* @param processData process data
* @return resource ids
*/
private String getResourceIds(ProcessData processData) {
List<TaskNode> tasks = processData.getTasks();
Set<Integer> resourceIds = new HashSet<>();
for(TaskNode taskNode : tasks){
for (TaskNode taskNode : tasks) {
String taskParameter = taskNode.getParams();
AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(),taskParameter);
AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter);
if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) {
Set<Integer> tempSet = params.getResourceFilesList().stream().map(t->t.getId()).collect(Collectors.toSet());
Set<Integer> tempSet = params.getResourceFilesList().stream().map(t -> t.getId()).collect(Collectors.toSet());
resourceIds.addAll(tempSet);
}
}
StringBuilder sb = new StringBuilder();
for(int i : resourceIds) {
for (int i : resourceIds) {
if (sb.length() > 0) {
sb.append(",");
}
@@ -195,7 +205,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* query process definition list
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
@@ -221,12 +231,12 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* query process definition list paging
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
public Map<String, Object> queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) {
@@ -242,10 +252,10 @@ public class ProcessDefinitionService extends BaseDAGService {
Page<ProcessDefinition> page = new Page(pageNo, pageSize);
IPage<ProcessDefinition> processDefinitionIPage = processDefineMapper.queryDefineListPaging(
page, searchVal, userId, project.getId(),isAdmin(loginUser));
page, searchVal, userId, project.getId(), isAdmin(loginUser));
PageInfo pageInfo = new PageInfo<ProcessData>(pageNo, pageSize);
pageInfo.setTotalCount((int)processDefinitionIPage.getTotal());
pageInfo.setTotalCount((int) processDefinitionIPage.getTotal());
pageInfo.setLists(processDefinitionIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
@@ -256,9 +266,9 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* query datail of process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processId process definition id
* @return process definition detail
*/
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
@@ -286,12 +296,12 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* copy process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processId process definition id
* @return copy result code
*/
public Map<String, Object> copyProcessDefinition(User loginUser, String projectName, Integer processId) throws JsonProcessingException{
public Map<String, Object> copyProcessDefinition(User loginUser, String projectName, Integer processId) throws JsonProcessingException {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
@@ -310,7 +320,7 @@ public class ProcessDefinitionService extends BaseDAGService {
return createProcessDefinition(
loginUser,
projectName,
processDefinition.getName()+"_copy_"+System.currentTimeMillis(),
processDefinition.getName() + "_copy_" + System.currentTimeMillis(),
processDefinition.getProcessDefinitionJson(),
processDefinition.getDescription(),
processDefinition.getLocations(),
@@ -321,14 +331,14 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
public Map<String, Object> updateProcessDefinition(User loginUser, String projectName, int id, String name,
@@ -397,9 +407,9 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* verify process definition name unique
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
public Map<String, Object> verifyProcessDefinitionName(User loginUser, String projectName, String name) {
@@ -424,8 +434,8 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@@ -456,22 +466,22 @@ public class ProcessDefinitionService extends BaseDAGService {
// check process definition is already online
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE,processDefinitionId);
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, processDefinitionId);
return result;
}
// get the timing according to the process definition
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
if (!schedules.isEmpty() && schedules.size() > 1) {
logger.warn("scheduler num is {},Greater than 1",schedules.size());
logger.warn("scheduler num is {},Greater than 1", schedules.size());
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR);
return result;
}else if(schedules.size() == 1){
} else if (schedules.size() == 1) {
Schedule schedule = schedules.get(0);
if(schedule.getReleaseState() == ReleaseState.OFFLINE){
if (schedule.getReleaseState() == ReleaseState.OFFLINE) {
scheduleMapper.deleteById(schedule.getId());
}else if(schedule.getReleaseState() == ReleaseState.ONLINE){
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId());
} else if (schedule.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
return result;
}
}
@@ -489,9 +499,9 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param releaseState release state
* @return release result code
*/
@@ -510,7 +520,7 @@ public class ProcessDefinitionService extends BaseDAGService {
// check state
if (null == state) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
@@ -522,12 +532,12 @@ public class ProcessDefinitionService extends BaseDAGService {
String resourceIds = processDefinition.getResourceIds();
if (StringUtils.isNotBlank(resourceIds)) {
Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,processService,resourceIdArray,loginUser.getId(),logger);
PermissionCheck<Integer> permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE_ID, processService, resourceIdArray, loginUser.getId(), logger);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error(e.getMessage(),e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, "releaseState");
logger.error(e.getMessage(), e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, RELEASESTATE);
return result;
}
}
@@ -542,7 +552,7 @@ public class ProcessDefinitionService extends BaseDAGService {
new int[]{processDefinition.getId()}
);
for(Schedule schedule:scheduleList){
for (Schedule schedule : scheduleList) {
logger.info("set schedule offline, project id: {}, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id);
// set status
schedule.setReleaseState(ReleaseState.OFFLINE);
@@ -551,7 +561,7 @@ public class ProcessDefinitionService extends BaseDAGService {
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
@@ -561,14 +571,15 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* batch export process definition by ids
*
* @param loginUser
* @param projectName
* @param processDefinitionIds
* @param response
*/
public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response){
public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) {
if(StringUtils.isEmpty(processDefinitionIds)){
if (StringUtils.isEmpty(processDefinitionIds)) {
return;
}
@@ -579,24 +590,25 @@ public class ProcessDefinitionService extends BaseDAGService {
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if(resultStatus != Status.SUCCESS){
if (resultStatus != Status.SUCCESS) {
return;
}
List<ProcessMeta> processDefinitionList =
getProcessDefinitionList(processDefinitionIds);
if(CollectionUtils.isNotEmpty(processDefinitionList)){
if (CollectionUtils.isNotEmpty(processDefinitionList)) {
downloadProcessDefinitionFile(response, processDefinitionList);
}
}
/**
* get process definition list by ids
*
* @param processDefinitionIds
* @return
*/
private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds){
private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds) {
List<ProcessMeta> processDefinitionList = new ArrayList<>();
String[] processDefinitionIdArray = processDefinitionIds.split(",");
for (String strProcessDefinitionId : processDefinitionIdArray) {
@@ -613,6 +625,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* download the process definition file
*
* @param response
* @param processDefinitionList
*/
@@ -628,7 +641,7 @@ public class ProcessDefinitionService extends BaseDAGService {
buff.close();
} catch (IOException e) {
logger.warn("export process fail", e);
}finally {
} finally {
if (null != buff) {
try {
buff.close();
@@ -648,19 +661,21 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* get export process metadata string
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @param processDefinition process definition
* @return export process metadata string
*/
public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
//create workflow json file
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId,processDefinition));
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition));
}
/**
* get export process metadata string
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @param processDefinition process definition
* @return export process metadata string
*/
public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) {
@@ -696,17 +711,18 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* correct task param which has datasource or dependent
*
* @param processDefinitionJson processDefinitionJson
* @return correct processDefinitionJson
*/
public String addExportTaskNodeSpecialParam(String processDefinitionJson) {
JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson);
JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.path(TASKS);
for (int i = 0; i < jsonArray.size(); i++) {
JSONObject taskNode = jsonArray.getJSONObject(i);
if (StringUtils.isNotEmpty(taskNode.getString("type"))) {
String taskType = taskNode.getString("type");
JsonNode taskNode = jsonArray.path(i);
if (StringUtils.isNotEmpty(taskNode.path("type").asText())) {
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
if (null != addTaskParam) {
@@ -714,12 +730,13 @@ public class ProcessDefinitionService extends BaseDAGService {
}
}
}
jsonObject.put("tasks", jsonArray);
jsonObject.set(TASKS, jsonArray);
return jsonObject.toString();
}
/**
* check task if has sub process
*
* @param taskType task type
* @return if task has sub process return true else false
*/
@@ -729,8 +746,9 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* import process definition
* @param loginUser login user
* @param file process metadata json file
*
* @param loginUser login user
* @param file process metadata json file
* @param currentProjectName current project name
* @return import process
*/
@@ -738,7 +756,7 @@ public class ProcessDefinitionService extends BaseDAGService {
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
Map<String, Object> result = new HashMap<>(5);
String processMetaJson = FileUtils.file2String(file);
List<ProcessMeta> processMetaList = JSON.parseArray(processMetaJson,ProcessMeta.class);
List<ProcessMeta> processMetaList = JSON.parseArray(processMetaJson, ProcessMeta.class);
//check file content
if (CollectionUtils.isEmpty(processMetaList)) {
@@ -746,9 +764,9 @@ public class ProcessDefinitionService extends BaseDAGService {
return result;
}
for(ProcessMeta processMeta:processMetaList){
for (ProcessMeta processMeta : processMetaList) {
if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)){
if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)) {
return result;
}
}
@@ -758,6 +776,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* check and import process definition
*
* @param loginUser
* @param currentProjectName
* @param result
@@ -766,7 +785,7 @@ public class ProcessDefinitionService extends BaseDAGService {
*/
private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map<String, Object> result, ProcessMeta processMeta) {
if(!checkImportanceParams(processMeta,result)){
if (!checkImportanceParams(processMeta, result)) {
return false;
}
@@ -774,7 +793,7 @@ public class ProcessDefinitionService extends BaseDAGService {
String processDefinitionName = processMeta.getProcessDefinitionName();
//use currentProjectName to query
Project targetProject = projectMapper.queryByName(currentProjectName);
if(null != targetProject){
if (null != targetProject) {
processDefinitionName = recursionProcessDefinitionName(targetProject.getId(),
processDefinitionName, 1);
}
@@ -798,14 +817,14 @@ public class ProcessDefinitionService extends BaseDAGService {
processDefinitionName,
addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject));
if(createProcessResult == null){
if (createProcessResult == null) {
return false;
}
//create process definition
Integer processDefinitionId =
Objects.isNull(createProcessResult.get("processDefinitionId"))?
null:Integer.parseInt(createProcessResult.get("processDefinitionId").toString());
Objects.isNull(createProcessResult.get(PROCESSDEFINITIONID)) ?
null : Integer.parseInt(createProcessResult.get(PROCESSDEFINITIONID).toString());
//scheduler param
return getImportProcessScheduleResult(loginUser,
@@ -819,6 +838,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* get create process result
*
* @param loginUser
* @param currentProjectName
* @param result
@@ -832,12 +852,12 @@ public class ProcessDefinitionService extends BaseDAGService {
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
String importProcessParam){
String importProcessParam) {
Map<String, Object> createProcessResult = null;
try {
createProcessResult = createProcessDefinition(loginUser
,currentProjectName,
processDefinitionName+"_import_"+System.currentTimeMillis(),
, currentProjectName,
processDefinitionName + "_import_" + System.currentTimeMillis(),
importProcessParam,
processMeta.getProcessDefinitionDescription(),
processMeta.getProcessDefinitionLocations(),
@@ -853,6 +873,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* get import process schedule result
*
* @param loginUser
* @param currentProjectName
* @param result
@@ -884,11 +905,12 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* check importance params
*
* @param processMeta
* @param result
* @return
*/
private boolean checkImportanceParams(ProcessMeta processMeta,Map<String, Object> result){
private boolean checkImportanceParams(ProcessMeta processMeta, Map<String, Object> result) {
if (StringUtils.isEmpty(processMeta.getProjectName())) {
putMsg(result, Status.DATA_IS_NULL, "projectName");
return false;
@@ -907,18 +929,19 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* import process add special task param
* @param loginUser login user
*
* @param loginUser login user
* @param processDefinitionJson process definition json
* @param targetProject target project
* @param targetProject target project
* @return import process param
*/
private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) {
JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson);
JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.get(TASKS);
//add sql and dependent param
for (int i = 0; i < jsonArray.size(); i++) {
JSONObject taskNode = jsonArray.getJSONObject(i);
String taskType = taskNode.getString("type");
JsonNode taskNode = jsonArray.path(i);
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
if (null != addTaskParam) {
addTaskParam.addImportSpecialParam(taskNode);
@@ -928,25 +951,26 @@ public class ProcessDefinitionService extends BaseDAGService {
//recursive sub-process parameter correction map key for old process id value for new process id
Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
List<Object> subProcessList = jsonArray.stream()
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).getString("type")))
List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, jsonArray, subProcessIdMap);
}
jsonObject.put("tasks", jsonArray);
jsonObject.set(TASKS, jsonArray);
return jsonObject.toString();
}
/**
* import process schedule
* @param loginUser login user
* @param currentProjectName current project name
* @param processMeta process meta data
*
* @param loginUser login user
* @param currentProjectName current project name
* @param processMeta process meta data
* @param processDefinitionName process definition name
* @param processDefinitionId process definition id
* @param processDefinitionId process definition id
* @return insert schedule flag
*/
public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta,
@@ -995,84 +1019,87 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* check import process has sub process
* recursion create sub process
* @param loginUser login user
* @param targetProject target project
* @param jsonArray process task array
*
* @param loginUser login user
* @param targetProject target project
* @param jsonArray process task array
* @param subProcessIdMap correct sub process id map
*/
public void importSubProcess(User loginUser, Project targetProject, JSONArray jsonArray, Map<Integer, Integer> subProcessIdMap) {
public void importSubProcess(User loginUser, Project targetProject, ArrayNode jsonArray, Map<Integer, Integer> subProcessIdMap) {
for (int i = 0; i < jsonArray.size(); i++) {
JSONObject taskNode = jsonArray.getJSONObject(i);
String taskType = taskNode.getString("type");
if (checkTaskHasSubProcess(taskType)) {
//get sub process info
JSONObject subParams = JSONUtils.parseObject(taskNode.getString("params"));
Integer subProcessId = subParams.getInteger("processDefinitionId");
ProcessDefinition subProcess = processDefineMapper.queryByDefineId(subProcessId);
//check is sub process exist in db
if (null != subProcess) {
String subProcessJson = subProcess.getProcessDefinitionJson();
//check current project has sub process
ProcessDefinition currentProjectSubProcess = processDefineMapper.queryByDefineName(targetProject.getId(), subProcess.getName());
if (null == currentProjectSubProcess) {
JSONArray subJsonArray = (JSONArray) JSONUtils.parseObject(subProcess.getProcessDefinitionJson()).get("tasks");
List<Object> subProcessList = subJsonArray.stream()
.filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).getString("type")))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap);
//sub process processId correct
if (!subProcessIdMap.isEmpty()) {
for (Map.Entry<Integer, Integer> entry : subProcessIdMap.entrySet()) {
String oldSubProcessId = "\"processDefinitionId\":" + entry.getKey();
String newSubProcessId = "\"processDefinitionId\":" + entry.getValue();
subProcessJson = subProcessJson.replaceAll(oldSubProcessId, newSubProcessId);
}
subProcessIdMap.clear();
}
}
ObjectNode taskNode = (ObjectNode) jsonArray.path(i);
String taskType = taskNode.path("type").asText();
//if sub-process recursion
Date now = new Date();
//create sub process in target project
ProcessDefinition processDefine = new ProcessDefinition();
processDefine.setName(subProcess.getName());
processDefine.setVersion(subProcess.getVersion());
processDefine.setReleaseState(subProcess.getReleaseState());
processDefine.setProjectId(targetProject.getId());
processDefine.setUserId(loginUser.getId());
processDefine.setProcessDefinitionJson(subProcessJson);
processDefine.setDescription(subProcess.getDescription());
processDefine.setLocations(subProcess.getLocations());
processDefine.setConnects(subProcess.getConnects());
processDefine.setTimeout(subProcess.getTimeout());
processDefine.setTenantId(subProcess.getTenantId());
processDefine.setGlobalParams(subProcess.getGlobalParams());
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(subProcess.getFlag());
processDefine.setReceivers(subProcess.getReceivers());
processDefine.setReceiversCc(subProcess.getReceiversCc());
processDefineMapper.insert(processDefine);
logger.info("create sub process, project: {}, process name: {}", targetProject.getName(), processDefine.getName());
//modify task node
ProcessDefinition newSubProcessDefine = processDefineMapper.queryByDefineName(processDefine.getProjectId(),processDefine.getName());
if (null != newSubProcessDefine) {
subProcessIdMap.put(subProcessId, newSubProcessDefine.getId());
subParams.put("processDefinitionId", newSubProcessDefine.getId());
taskNode.put("params", subParams);
if (!checkTaskHasSubProcess(taskType)) {
continue;
}
//get sub process info
ObjectNode subParams = (ObjectNode) taskNode.path("params");
Integer subProcessId = subParams.path(PROCESSDEFINITIONID).asInt();
ProcessDefinition subProcess = processDefineMapper.queryByDefineId(subProcessId);
//check is sub process exist in db
if (null == subProcess) {
continue;
}
String subProcessJson = subProcess.getProcessDefinitionJson();
//check current project has sub process
ProcessDefinition currentProjectSubProcess = processDefineMapper.queryByDefineName(targetProject.getId(), subProcess.getName());
if (null == currentProjectSubProcess) {
ArrayNode subJsonArray = (ArrayNode) JSONUtils.parseObject(subProcess.getProcessDefinitionJson()).get(TASKS);
List<Object> subProcessList = StreamUtils.asStream(subJsonArray.elements())
.filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).path("type").asText()))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap);
//sub process processId correct
if (!subProcessIdMap.isEmpty()) {
for (Map.Entry<Integer, Integer> entry : subProcessIdMap.entrySet()) {
String oldSubProcessId = "\"processDefinitionId\":" + entry.getKey();
String newSubProcessId = "\"processDefinitionId\":" + entry.getValue();
subProcessJson = subProcessJson.replaceAll(oldSubProcessId, newSubProcessId);
}
subProcessIdMap.clear();
}
}
//if sub-process recursion
Date now = new Date();
//create sub process in target project
ProcessDefinition processDefine = new ProcessDefinition();
processDefine.setName(subProcess.getName());
processDefine.setVersion(subProcess.getVersion());
processDefine.setReleaseState(subProcess.getReleaseState());
processDefine.setProjectId(targetProject.getId());
processDefine.setUserId(loginUser.getId());
processDefine.setProcessDefinitionJson(subProcessJson);
processDefine.setDescription(subProcess.getDescription());
processDefine.setLocations(subProcess.getLocations());
processDefine.setConnects(subProcess.getConnects());
processDefine.setTimeout(subProcess.getTimeout());
processDefine.setTenantId(subProcess.getTenantId());
processDefine.setGlobalParams(subProcess.getGlobalParams());
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(subProcess.getFlag());
processDefine.setReceivers(subProcess.getReceivers());
processDefine.setReceiversCc(subProcess.getReceiversCc());
processDefineMapper.insert(processDefine);
logger.info("create sub process, project: {}, process name: {}", targetProject.getName(), processDefine.getName());
//modify task node
ProcessDefinition newSubProcessDefine = processDefineMapper.queryByDefineName(processDefine.getProjectId(), processDefine.getName());
if (null != newSubProcessDefine) {
subProcessIdMap.put(subProcessId, newSubProcessDefine.getId());
subParams.put(PROCESSDEFINITIONID, newSubProcessDefine.getId());
taskNode.set("params", subParams);
}
}
}
}
@@ -1081,7 +1108,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* check the process definition node meets the specifications
*
* @param processData process data
* @param processData process data
* @param processDefinitionJson process definition json
* @return check result code
*/
@@ -1091,7 +1118,7 @@ public class ProcessDefinitionService extends BaseDAGService {
try {
if (processData == null) {
logger.error("process data is null");
putMsg(result,Status.DATA_IS_NOT_VALID, processDefinitionJson);
putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson);
return result;
}
@@ -1122,7 +1149,7 @@ public class ProcessDefinitionService extends BaseDAGService {
// check extra params
CheckUtils.checkOtherParams(taskNode.getExtras());
}
putMsg(result,Status.SUCCESS);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
@@ -1135,9 +1162,8 @@ public class ProcessDefinitionService extends BaseDAGService {
*
* @param defineId define id
* @return task node list
* @throws Exception exception
*/
public Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId) throws Exception {
public Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
@@ -1155,7 +1181,7 @@ public class ProcessDefinitionService extends BaseDAGService {
//process data check
if (null == processData) {
logger.error("process data is null");
putMsg(result,Status.DATA_IS_NOT_VALID, processDefinitionJson);
putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson);
return result;
}
@@ -1173,15 +1199,14 @@ public class ProcessDefinitionService extends BaseDAGService {
*
* @param defineIdList define id list
* @return task node list
* @throws Exception exception
*/
public Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList) throws Exception {
public Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList) {
Map<String, Object> result = new HashMap<>();
Map<Integer, List<TaskNode>> taskNodeMap = new HashMap<>();
String[] idList = defineIdList.split(",");
List<Integer> idIntList = new ArrayList<>();
for(String definitionId : idList) {
for (String definitionId : idList) {
idIntList.add(Integer.parseInt(definitionId));
}
Integer[] idArray = idIntList.toArray(new Integer[idIntList.size()]);
@@ -1192,7 +1217,7 @@ public class ProcessDefinitionService extends BaseDAGService {
return result;
}
for(ProcessDefinition processDefinition : processDefinitionList){
for (ProcessDefinition processDefinition : processDefinitionList) {
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks();
@@ -1228,7 +1253,7 @@ public class ProcessDefinitionService extends BaseDAGService {
* Encapsulates the TreeView structure
*
* @param processId process definition id
* @param limit limit
* @param limit limit
* @return tree view json data
* @throws Exception exception
*/
@@ -1238,7 +1263,7 @@ public class ProcessDefinitionService extends BaseDAGService {
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (null == processDefinition) {
logger.info("process define not exists");
putMsg(result,Status.PROCESS_DEFINE_NOT_EXIST, processDefinition);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition);
return result;
}
DAG<String, TaskNode, TaskNodeRelation> dag = genDagGraph(processDefinition);
@@ -1257,8 +1282,8 @@ public class ProcessDefinitionService extends BaseDAGService {
*/
List<ProcessInstance> processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit);
for(ProcessInstance processInstance:processInstanceList){
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(),processInstance.getEndTime()));
for (ProcessInstance processInstance : processInstanceList) {
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
}
if (limit > processInstanceList.size()) {
@@ -1361,9 +1386,8 @@ public class ProcessDefinitionService extends BaseDAGService {
*
* @param processDefinition process definition
* @return dag graph
* @throws Exception if exception happens
*/
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDefinition processDefinition) throws Exception {
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDefinition processDefinition) {
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
@@ -1383,8 +1407,6 @@ public class ProcessDefinitionService extends BaseDAGService {
}
/**
* whether the graph has a ring
*
@@ -1402,7 +1424,7 @@ public class ProcessDefinitionService extends BaseDAGService {
// Fill edge relations
for (TaskNode taskNodeResponse : taskNodeResponseList) {
taskNodeResponse.getPreTasks();
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(),String.class);
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class);
if (CollectionUtils.isNotEmpty(preTasks)) {
for (String preTask : preTasks) {
if (!graph.addEdge(preTask, taskNodeResponse.getName())) {
@@ -1415,19 +1437,19 @@ public class ProcessDefinitionService extends BaseDAGService {
return graph.hasCycle();
}
private String recursionProcessDefinitionName(Integer projectId,String processDefinitionName,int num){
private String recursionProcessDefinitionName(Integer projectId, String processDefinitionName, int num) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(projectId, processDefinitionName);
if (processDefinition != null) {
if(num > 1){
String str = processDefinitionName.substring(0,processDefinitionName.length() - 3);
processDefinitionName = str + "("+num+")";
}else{
processDefinitionName = processDefinition.getName() + "("+num+")";
if (num > 1) {
String str = processDefinitionName.substring(0, processDefinitionName.length() - 3);
processDefinitionName = str + "(" + num + ")";
} else {
processDefinitionName = processDefinition.getName() + "(" + num + ")";
}
}else{
} else {
return processDefinitionName;
}
return recursionProcessDefinitionName(projectId,processDefinitionName,num + 1);
return recursionProcessDefinitionName(projectId, processDefinitionName, num + 1);
}
}
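Note: most of the churn in this file is the fastjson-to-Jackson migration visible above: JSONObject/JSONArray with get()/getString() become ObjectNode/ArrayNode with path()/asText()/set(), and jsonArray.stream() becomes StreamUtils.asStream(jsonArray.elements()) because Jackson's ArrayNode exposes an Iterator rather than a Stream. A minimal sketch of the pattern; the asStream helper below is a guess at what dolphinscheduler-common's StreamUtils provides (its diff is not shown in this excerpt), not the committed code:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    import java.util.Iterator;
    import java.util.Spliterator;
    import java.util.Spliterators;
    import java.util.stream.Stream;
    import java.util.stream.StreamSupport;

    public class JacksonMigrationDemo {

        // presumed shape of StreamUtils.asStream: bridge an Iterator into a Stream
        static <T> Stream<T> asStream(Iterator<T> iterator) {
            return StreamSupport.stream(
                    Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
        }

        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode process = (ObjectNode) mapper.readTree(
                    "{\"tasks\":[{\"type\":\"SUB_PROCESS\"},{\"type\":\"SHELL\"}]}");

            ArrayNode tasks = (ArrayNode) process.path("tasks");

            // path() returns MissingNode instead of null, so chained reads cannot NPE
            long subProcesses = asStream(tasks.elements())
                    .filter(t -> "SUB_PROCESS".equals(t.path("type").asText()))
                    .count();
            System.out.println("sub process tasks: " + subProcesses);

            // writes need the mutable ObjectNode type; set() replaces put() for node values
            ((ObjectNode) tasks.path(0)).put("retries", 3);
            process.set("tasks", tasks);
            System.out.println(mapper.writeValueAsString(process));
        }
    }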

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java (5 changed lines)

@@ -504,9 +504,8 @@ public class ProcessInstanceService extends BaseDAGService {
*
* @param processInstanceId process instance id
* @return variables data
* @throws Exception exception
*/
public Map<String, Object> viewVariables( Integer processInstanceId) throws Exception {
public Map<String, Object> viewVariables(Integer processInstanceId) {
Map<String, Object> result = new HashMap<>(5);
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
@@ -537,7 +536,7 @@ public class ProcessInstanceService extends BaseDAGService {
List<TaskNode> taskNodeList = workflowData.getTasks();
// global param string
String globalParamStr = JSON.toJSONString(globalParams);
String globalParamStr = JSONUtils.toJson(globalParams);
globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams);
globalParams = JSON.parseArray(globalParamStr, Property.class);
for (Property property : globalParams) {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java (6 changed lines)

@@ -88,6 +88,8 @@ public class ProjectService extends BaseService{
project.setUpdateTime(now);
if (projectMapper.insert(project) > 0) {
Project insertedProject = projectMapper.queryByName(name);
result.put(Constants.DATA_LIST, insertedProject);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
@@ -124,9 +126,7 @@ public class ProjectService extends BaseService{
* @return true if the login user have permission to see the project
*/
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) {
Map<String, Object> result = new HashMap<>(5);
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
} else if (!checkReadPermission(loginUser, project)) {
@@ -135,8 +135,6 @@ public class ProjectService extends BaseService{
}else {
putMsg(result, Status.SUCCESS);
}
return result;
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java (3 changed lines)

@@ -165,6 +165,9 @@ public class SchedulerService extends BaseService {
processDefinition.setReceivers(receivers);
processDefinition.setReceiversCc(receiversCc);
processDefinitionMapper.updateById(processDefinition);
// return scheduler object with ID
result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
putMsg(result, Status.SUCCESS);
result.put("scheduleId", scheduleObj.getId());

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/exportprocess/DataSourceParam.java (21 changed lines)

@@ -16,9 +16,9 @@
*/
package org.apache.dolphinscheduler.api.utils.exportprocess;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.springframework.beans.factory.InitializingBean;
@@ -33,6 +33,7 @@ import java.util.List;
@Service
public class DataSourceParam implements ProcessAddTaskParam, InitializingBean {
private static final String PARAMS = "params";
@Autowired
private DataSourceMapper dataSourceMapper;
@@ -42,14 +43,14 @@ public class DataSourceParam implements ProcessAddTaskParam, InitializingBean {
* @return task node json object
*/
@Override
public JSONObject addExportSpecialParam(JSONObject taskNode) {
public JsonNode addExportSpecialParam(JsonNode taskNode) {
// add sqlParameters
JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
DataSource dataSource = dataSourceMapper.selectById((Integer) sqlParameters.get("datasource"));
ObjectNode sqlParameters = (ObjectNode) taskNode.path(PARAMS);
DataSource dataSource = dataSourceMapper.selectById(sqlParameters.get("datasource").asInt());
if (null != dataSource) {
sqlParameters.put("datasourceName", dataSource.getName());
}
taskNode.put("params", sqlParameters);
((ObjectNode)taskNode).set(PARAMS, sqlParameters);
return taskNode;
}
@@ -60,14 +61,14 @@ public class DataSourceParam implements ProcessAddTaskParam, InitializingBean {
* @return task node json object
*/
@Override
public JSONObject addImportSpecialParam(JSONObject taskNode) {
JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName(sqlParameters.getString("datasourceName"));
public JsonNode addImportSpecialParam(JsonNode taskNode) {
ObjectNode sqlParameters = (ObjectNode) taskNode.path(PARAMS);
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName(sqlParameters.path("datasourceName").asText());
if (!dataSources.isEmpty()) {
DataSource dataSource = dataSources.get(0);
sqlParameters.put("datasource", dataSource.getId());
}
taskNode.put("params", sqlParameters);
((ObjectNode)taskNode).set(PARAMS, sqlParameters);
return taskNode;
}
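Note: one semantic detail in the rewrite above: the old fastjson code re-parsed taskNode.getString("params"), while the new code takes taskNode.path(PARAMS) directly as a nested node, and writes go through set() on a cast to ObjectNode. The get()/path() distinction is worth keeping in mind; a small sketch with plain Jackson (field names are illustrative):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class PathVsGetDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            JsonNode taskNode = mapper.readTree(
                    "{\"type\":\"SQL\",\"params\":{\"datasource\":1}}");

            // get() yields null for a missing field; path() yields a MissingNode
            System.out.println(taskNode.get("missing"));                  // null
            System.out.println(taskNode.path("missing").isMissingNode()); // true

            // so reads through path() can be chained safely
            System.out.println(taskNode.path("params").path("datasource").asInt()); // 1

            // writes require the concrete ObjectNode type, hence the casts in the diff
            ((ObjectNode) taskNode.path("params")).put("datasourceName", "demo_ds");
            System.out.println(taskNode);
        }
    }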

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/exportprocess/DependentParam.java (40 changed lines)

@@ -16,8 +16,9 @@
*/
package org.apache.dolphinscheduler.api.utils.exportprocess;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
@@ -34,6 +35,7 @@ import org.springframework.stereotype.Service;
@Service
public class DependentParam implements ProcessAddTaskParam, InitializingBean {
private static final String DEPENDENCE = "dependence";
@Autowired
ProcessDefinitionMapper processDefineMapper;
@@ -47,18 +49,18 @@ public class DependentParam implements ProcessAddTaskParam, InitializingBean {
* @return task node json object
*/
@Override
public JSONObject addExportSpecialParam(JSONObject taskNode) {
public JsonNode addExportSpecialParam(JsonNode taskNode) {
// add dependent param
JSONObject dependentParameters = JSONUtils.parseObject(taskNode.getString("dependence"));
ObjectNode dependentParameters = JSONUtils.parseObject(taskNode.path(DEPENDENCE).asText());
if (null != dependentParameters) {
JSONArray dependTaskList = (JSONArray) dependentParameters.get("dependTaskList");
ArrayNode dependTaskList = (ArrayNode) dependentParameters.get("dependTaskList");
for (int j = 0; j < dependTaskList.size(); j++) {
JSONObject dependentTaskModel = dependTaskList.getJSONObject(j);
JSONArray dependItemList = (JSONArray) dependentTaskModel.get("dependItemList");
JsonNode dependentTaskModel = dependTaskList.path(j);
ArrayNode dependItemList = (ArrayNode) dependentTaskModel.get("dependItemList");
for (int k = 0; k < dependItemList.size(); k++) {
JSONObject dependentItem = dependItemList.getJSONObject(k);
int definitionId = dependentItem.getInteger("definitionId");
ObjectNode dependentItem = (ObjectNode) dependItemList.path(k);
int definitionId = dependentItem.path("definitionId").asInt();
ProcessDefinition definition = processDefineMapper.queryByDefineId(definitionId);
if (null != definition) {
dependentItem.put("projectName", definition.getProjectName());
@@ -66,7 +68,7 @@ public class DependentParam implements ProcessAddTaskParam, InitializingBean {
}
}
}
taskNode.put("dependence", dependentParameters);
((ObjectNode)taskNode).set(DEPENDENCE, dependentParameters);
}
return taskNode;
@@ -78,18 +80,18 @@ public class DependentParam implements ProcessAddTaskParam, InitializingBean {
* @return
*/
@Override
public JSONObject addImportSpecialParam(JSONObject taskNode) {
JSONObject dependentParameters = JSONUtils.parseObject(taskNode.getString("dependence"));
public JsonNode addImportSpecialParam(JsonNode taskNode) {
ObjectNode dependentParameters = JSONUtils.parseObject(taskNode.path(DEPENDENCE).asText());
if(dependentParameters != null){
JSONArray dependTaskList = (JSONArray) dependentParameters.get("dependTaskList");
ArrayNode dependTaskList = (ArrayNode) dependentParameters.path("dependTaskList");
for (int h = 0; h < dependTaskList.size(); h++) {
JSONObject dependentTaskModel = dependTaskList.getJSONObject(h);
JSONArray dependItemList = (JSONArray) dependentTaskModel.get("dependItemList");
ObjectNode dependentTaskModel = (ObjectNode) dependTaskList.path(h);
ArrayNode dependItemList = (ArrayNode) dependentTaskModel.get("dependItemList");
for (int k = 0; k < dependItemList.size(); k++) {
JSONObject dependentItem = dependItemList.getJSONObject(k);
Project dependentItemProject = projectMapper.queryByName(dependentItem.getString("projectName"));
ObjectNode dependentItem = (ObjectNode) dependItemList.path(k);
Project dependentItemProject = projectMapper.queryByName(dependentItem.path("projectName").asText());
if(dependentItemProject != null){
ProcessDefinition definition = processDefineMapper.queryByDefineName(dependentItemProject.getId(),dependentItem.getString("definitionName"));
ProcessDefinition definition = processDefineMapper.queryByDefineName(dependentItemProject.getId(),dependentItem.path("definitionName").asText());
if(definition != null){
dependentItem.put("projectId",dependentItemProject.getId());
dependentItem.put("definitionId",definition.getId());
@@ -97,7 +99,7 @@ public class DependentParam implements ProcessAddTaskParam, InitializingBean {
}
}
}
taskNode.put("dependence", dependentParameters);
((ObjectNode)taskNode).set(DEPENDENCE, dependentParameters);
}
return taskNode;
}
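
A subtlety worth noting in the rewritten traversal: Jackson's get() returns null for a missing field, while path() returns a MissingNode that is safe to chain, so the casts applied to get() results assume the field exists. A small sketch (hypothetical JSON, jackson-databind only):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class GetVsPathSketch {
    public static void main(String[] args) throws Exception {
        JsonNode node = new ObjectMapper().readTree("{\"dependTaskList\":[]}");

        // get() yields null for absent fields, so chained calls would NPE
        System.out.println(node.get("missing"));             // null

        // path() yields a MissingNode, so chained calls stay safe
        System.out.println(node.path("missing").asText());   // "" (empty)
        System.out.println(node.path("dependTaskList").size()); // 0
    }
}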

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/exportprocess/ProcessAddTaskParam.java

@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.api.utils.exportprocess;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
/**
* ProcessAddTaskParam
@@ -28,12 +28,12 @@ public interface ProcessAddTaskParam {
* @param taskNode task node json object
* @return task node json object
*/
JSONObject addExportSpecialParam(JSONObject taskNode);
JsonNode addExportSpecialParam(JsonNode taskNode);
/**
* add task special param: sql task dependent task
* @param taskNode task node json object
* @return task node json object
*/
JSONObject addImportSpecialParam(JSONObject taskNode);
JsonNode addImportSpecialParam(JsonNode taskNode);
}

5
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java

@@ -46,7 +46,7 @@ public class ProjectControllerTest extends AbstractControllerTest{
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("projectName","project_test1");
paramsMap.add("desc","the test project");
paramsMap.add("description","the test project");
MvcResult mvcResult = mockMvc.perform(post("/projects/create")
.header(SESSION_ID, sessionId)
@@ -56,7 +56,8 @@
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
Assert.assertNotNull(result.getData());
logger.info(mvcResult.getResponse().getContentAsString());
}

8
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

@@ -16,8 +16,8 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.enums.Status;
@@ -621,8 +621,8 @@ public class ProcessDefinitionServiceTest {
"\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
JSONObject jsonObject = JSONUtils.parseObject(topProcessJson);
JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
ObjectNode jsonObject = JSONUtils.parseObject(topProcessJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.path("tasks");
String originSubJson = jsonArray.toString();

19
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/exportprocess/DataSourceParamTest.java

@@ -16,7 +16,8 @@
*/
package org.apache.dolphinscheduler.api.utils.exportprocess;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
@@ -48,13 +49,13 @@ public class DataSourceParamTest {
"\"preTasks\":[\"dependent\"]}";
JSONObject taskNode = JSONUtils.parseObject(sqlJson);
if (StringUtils.isNotEmpty(taskNode.getString("type"))) {
String taskType = taskNode.getString("type");
ObjectNode taskNode = JSONUtils.parseObject(sqlJson);
if (StringUtils.isNotEmpty(taskNode.path("type").asText())) {
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
JSONObject sql = addTaskParam.addExportSpecialParam(taskNode);
JsonNode sql = addTaskParam.addExportSpecialParam(taskNode);
JSONAssert.assertEquals(taskNode.toString(), sql.toString(), false);
}
@@ -72,13 +73,13 @@
"\"taskInstancePriority\":\"MEDIUM\",\"name\":\"mysql\",\"dependence\":{}," +
"\"retryInterval\":\"1\",\"preTasks\":[\"dependent\"],\"id\":\"tasks-8745\"}";
JSONObject taskNode = JSONUtils.parseObject(sqlJson);
if (StringUtils.isNotEmpty(taskNode.getString("type"))) {
String taskType = taskNode.getString("type");
ObjectNode taskNode = JSONUtils.parseObject(sqlJson);
if (StringUtils.isNotEmpty(taskNode.path("type").asText())) {
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
JSONObject sql = addTaskParam.addImportSpecialParam(taskNode);
JsonNode sql = addTaskParam.addImportSpecialParam(taskNode);
JSONAssert.assertEquals(taskNode.toString(), sql.toString(), false);
}

34
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/exportprocess/DependentParamTest.java

@@ -17,6 +17,8 @@
package org.apache.dolphinscheduler.api.utils.exportprocess;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
@@ -43,13 +45,13 @@ public class DependentParamTest {
"\"dependItemList\":[{\"projectId\":2,\"definitionId\":46,\"depTasks\":\"ALL\"," +
"\"cycle\":\"day\",\"dateValue\":\"today\"}]}]}}";
JSONObject taskNode = JSONUtils.parseObject(dependentJson);
if (StringUtils.isNotEmpty(taskNode.getString("type"))) {
String taskType = taskNode.getString("type");
ObjectNode taskNode = JSONUtils.parseObject(dependentJson);
if (StringUtils.isNotEmpty(taskNode.path("type").asText())) {
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
JSONObject dependent = addTaskParam.addExportSpecialParam(taskNode);
JsonNode dependent = addTaskParam.addExportSpecialParam(taskNode);
JSONAssert.assertEquals(taskNode.toString(), dependent.toString(), false);
}
@@ -57,13 +59,13 @@
String dependentEmpty = "{\"type\":\"DEPENDENT\",\"id\":\"tasks-33787\"," +
"\"name\":\"dependent\",\"params\":{},\"description\":\"\",\"runFlag\":\"NORMAL\"}";
JSONObject taskEmpty = JSONUtils.parseObject(dependentEmpty);
if (StringUtils.isNotEmpty(taskEmpty.getString("type"))) {
String taskType = taskEmpty.getString("type");
ObjectNode taskEmpty = JSONUtils.parseObject(dependentEmpty);
if (StringUtils.isNotEmpty(taskEmpty.path("type").asText())) {
String taskType = taskEmpty.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
JSONObject dependent = addTaskParam.addImportSpecialParam(taskEmpty);
JsonNode dependent = addTaskParam.addImportSpecialParam(taskEmpty);
JSONAssert.assertEquals(taskEmpty.toString(), dependent.toString(), false);
}
@@ -81,13 +83,13 @@
"\"projectId\":1,\"cycle\":\"day\",\"definitionId\":7}],\"relation\":\"AND\"}]," +
"\"relation\":\"AND\"},\"retryInterval\":\"1\",\"preTasks\":[],\"id\":\"tasks-55485\"}";
JSONObject taskNode = JSONUtils.parseObject(dependentJson);
if (StringUtils.isNotEmpty(taskNode.getString("type"))) {
String taskType = taskNode.getString("type");
ObjectNode taskNode = JSONUtils.parseObject(dependentJson);
if (StringUtils.isNotEmpty(taskNode.path("type").asText())) {
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
JSONObject dependent = addTaskParam.addImportSpecialParam(taskNode);
JsonNode dependent = addTaskParam.addImportSpecialParam(taskNode);
JSONAssert.assertEquals(taskNode.toString(), dependent.toString(), false);
}
@@ -97,13 +99,13 @@
"\"strategy\":\"\"},\"maxRetryTimes\":\"0\",\"taskInstancePriority\":\"MEDIUM\"" +
",\"name\":\"dependent\",\"retryInterval\":\"1\",\"preTasks\":[],\"id\":\"tasks-55485\"}";
JSONObject taskNodeEmpty = JSONUtils.parseObject(dependentEmpty);
if (StringUtils.isNotEmpty(taskNodeEmpty.getString("type"))) {
String taskType = taskNodeEmpty.getString("type");
JsonNode taskNodeEmpty = JSONUtils.parseObject(dependentEmpty);
if (StringUtils.isNotEmpty(taskNodeEmpty.path("type").asText())) {
String taskType = taskNodeEmpty.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
JSONObject dependent = addTaskParam.addImportSpecialParam(taskNode);
JsonNode dependent = addTaskParam.addImportSpecialParam(taskNode);
JSONAssert.assertEquals(taskNodeEmpty.toString(), dependent.toString(), false);
}

5
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@@ -103,6 +103,11 @@ public final class Constants {
*/
public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address";
/**
* yarn.job.history.status.address
*/
public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address";
/**
* hdfs configuration
* hdfs.root.user

20
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java

@@ -18,6 +18,9 @@ package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
import java.util.HashMap;
import java.util.Map;
/**
* command types
*/
@@ -66,11 +69,18 @@ public enum CommandType {
return descp;
}
public static CommandType of(Integer status){
for(CommandType cmdType : values()){
if(cmdType.getCode() == status){
return cmdType;
}
private static final Map<Integer, CommandType> COMMAND_TYPE_MAP = new HashMap<>();
static {
for (CommandType commandType : CommandType.values()) {
COMMAND_TYPE_MAP.put(commandType.code,commandType);
}
}
public static CommandType of(Integer status) {
if (COMMAND_TYPE_MAP.containsKey(status)) {
return COMMAND_TYPE_MAP.get(status);
}
throw new IllegalArgumentException("invalid status : " + status);
}
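
DbType and ExecutionStatus below receive the same treatment, and the EnumBenchMark added at the end of this change measures it: a per-call linear scan of values() is replaced by a map built once at class-load time. The pattern in isolation (hypothetical Color enum, not part of this patch):

import java.util.HashMap;
import java.util.Map;

public enum Color {
    RED(0), GREEN(1), BLUE(2);

    private final int code;

    Color(int code) {
        this.code = code;
    }

    // built once when the class loads; of() becomes an O(1) lookup
    private static final Map<Integer, Color> CODE_MAP = new HashMap<>();

    static {
        for (Color color : values()) {
            CODE_MAP.put(color.code, color);
        }
    }

    public static Color of(int code) {
        Color color = CODE_MAP.get(code);
        if (color == null) {
            throw new IllegalArgumentException("invalid code : " + code);
        }
        return color;
    }
}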

16
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java

@@ -18,6 +18,8 @@ package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
import java.util.HashMap;
/**
* data base types
*/
@@ -59,11 +61,17 @@ public enum DbType {
}
private static HashMap<Integer, DbType> DB_TYPE_MAP =new HashMap<>();
static {
for (DbType dbType:DbType.values()){
DB_TYPE_MAP.put(dbType.getCode(),dbType);
}
}
public static DbType of(int type){
for(DbType ty : values()){
if(ty.getCode() == type){
return ty;
}
if(DB_TYPE_MAP.containsKey(type)){
return DB_TYPE_MAP.get(type);
}
throw new IllegalArgumentException("invalid type : " + type);
}

17
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java

@@ -19,6 +19,8 @@ package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
import java.util.HashMap;
/**
* running status for workflow and task nodes
*
@@ -62,6 +64,13 @@ public enum ExecutionStatus {
private final int code;
private final String descp;
private static HashMap<Integer, ExecutionStatus> EXECUTION_STATUS_MAP=new HashMap<>();
static {
for (ExecutionStatus executionStatus:ExecutionStatus.values()){
EXECUTION_STATUS_MAP.put(executionStatus.code,executionStatus);
}
}
/**
* status is success
@@ -130,11 +139,9 @@
}
public static ExecutionStatus of(int status){
for(ExecutionStatus es : values()){
if(es.getCode() == status){
return es;
}
}
if(EXECUTION_STATUS_MAP.containsKey(status)){
return EXECUTION_STATUS_MAP.get(status);
}
throw new IllegalArgumentException("invalid status : " + status);
}
}

41
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopJobType.java

@@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
public enum SqoopJobType {
CUSTOM(0, "CUSTOM"),
TEMPLATE(1, "TEMPLATE");
SqoopJobType(int code, String descp){
this.code = code;
this.descp = descp;
}
@EnumValue
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
}

13
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java

@@ -90,6 +90,11 @@ public class FlinkParameters extends AbstractParameters {
*/
private String others;
/**
* flink version
*/
private String flinkVersion;
/**
* program type
* 0 JAVA,1 SCALA,2 PYTHON
@@ -200,6 +205,14 @@
this.programType = programType;
}
public String getFlinkVersion() {
return flinkVersion;
}
public void setFlinkVersion(String flinkVersion) {
this.flinkVersion = flinkVersion;
}
@Override
public boolean checkParameters() {
return mainJar != null && programType != null;

27
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java

@@ -56,6 +56,17 @@ public class HttpParameters extends AbstractParameters {
private String condition;
/**
* Connect Timeout
* Unit: ms
*/
private int connectTimeout ;
/**
* Socket Timeout
* Unit: ms
*/
private int socketTimeout ;
@Override
public boolean checkParameters() {
@@ -106,4 +117,20 @@
public void setCondition(String condition) {
this.condition = condition;
}
public int getConnectTimeout() {
return connectTimeout;
}
public void setConnectTimeout(int connectTimeout) {
this.connectTimeout = connectTimeout;
}
public int getSocketTimeout() {
return socketTimeout;
}
public void setSocketTimeout(int socketTimeout) {
this.socketTimeout = socketTimeout;
}
}
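
The new timeout fields arrive as quoted strings in the stored task JSON ("10000"); both fastjson and Jackson coerce numeric strings into int fields by default, which the HttpParametersTest added later in this change relies on. A minimal sketch of that coercion, assuming jackson-databind and the class above on the classpath:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.dolphinscheduler.common.task.http.HttpParameters;

public class TimeoutCoercionSketch {
    public static void main(String[] args) throws Exception {
        // numeric strings coerce into the new int fields on deserialization
        String json = "{\"connectTimeout\":\"10000\",\"socketTimeout\":\"10000\"}";
        HttpParameters params = new ObjectMapper().readValue(json, HttpParameters.class);
        System.out.println(params.getConnectTimeout()); // 10000
        System.out.println(params.getSocketTimeout());  // 10000
    }
}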

99
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java

@@ -16,6 +16,8 @@
*/
package org.apache.dolphinscheduler.common.task.sqoop;
import org.apache.dolphinscheduler.common.enums.SqoopJobType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.utils.StringUtils;
@@ -28,6 +30,23 @@ import java.util.List;
*/
public class SqoopParameters extends AbstractParameters {
/**
* sqoop job type:
* CUSTOM - custom sqoop job
* TEMPLATE - sqoop template job
*/
private String jobType;
/**
* when jobType is CUSTOM, use customShell
*/
private String customShell;
/**
* sqoop job name - map-reduce job name
*/
private String jobName;
/**
* model type
*/
@@ -53,6 +72,16 @@
*/
private String targetParams;
/**
* hadoop custom param for sqoop job
*/
private List<Property> hadoopCustomParams;
/**
* sqoop advanced param
*/
private List<Property> sqoopAdvancedParams;
public String getModelType() {
return modelType;
}
@@ -101,18 +130,74 @@
this.targetParams = targetParams;
}
public String getJobType() {
return jobType;
}
public void setJobType(String jobType) {
this.jobType = jobType;
}
public String getJobName() {
return jobName;
}
public void setJobName(String jobName) {
this.jobName = jobName;
}
public String getCustomShell() {
return customShell;
}
public void setCustomShell(String customShell) {
this.customShell = customShell;
}
public List<Property> getHadoopCustomParams() {
return hadoopCustomParams;
}
public void setHadoopCustomParams(List<Property> hadoopCustomParams) {
this.hadoopCustomParams = hadoopCustomParams;
}
public List<Property> getSqoopAdvancedParams() {
return sqoopAdvancedParams;
}
public void setSqoopAdvancedParams(List<Property> sqoopAdvancedParams) {
this.sqoopAdvancedParams = sqoopAdvancedParams;
}
@Override
public boolean checkParameters() {
return StringUtils.isNotEmpty(modelType)&&
concurrency != 0 &&
StringUtils.isNotEmpty(sourceType)&&
StringUtils.isNotEmpty(targetType)&&
StringUtils.isNotEmpty(sourceParams)&&
StringUtils.isNotEmpty(targetParams);
boolean sqoopParamsCheck = false;
if (StringUtils.isEmpty(jobType)) {
return sqoopParamsCheck;
}
if (SqoopJobType.TEMPLATE.getDescp().equals(jobType)) {
sqoopParamsCheck = StringUtils.isEmpty(customShell) &&
StringUtils.isNotEmpty(modelType) &&
StringUtils.isNotEmpty(jobName) &&
concurrency != 0 &&
StringUtils.isNotEmpty(sourceType) &&
StringUtils.isNotEmpty(targetType) &&
StringUtils.isNotEmpty(sourceParams) &&
StringUtils.isNotEmpty(targetParams);
} else if (SqoopJobType.CUSTOM.getDescp().equals(jobType)) {
sqoopParamsCheck = StringUtils.isNotEmpty(customShell) &&
StringUtils.isEmpty(jobName);
}
return sqoopParamsCheck;
}
@Override
public List<ResourceInfo> getResourceFilesList() {
return new ArrayList<>();
return new ArrayList<>();
}
}
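
The reworked checkParameters branches on the new job type: a TEMPLATE job must supply the full source/target template and no custom shell, while a CUSTOM job only needs the shell and must leave jobName empty. A hedged sketch of that contract (setter names assumed from the field list above, not shown in this hunk):

import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;

public class SqoopParamsCheckSketch {
    public static void main(String[] args) {
        // CUSTOM: a shell script is required, and jobName must stay empty
        SqoopParameters custom = new SqoopParameters();
        custom.setJobType("CUSTOM");
        custom.setCustomShell("sqoop import --connect jdbc:mysql://host/db ...");
        System.out.println(custom.checkParameters()); // true

        // TEMPLATE: full template fields, and customShell must stay empty
        SqoopParameters template = new SqoopParameters();
        template.setJobType("TEMPLATE");
        template.setJobName("ds_sqoop_job");
        template.setModelType("import");
        template.setConcurrency(1);
        template.setSourceType("MYSQL");
        template.setTargetType("HDFS");
        template.setSourceParams("{\"srcTable\":\"t\"}");
        template.setTargetParams("{\"targetPath\":\"/tmp\"}");
        System.out.println(template.checkParameters()); // true
    }
}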

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java

@@ -106,7 +106,7 @@ public class TargetMysqlParameter {
this.preQuery = preQuery;
}
public boolean isUpdate() {
public boolean getIsUpdate() {
return isUpdate;
}
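
The isUpdate() to getIsUpdate() rename is presumably about JSON property naming: under bean conventions a boolean getter isUpdate() maps to the property "update", while getIsUpdate() maps to "isUpdate", matching the stored task JSON. A quick illustration (hypothetical class, jackson-databind):

import com.fasterxml.jackson.databind.ObjectMapper;

public class BooleanGetterSketch {
    private boolean isUpdate = true;

    // isUpdate() would serialize as "update"; getIsUpdate() keeps "isUpdate"
    public boolean getIsUpdate() {
        return isUpdate;
    }

    public static void main(String[] args) throws Exception {
        // prints {"isUpdate":true} rather than {"update":true}
        System.out.println(new ObjectMapper().writeValueAsString(new BooleanGetterSketch()));
    }
}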

59
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java

@@ -16,16 +16,16 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.commons.io.IOUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
@@ -59,6 +59,7 @@ public class HadoopUtils implements Closeable {
public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler");
public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS);
private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
@@ -114,11 +115,11 @@
String resourceStorageType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType);
if (resUploadType == ResUploadType.HDFS){
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE,false)){
if (resUploadType == ResUploadType.HDFS) {
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos");
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
hdfsUser = "";
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
@@ -195,7 +196,7 @@
*/
String appUrl = "";
//not use resourcemanager
if (rmHaIds.contains(Constants.YARN_RESOURCEMANAGER_HA_XX)){
if (rmHaIds.contains(Constants.YARN_RESOURCEMANAGER_HA_XX)) {
yarnEnabled = false;
logger.warn("should not step here");
@@ -212,6 +213,12 @@
return String.format(appUrl, applicationId);
}
public String getJobHistoryUrl(String applicationId) {
//eg:application_1587475402360_712719 -> job_1587475402360_712719
String jobId = applicationId.replace("application", "job");
return String.format(jobHistoryAddress, jobId);
}
/**
* cat file on hdfs
*
@@ -389,9 +396,10 @@
/**
* hadoop resourcemanager enabled or not
*
* @return result
*/
public boolean isYarnEnabled() {
public boolean isYarnEnabled() {
return yarnEnabled;
}
@@ -407,12 +415,22 @@
return null;
}
String result = Constants.FAILED;
String applicationUrl = getApplicationUrl(applicationId);
logger.info("applicationUrl={}", applicationUrl);
String responseContent = HttpUtils.get(applicationUrl);
JSONObject jsonObject = JSON.parseObject(responseContent);
String result = jsonObject.getJSONObject("app").getString("finalStatus");
if (responseContent != null) {
JSONObject jsonObject = JSON.parseObject(responseContent);
result = jsonObject.getJSONObject("app").getString("finalStatus");
} else {
//may be in job history
String jobHistoryUrl = getJobHistoryUrl(applicationId);
logger.info("jobHistoryUrl={}", jobHistoryUrl);
responseContent = HttpUtils.get(jobHistoryUrl);
JSONObject jsonObject = JSONObject.parseObject(responseContent);
result = jsonObject.getJSONObject("job").getString("state");
}
switch (result) {
case Constants.ACCEPTED:
@@ -435,6 +453,7 @@
/**
* get data hdfs path
*
* @return data hdfs path
*/
public static String getHdfsDataBasePath() {
@@ -452,7 +471,7 @@
* @param tenantCode tenant code
* @return hdfs resource dir
*/
public static String getHdfsDir(ResourceType resourceType,String tenantCode) {
public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
String hdfsDir = "";
if (resourceType.equals(ResourceType.FILE)) {
hdfsDir = getHdfsResDir(tenantCode);
@@ -497,16 +516,16 @@
/**
* get hdfs file name
*
* @param resourceType resource type
* @param tenantCode tenant code
* @param fileName file name
* @param resourceType resource type
* @param tenantCode tenant code
* @param fileName file name
* @return hdfs file name
*/
public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/","");
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsDir(resourceType,tenantCode), fileName);
return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName);
}
/**
@@ -518,7 +537,7 @@
*/
public static String getHdfsResourceFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/","");
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
}
@@ -532,7 +551,7 @@
*/
public static String getHdfsUdfFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/","");
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
}
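
The status-lookup change above covers the case where the ResourceManager has already evicted a finished application (for instance once yarn's completed-application limit is hit): if the RM request returns nothing, the id is rewritten from application_* to job_* and the MapReduce job history server is queried instead. A simplified sketch of that flow; the stub httpGet stands in for HttpUtils.get, and the helper names are illustrative only:

import com.fasterxml.jackson.databind.ObjectMapper;

public class YarnStatusFallbackSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // stand-in for HttpUtils.get(url): body on success, null on failure
    static String httpGet(String url) {
        return null; // always "fail" here, forcing the fallback branch
    }

    static String finalStatus(String appAddress, String historyAddress,
                              String applicationId) throws Exception {
        String body = httpGet(String.format(appAddress, applicationId));
        if (body != null) {
            return MAPPER.readTree(body).path("app").path("finalStatus").asText();
        }
        // application_1587475402360_712719 -> job_1587475402360_712719
        String jobId = applicationId.replace("application", "job");
        body = httpGet(String.format(historyAddress, jobId));
        return MAPPER.readTree(body == null ? "{}" : body)
                .path("job").path("state").asText("UNKNOWN");
    }

    public static void main(String[] args) throws Exception {
        System.out.println(finalStatus(
                "http://ark1:8088/ws/v1/cluster/apps/%s",
                "http://ark1:19888/ws/v1/history/mapreduce/jobs/%s",
                "application_1587475402360_712719")); // UNKNOWN (both stubs fail)
    }
}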

388
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java

@@ -16,13 +16,12 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,230 +34,233 @@ import java.util.*;
*/
public class JSONUtils {
private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class);
/**
* can use static singleton, inject: just make sure to reuse!
*/
private static final ObjectMapper objectMapper = new ObjectMapper();
private JSONUtils() {
//Feature that determines whether encountering of unknown properties, false means not analyzer unknown properties
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault());
}
/**
* json representation of object
* @param object object
* @return object to json string
*/
public static String toJson(Object object) {
try{
return JSON.toJSONString(object,false);
} catch (Exception e) {
logger.error("object to json exception!",e);
private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class);
/**
* can use static singleton, inject: just make sure to reuse!
*/
private static final ObjectMapper objectMapper = new ObjectMapper();
private JSONUtils() {
}
return null;
}
/**
*
* This method deserializes the specified Json into an object of the specified class. It is not
* suitable to use if the specified class is a generic type since it will not have the generic
* type information because of the Type Erasure feature of Java. Therefore, this method should not
* be used if the desired type is a generic type. Note that this method works fine if the any of
* the fields of the specified object are generics, just the object itself should not be a
* generic type.
*
* @param json the string from which the object is to be deserialized
* @param clazz the class of T
* @param <T> T
* @return an object of type T from the string
* classOfT
*/
public static <T> T parseObject(String json, Class<T> clazz) {
if (StringUtils.isEmpty(json)) {
return null;
static {
//Feature that determines whether encountering unknown properties should fail; false means unknown properties are ignored
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault());
objectMapper.configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true).setTimeZone(TimeZone.getDefault());
}
try {
return JSON.parseObject(json, clazz);
} catch (Exception e) {
logger.error("parse object exception!",e);
/**
* json representation of object
*
* @param object object
* @return object to json string
*/
public static String toJson(Object object) {
try {
return objectMapper.writeValueAsString(object);
} catch (Exception e) {
logger.error("object to json exception!", e);
}
return null;
}
return null;
}
/**
* json to list
*
* @param json json string
* @param clazz class
* @param <T> T
* @return list
*/
public static <T> List<T> toList(String json, Class<T> clazz) {
if (StringUtils.isEmpty(json)) {
return new ArrayList<>();
/**
* This method deserializes the specified Json into an object of the specified class. It is not
* suitable to use if the specified class is a generic type since it will not have the generic
* type information because of the Type Erasure feature of Java. Therefore, this method should not
be used if the desired type is a generic type. Note that this method works fine if any of
* the fields of the specified object are generics, just the object itself should not be a
* generic type.
*
* @param json the string from which the object is to be deserialized
* @param clazz the class of T
* @param <T> T
* @return an object of type T from the string
* classOfT
*/
public static <T> T parseObject(String json, Class<T> clazz) {
if (StringUtils.isEmpty(json)) {
return null;
}
try {
return objectMapper.readValue(json, clazz);
} catch (Exception e) {
logger.error("parse object exception!", e);
}
return null;
}
try {
return JSONArray.parseArray(json, clazz);
} catch (Exception e) {
logger.error("JSONArray.parseArray exception!",e);
/**
* json to list
*
* @param json json string
* @param clazz class
* @param <T> T
* @return list
*/
public static <T> List<T> toList(String json, Class<T> clazz) {
if (StringUtils.isEmpty(json)) {
return new ArrayList<>();
}
try {
return objectMapper.readValue(json, new TypeReference<List<T>>() {
});
} catch (Exception e) {
logger.error("JSONArray.parseArray exception!", e);
}
return new ArrayList<>();
}
return new ArrayList<>();
}
/**
* check json object valid
*
* @param json json
* @return true if valid
*/
public static boolean checkJsonValid(String json) {
if (StringUtils.isEmpty(json)) {
return false;
}
/**
* check json object valid
*
* @param json json
* @return true if valid
*/
public static boolean checkJsonValid(String json) {
try {
objectMapper.readTree(json);
return true;
} catch (IOException e) {
logger.error("check json object valid exception!", e);
}
if (StringUtils.isEmpty(json)) {
return false;
return false;
}
try {
objectMapper.readTree(json);
return true;
} catch (IOException e) {
logger.error("check json object valid exception!",e);
}
return false;
}
/**
* Method for finding a JSON Object field with specified name in this
* node or its child nodes, and returning value it has.
* If no matching field is found in this node or its descendants, returns null.
*
* @param jsonNode json node
* @param fieldName Name of field to look for
*
* @return Value of first matching node found, if any; null if none
*/
public static String findValue(JsonNode jsonNode, String fieldName) {
JsonNode node = jsonNode.findValue(fieldName);
if (node == null) {
return null;
}
/**
* Method for finding a JSON Object field with specified name in this
* node or its child nodes, and returning value it has.
* If no matching field is found in this node or its descendants, returns null.
*
* @param jsonNode json node
* @param fieldName Name of field to look for
* @return Value of first matching node found, if any; null if none
*/
public static String findValue(JsonNode jsonNode, String fieldName) {
JsonNode node = jsonNode.findValue(fieldName);
return node.toString();
}
/**
* json to map
*
* {@link #toMap(String, Class, Class)}
*
* @param json json
* @return json to map
*/
public static Map<String, String> toMap(String json) {
if (StringUtils.isEmpty(json)) {
return null;
}
if (node == null) {
return null;
}
try {
return JSON.parseObject(json, new TypeReference<HashMap<String, String>>(){});
} catch (Exception e) {
logger.error("json to map exception!",e);
return node.toString();
}
return null;
}
/**
*
* json to map
*
* @param json json
* @param classK classK
* @param classV classV
* @param <K> K
* @param <V> V
* @return to map
*/
public static <K, V> Map<K, V> toMap(String json, Class<K> classK, Class<V> classV) {
if (StringUtils.isEmpty(json)) {
return null;
/**
* json to map
* <p>
* {@link #toMap(String, Class, Class)}
*
* @param json json
* @return json to map
*/
public static Map<String, String> toMap(String json) {
if (StringUtils.isEmpty(json)) {
return null;
}
try {
return objectMapper.readValue(json, new TypeReference<Map<String, String>>() {});
} catch (Exception e) {
logger.error("json to map exception!", e);
}
return null;
}
try {
return JSON.parseObject(json, new TypeReference<HashMap<K, V>>() {});
} catch (Exception e) {
logger.error("json to map exception!",e);
/**
* json to map
*
* @param json json
* @param classK classK
* @param classV classV
* @param <K> K
* @param <V> V
* @return to map
*/
public static <K, V> Map<K, V> toMap(String json, Class<K> classK, Class<V> classV) {
if (StringUtils.isEmpty(json)) {
return null;
}
try {
return objectMapper.readValue(json, new TypeReference<Map<K, V>>() {
});
} catch (Exception e) {
logger.error("json to map exception!", e);
}
return null;
}
return null;
}
/**
* object to json string
* @param object object
* @return json string
*/
public static String toJsonString(Object object) {
try{
return JSON.toJSONString(object,false);
} catch (Exception e) {
throw new RuntimeException("Object json deserialization exception.", e);
/**
* object to json string
*
* @param object object
* @return json string
*/
public static String toJsonString(Object object) {
try {
return objectMapper.writeValueAsString(object);
} catch (Exception e) {
throw new RuntimeException("Object json deserialization exception.", e);
}
}
}
public static JSONObject parseObject(String text) {
try{
return JSON.parseObject(text);
} catch (Exception e) {
throw new RuntimeException("String json deserialization exception.", e);
public static ObjectNode parseObject(String text) {
try {
return (ObjectNode) objectMapper.readTree(text);
} catch (Exception e) {
throw new RuntimeException("String json deserialization exception.", e);
}
}
}
public static JSONArray parseArray(String text) {
try{
return JSON.parseArray(text);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
public static ArrayNode parseArray(String text) {
try {
return (ArrayNode) objectMapper.readTree(text);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
}
}
}
/**
* json serializer
*/
public static class JsonDataSerializer extends JsonSerializer<String> {
/**
* json serializer
*/
public static class JsonDataSerializer extends JsonSerializer<String> {
@Override
public void serialize(String value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeRawValue(value);
}
@Override
public void serialize(String value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeRawValue(value);
}
}
/**
* json data deserializer
*/
public static class JsonDataDeserializer extends JsonDeserializer<String> {
/**
* json data deserializer
*/
public static class JsonDataDeserializer extends JsonDeserializer<String> {
@Override
public String deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
JsonNode node = p.getCodec().readTree(p);
return node.toString();
}
@Override
public String deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
JsonNode node = p.getCodec().readTree(p);
return node.toString();
}
}
}
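
One behavioral difference from the fastjson-to-Jackson switch is strictness: fastjson tolerated trailing commas, while Jackson rejects them by default, which is why the JSONUtilsTest fixtures below drop theirs. A quick check against the migrated utility (assuming it is on the classpath):

import org.apache.dolphinscheduler.common.utils.JSONUtils;

public class TrailingCommaSketch {
    public static void main(String[] args) {
        // Jackson's parse error is caught and logged; toMap returns null
        System.out.println(JSONUtils.toMap("{\"foo\": \"bar\",}")); // null
        // without the trailing comma the input parses as before
        System.out.println(JSONUtils.toMap("{\"foo\": \"bar\"}"));  // {foo=bar}
    }
}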

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java

@@ -196,7 +196,7 @@ public class ParameterUtils {
property.setValue(val);
}
}
return JSON.toJSONString(globalParamList);
return JSONUtils.toJson(globalParamList);
}

36
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StreamUtils.java

@@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import java.util.Iterator;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
public class StreamUtils {
private StreamUtils() { }
public static <T> Stream<T> asStream(Iterator<T> sourceIterator) {
return asStream(sourceIterator, false);
}
public static <T> Stream<T> asStream(Iterator<T> sourceIterator, boolean parallel) {
Iterable<T> iterable = () -> sourceIterator;
return StreamSupport.stream(iterable.spliterator(), parallel);
}
}

14
dolphinscheduler-common/src/main/resources/common.properties

@@ -18,7 +18,7 @@
# resource storage type : HDFS,S3,NONE
resource.storage.type=NONE
# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions"/dolphinscheduler" is recommended
# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions."/dolphinscheduler" is recommended
#resource.upload.path=/dolphinscheduler
# user data local directory path, please make sure the directory exists and have read write permissions
@@ -42,16 +42,16 @@ resource.storage.type=NONE
# if resource.storage.type=HDFS, the user need to have permission to create directories under the HDFS root path
hdfs.root.user=hdfs
# if resource.storage.type=S3the value like: s3a://dolphinscheduler ; if resource.storage.type=HDFS, When namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir
# if resource.storage.type=S3,the value like: s3a://dolphinscheduler ; if resource.storage.type=HDFS, When namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir
fs.defaultFS=hdfs://mycluster:8020
# if resource.storage.type=S3s3 endpoint
# if resource.storage.type=S3,s3 endpoint
#fs.s3a.endpoint=http://192.168.199.91:9010
# if resource.storage.type=S3s3 access key
# if resource.storage.type=S3,s3 access key
#fs.s3a.access.key=A3DXS30FO22544RE
# if resource.storage.type=S3s3 secret key
# if resource.storage.type=S3,s3 secret key
#fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK
# if not use hadoop resourcemanager, please keep default value; if resourcemanager HA enable, please type the HA ips ; if resourcemanager is single, make this value empty
@@ -59,8 +59,10 @@ yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
# If resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname.
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
# job history status url when the application number threshold is reached (default 10000, but it may be set to 1000)
yarn.job.history.status.address=http://ark1:19888/ws/v1/history/mapreduce/jobs/%s
# system env path
#dolphinscheduler.env.path=env/dolphinscheduler_env.sh
development.state=false
kerberos.expire.time=7
kerberos.expire.time=7

91
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/HttpParametersTest.java

@@ -0,0 +1,91 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.task;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.common.enums.HttpCheckCondition;
import org.apache.dolphinscheduler.common.enums.HttpMethod;
import org.apache.dolphinscheduler.common.process.HttpProperty;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.http.HttpParameters;
import org.junit.Assert;
import org.junit.Test;
import com.alibaba.fastjson.JSON;
/**
* http parameter
*/
public class HttpParametersTest {
@Test
public void testGenerator(){
String paramData = "{\"localParams\":[],\"httpParams\":[],\"url\":\"https://www.baidu.com/\"," +
"\"httpMethod\":\"GET\",\"httpCheckCondition\":\"STATUS_CODE_DEFAULT\",\"condition\":\"\",\"connectTimeout\":\"10000\",\"socketTimeout\":\"10000\"}";
HttpParameters httpParameters = JSON.parseObject(paramData, HttpParameters.class);
Assert.assertEquals(10000,httpParameters.getConnectTimeout() );
Assert.assertEquals(10000,httpParameters.getSocketTimeout());
Assert.assertEquals("https://www.baidu.com/",httpParameters.getUrl());
Assert.assertEquals(HttpMethod.GET,httpParameters.getHttpMethod());
Assert.assertEquals(HttpCheckCondition.STATUS_CODE_DEFAULT,httpParameters.getHttpCheckCondition());
Assert.assertEquals("",httpParameters.getCondition());
}
@Test
public void testCheckParameters(){
String paramData = "{\"localParams\":[],\"httpParams\":[],\"url\":\"https://www.baidu.com/\"," +
"\"httpMethod\":\"GET\",\"httpCheckCondition\":\"STATUS_CODE_DEFAULT\",\"condition\":\"\",\"connectTimeout\":\"10000\",\"socketTimeout\":\"10000\"}";
HttpParameters httpParameters = JSON.parseObject(paramData, HttpParameters.class);
Assert.assertTrue( httpParameters.checkParameters());
Assert.assertEquals(10000,httpParameters.getConnectTimeout() );
Assert.assertEquals(10000,httpParameters.getSocketTimeout());
Assert.assertEquals("https://www.baidu.com/",httpParameters.getUrl());
Assert.assertEquals(HttpMethod.GET,httpParameters.getHttpMethod());
Assert.assertEquals(HttpCheckCondition.STATUS_CODE_DEFAULT,httpParameters.getHttpCheckCondition());
Assert.assertEquals("",httpParameters.getCondition());
}
@Test
public void testCheckValues() {
String paramData = "{\"localParams\":[],\"httpParams\":[],\"url\":\"https://www.baidu.com/\"," +
"\"httpMethod\":\"GET\",\"httpCheckCondition\":\"STATUS_CODE_DEFAULT\",\"condition\":\"\",\"connectTimeout\":\"10000\",\"socketTimeout\":\"10000\"}";
HttpParameters httpParameters = JSON.parseObject(paramData, HttpParameters.class);
Assert.assertTrue( httpParameters.checkParameters());
Assert.assertEquals(10000,httpParameters.getConnectTimeout() );
Assert.assertEquals(10000,httpParameters.getSocketTimeout());
Assert.assertEquals("https://www.baidu.com/",httpParameters.getUrl());
Assert.assertEquals(HttpMethod.GET,httpParameters.getHttpMethod());
Assert.assertEquals(HttpCheckCondition.STATUS_CODE_DEFAULT,httpParameters.getHttpCheckCondition());
Assert.assertEquals("",httpParameters.getCondition());
Assert.assertEquals(0,httpParameters.getLocalParametersMap().size());
Assert.assertEquals(0,httpParameters.getResourceFilesList().size());
}
}

6
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java

@@ -190,6 +190,12 @@ public class HadoopUtilsTest {
logger.info(application_1516778421218_0042);
}
@Test
public void getJobHistoryUrl(){
String application_1516778421218_0042 = hadoopUtils.getJobHistoryUrl("application_1529051418016_0167");
logger.info(application_1516778421218_0042);
}
@Test
public void catFileWithLimitTest() {
List<String> stringList = new ArrayList<>();

36
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java

@@ -17,6 +17,8 @@
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.Direct;
import org.apache.dolphinscheduler.common.process.Property;
@@ -101,9 +103,6 @@ public class JSONUtilsTest {
@Test
public void testParseObject() {
Assert.assertEquals("{\"foo\":\"bar\"}", JSONUtils.parseObject(
"{\n" + "\"foo\": \"bar\",\n" + "}", String.class));
Assert.assertNull(JSONUtils.parseObject("", null));
Assert.assertNull(JSONUtils.parseObject("foo", String.class));
}
@@ -134,15 +133,19 @@
map.put("foo","bar");
Assert.assertTrue(map.equals(JSONUtils.toMap(
"{\n" + "\"foo\": \"bar\",\n" + "}")));
"{\n" + "\"foo\": \"bar\"\n" + "}")));
Assert.assertFalse(map.equals(JSONUtils.toMap(
"{\n" + "\"bar\": \"foo\",\n" + "}")));
"{\n" + "\"bar\": \"foo\"\n" + "}")));
Assert.assertNull(JSONUtils.toMap("3"));
Assert.assertNull(JSONUtils.toMap(null));
Assert.assertNull(JSONUtils.toMap("3", null, null));
Assert.assertNull(JSONUtils.toMap(null, null, null));
String str = "{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}";
Map<String, String> m = JSONUtils.toMap(str);
Assert.assertNotNull(m);
}
@Test
@@ -155,4 +158,27 @@
Assert.assertEquals(String.valueOf((Object) null),
JSONUtils.toJsonString(null));
}
@Test
public void parseObject() {
String str = "{\"color\":\"yellow\",\"type\":\"renault\"}";
ObjectNode node = JSONUtils.parseObject(str);
Assert.assertEquals("yellow", node.path("color").asText());
node.put("price", 100);
Assert.assertEquals(100, node.path("price").asInt());
node.put("color", "red");
Assert.assertEquals("red", node.path("color").asText());
}
@Test
public void parseArray() {
String str = "[{\"color\":\"yellow\",\"type\":\"renault\"}]";
ArrayNode node = JSONUtils.parseArray(str);
Assert.assertEquals("yellow", node.path(0).path("color").asText());
}
}

8
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java

@@ -91,13 +91,13 @@ public class ParameterUtilsTest {
globalParamList.add(property);
String result2 = ParameterUtils.curingGlobalParams(null,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime);
Assert.assertEquals(result2, JSON.toJSONString(globalParamList));
Assert.assertEquals(result2, JSONUtils.toJson(globalParamList));
String result3 = ParameterUtils.curingGlobalParams(globalParamMap,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,null);
Assert.assertEquals(result3, JSON.toJSONString(globalParamList));
Assert.assertEquals(result3, JSONUtils.toJson(globalParamList));
String result4 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime);
Assert.assertEquals(result4, JSON.toJSONString(globalParamList));
Assert.assertEquals(result4, JSONUtils.toJson(globalParamList));
//test var $ startsWith
globalParamMap.put("bizDate","${system.biz.date}");
@@ -113,7 +113,7 @@
globalParamList.add(property4);
String result5 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime);
Assert.assertEquals(result5,JSONUtils.toJsonString(globalParamList));
Assert.assertEquals(result5, JSONUtils.toJson(globalParamList));
}
/**

39
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StreamUtilsTest.java

@@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
public class StreamUtilsTest {
@Test
public void asStream() {
List<String> list = Arrays.asList("a", "b", "c");
List<String> ret = StreamUtils.asStream(list.iterator())
.filter(item -> item.equals("a"))
.collect(Collectors.toList());
Assert.assertEquals("a", ret.get(0));
}
}

101
dolphinscheduler-microbench/pom.xml

@@ -0,0 +1,101 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dolphinscheduler</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>1.2.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-microbench</artifactId>
<packaging>jar</packaging>
<name>${project.artifactId}</name>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<jmh.version>1.21</jmh.version>
<javac.target>1.8</javac.target>
<uberjar.name>benchmarks</uberjar.name>
</properties>
<dependencies>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>
<version>${jmh.version}</version>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<version>${jmh.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<compilerVersion>${javac.target}</compilerVersion>
<source>${javac.target}</source>
<target>${javac.target}</target>
<useIncrementalCompilation>false</useIncrementalCompilation>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
<configuration>
<archive>
<manifest>
<mainClass>org.openjdk.jmh.Main</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
</plugins>
</build>
</project>

123
dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java

@@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.microbench.base;
import org.junit.Test;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.results.format.ResultFormatType;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.ChainedOptionsBuilder;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
/**
* BaseBenchMark
* If you need to test with JMH, please extend this class first
*/
@Warmup(iterations = AbstractBaseBenchmark.DEFAULT_WARMUP_ITERATIONS)
@Measurement(iterations = AbstractBaseBenchmark.DEFAULT_MEASURE_ITERATIONS)
@State(Scope.Thread)
@Fork(AbstractBaseBenchmark.DEFAULT_FORKS)
public abstract class AbstractBaseBenchmark {
static final int DEFAULT_WARMUP_ITERATIONS = 10;
static final int DEFAULT_MEASURE_ITERATIONS = 10;
static final int DEFAULT_FORKS = 2;
private static Logger logger = LoggerFactory.getLogger(AbstractBaseBenchmark.class);
private ChainedOptionsBuilder newOptionsBuilder() {
String className = getClass().getSimpleName();
ChainedOptionsBuilder optBuilder = new OptionsBuilder()
// set benchmark ClassName
.include(className);
if (getMeasureIterations() > 0) {
optBuilder.warmupIterations(getMeasureIterations());
}
if (getMeasureIterations() > 0) {
optBuilder.measurementIterations(getMeasureIterations());
}
if (getForks() > 0) {
optBuilder.forks(getForks());
}
String output = getReportDir();
if (output != null) {
boolean writeFileStatus;
String filePath = getReportDir() + className + ".json";
File file = new File(filePath);
if (file.exists()) {
writeFileStatus = file.delete();
} else {
writeFileStatus = file.getParentFile().mkdirs();
try {
writeFileStatus = file.createNewFile();
} catch (IOException e) {
logger.warn("jmh test create file error" + e);
}
}
if (writeFileStatus) {
optBuilder.resultFormat(ResultFormatType.JSON)
.result(filePath);
}
}
return optBuilder;
}
@Test
public void run() throws Exception {
new Runner(newOptionsBuilder().build()).run();
}
private int getWarmupIterations() {
String value = System.getProperty("warmupIterations");
return null != value ? Integer.parseInt(value) : -1;
}
private int getMeasureIterations() {
String value = System.getProperty("measureIterations");
return null != value ? Integer.parseInt(value) : -1;
}
private static String getReportDir() {
return System.getProperty("perfReportDir");
}
private static int getForks() {
String value = System.getProperty("forkCount");
return null != value ? Integer.parseInt(value) : -1;
}
}

112
dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/EnumBenchMark.java

@@ -0,0 +1,112 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.microbench.common;
import org.apache.dolphinscheduler.microbench.base.AbstractBaseBenchmark;
import org.openjdk.jmh.annotations.*;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Enum values JMH test
 */
@Warmup(iterations = 2, time = 1)
@Measurement(iterations = 4, time = 1)
@State(Scope.Benchmark)
public class EnumBenchMark extends AbstractBaseBenchmark {
@Benchmark
public boolean simpleTest() {
return true;
}
@Param({"101", "108", "103", "104", "105", "103"})
private int testNum;
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public String enumValuesTest() {
// return the looked-up name so JMH consumes the result and cannot dead-code eliminate it
return TestTypeEnum.oldGetNameByType(testNum);
}
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public String enumStaticMapTest() {
return TestTypeEnum.newGetNameByType(testNum);
}
public enum TestTypeEnum {
TYPE_101(101, "TYPE101"),
TYPE_102(102, "TYPE102"),
TYPE_103(103, "TYPE103"),
TYPE_104(104, "TYPE104"),
TYPE_105(105, "TYPE105"),
TYPE_106(106, "TYPE106"),
TYPE_107(107, "TYPE107"),
TYPE_108(108, "TYPE108");
private int code;
private String name;
public int getCode() {
return code;
}
public String getName() {
return name;
}
TestTypeEnum(int code, String name) {
this.code = code;
this.name = name;
}
private static final Map<Integer, TestTypeEnum> TEST_TYPE_MAP = new HashMap<>();
static {
for (TestTypeEnum testTypeEnum : TestTypeEnum.values()) {
TEST_TYPE_MAP.put(testTypeEnum.code,testTypeEnum);
}
}
public static String newGetNameByType(int code) {
// single O(1) map lookup; avoids the containsKey/get double lookup
TestTypeEnum testTypeEnum = TEST_TYPE_MAP.get(code);
if (testTypeEnum != null) {
return testTypeEnum.getName();
}
throw new IllegalArgumentException("invalid code : " + code);
}
public static String oldGetNameByType(int code) {
// linear scan over values(); O(n) per call, the baseline being benchmarked
for (TestTypeEnum testTypeEnum : TestTypeEnum.values()) {
if (testTypeEnum.getCode() == code) {
return testTypeEnum.getName();
}
}
throw new IllegalArgumentException("invalid code : " + code);
}
}
}
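A hedged sketch of a call site for the map-backed lookup above (hypothetical usage, assuming the public nested enum):

// O(1) lookup in the static map instead of scanning values() on every call
String name = EnumBenchMark.TestTypeEnum.newGetNameByType(103); // "TYPE103"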

32
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.master.consumer;
import com.alibaba.fastjson.JSONObject;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.SqoopJobType;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.model.TaskNode;
@ -258,29 +259,32 @@ public class TaskPriorityQueueConsumer extends Thread{
/**
* set datax task relation
* set sqoop task relation
* @param sqoopTaskExecutionContext sqoopTaskExecutionContext
* @param taskNode taskNode
*/
private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) {
SqoopParameters sqoopParameters = JSONObject.parseObject(taskNode.getParams(), SqoopParameters.class);
SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class);
// sqoop job type is template set task relation
if (sqoopParameters.getJobType().equals(SqoopJobType.TEMPLATE.getDescp())) {
SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class);
DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource());
DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource());
DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource());
DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource());
if (dataSource != null){
sqoopTaskExecutionContext.setDataSourceId(dataSource.getId());
sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode());
sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
}
if (dataSource != null){
sqoopTaskExecutionContext.setDataSourceId(dataSource.getId());
sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode());
sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
}
if (dataTarget != null){
sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId());
sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode());
sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
if (dataTarget != null){
sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId());
sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode());
sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
}
}
}
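For illustration, a TEMPLATE job carries the datasource ids inside its serialized params, e.g. sourceParams of {"srcDatasource":2, ...} and targetParams of {"targetDatasource":2, ...} as exercised in SqoopTaskTest below; CUSTOM jobs carry their own shell and skip this relation entirely.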

18
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java

@ -28,10 +28,12 @@ import java.util.List;
/**
* spark args utils
* flink args utils
*/
public class FlinkArgsUtils {
private static final String LOCAL_DEPLOY_MODE = "local";
private static final String FLINK_VERSION_BEFORE_1_10 = "<1.10";
/**
* build args
* @param param flink parameters
@ -44,7 +46,6 @@ public class FlinkArgsUtils {
String tmpDeployMode = param.getDeployMode();
if (StringUtils.isNotEmpty(tmpDeployMode)) {
deployMode = tmpDeployMode;
}
if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
args.add(Constants.FLINK_RUN_MODE); //-m
@ -63,12 +64,15 @@ public class FlinkArgsUtils {
args.add(appName);
}
int taskManager = param.getTaskManager();
if (taskManager != 0) { //-yn
args.add(Constants.FLINK_TASK_MANAGE);
args.add(String.format("%d", taskManager));
// judge the flink version: from Flink 1.10 on, the -yn parameter was removed
String flinkVersion = param.getFlinkVersion();
if (FLINK_VERSION_BEFORE_1_10.equals(flinkVersion)) {
int taskManager = param.getTaskManager();
if (taskManager != 0) { //-yn
args.add(Constants.FLINK_TASK_MANAGE);
args.add(String.format("%d", taskManager));
}
}
String jobManagerMemory = param.getJobManagerMemory();
if (StringUtils.isNotEmpty(jobManagerMemory)) {
args.add(Constants.FLINK_JOB_MANAGE_MEM);
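A minimal sketch of the effect, under the assumption that FlinkParameters exposes the setters its getters above imply (setFlinkVersion is shown in FlinkArgsUtilsTest below; setTaskManager is assumed to match the getTaskManager() call above):

import java.util.List;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.apache.dolphinscheduler.server.utils.FlinkArgsUtils;

public class FlinkVersionArgsSketch {
    public static void main(String[] unused) {
        FlinkParameters param = new FlinkParameters();
        param.setFlinkVersion("<1.10"); // setter exercised in FlinkArgsUtilsTest below
        param.setTaskManager(2);        // assumed setter matching getTaskManager()
        List<String> args = FlinkArgsUtils.buildArgs(param);
        // for "<1.10" the list contains "-yn" and "2"; for ">=1.10" both are omitted
        System.out.println(args);
    }
}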

9
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java

@ -66,13 +66,6 @@ public class HttpTask extends AbstractTask {
*/
private HttpParameters httpParameters;
/**
* Convert mill seconds to second unit
*/
protected static final int MAX_CONNECTION_MILLISECONDS = 60000;
/**
* application json
*/
@ -303,7 +296,7 @@ public class HttpTask extends AbstractTask {
* @return RequestConfig
*/
private RequestConfig requestConfig() {
return RequestConfig.custom().setSocketTimeout(MAX_CONNECTION_MILLISECONDS).setConnectTimeout(MAX_CONNECTION_MILLISECONDS).build();
return RequestConfig.custom().setSocketTimeout(httpParameters.getSocketTimeout()).setConnectTimeout(httpParameters.getConnectTimeout()).build();
}
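For illustration, the two timeouts now arrive through the task's parameter JSON, e.g. the payload used in HttpTaskTest below (values in milliseconds):

{"localParams":[],"httpParams":[],"url":"https://github.com/","httpMethod":"GET","httpCheckCondition":"STATUS_CODE_DEFAULT","condition":"","connectTimeout":"10000","socketTimeout":"10000"}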
/**

39
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java

@ -16,10 +16,17 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* common script generator
*/
@ -32,6 +39,38 @@ public class CommonGenerator {
try{
result.append("sqoop ")
.append(sqoopParameters.getModelType());
//set sqoop job name
result.append(" -D mapred.job.name")
.append(Constants.EQUAL_SIGN)
.append(sqoopParameters.getJobName());
//set hadoop custom param
List<Property> hadoopCustomParams = sqoopParameters.getHadoopCustomParams();
if (CollectionUtils.isNotEmpty(hadoopCustomParams)) {
for (Property hadoopCustomParam : hadoopCustomParams) {
String hadoopCustomParamStr = " -D " + hadoopCustomParam.getProp()
+ Constants.EQUAL_SIGN + hadoopCustomParam.getValue();
if (StringUtils.isNotEmpty(hadoopCustomParamStr)) {
result.append(hadoopCustomParamStr);
}
}
}
//set sqoop advanced custom param
List<Property> sqoopAdvancedParams = sqoopParameters.getSqoopAdvancedParams();
if (CollectionUtils.isNotEmpty(sqoopAdvancedParams)) {
for (Property sqoopAdvancedParam : sqoopAdvancedParams) {
String sqoopAdvancedParamStr = " " + sqoopAdvancedParam.getProp()
+ " " + sqoopAdvancedParam.getValue();
if (StringUtils.isNotEmpty(sqoopAdvancedParamStr)) {
result.append(sqoopAdvancedParamStr);
}
}
}
if (sqoopParameters.getConcurrency() > 0) {
result.append(" -m ")
.append(sqoopParameters.getConcurrency());
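Concretely, for a TEMPLATE import job named sqoop_import with a hadoop custom param mapreduce.map.memory.mb=4096 and a --direct advanced param, the generated script starts with (see the expected strings in SqoopTaskTest below):

sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 ...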

22
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
import org.apache.dolphinscheduler.common.enums.SqoopJobType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HdfsSourceGenerator;
@ -62,14 +63,23 @@ public class SqoopJobGenerator {
* @return
*/
public String generateSqoopJob(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext){
createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType());
if(sourceGenerator == null || targetGenerator == null){
return null;
String sqoopScripts = "";
if (SqoopJobType.TEMPLATE.getDescp().equals(sqoopParameters.getJobType())) {
createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType());
if(sourceGenerator == null || targetGenerator == null){
throw new RuntimeException("sqoop task source type or target type is null");
}
sqoopScripts = commonGenerator.generate(sqoopParameters)
+ sourceGenerator.generate(sqoopParameters,taskExecutionContext)
+ targetGenerator.generate(sqoopParameters,taskExecutionContext);
} else if (SqoopJobType.CUSTOM.getDescp().equals(sqoopParameters.getJobType())) {
sqoopScripts = sqoopParameters.getCustomShell().replaceAll("\\r\\n", "\n");
}
return commonGenerator.generate(sqoopParameters)
+ sourceGenerator.generate(sqoopParameters,taskExecutionContext)
+ targetGenerator.generate(sqoopParameters,taskExecutionContext);
return sqoopScripts;
}
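For the CUSTOM branch, the parameters only need jobType and customShell, e.g. the payload exercised in SqoopTaskTest below:

{"jobType":"CUSTOM","localParams":[],"customShell":"sqoop import"}

The generator returns the shell text verbatim, with Windows line endings normalized to \n.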
/**

14
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java

@ -77,19 +77,19 @@ public class MysqlSourceGenerator implements ISourceGenerator {
}else{
srcQuery += " WHERE $CONDITIONS";
}
result.append(" --query \'"+srcQuery+"\'");
result.append(" --query \'").append(srcQuery).append("\'");
}
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
if(mapColumnHive != null && !mapColumnHive.isEmpty()){
String columnMap = "";
StringBuilder columnMap = new StringBuilder();
for(Property item:mapColumnHive){
columnMap = item.getProp()+"="+ item.getValue()+",";
columnMap.append(item.getProp()).append("=").append(item.getValue()).append(",");
}
if(StringUtils.isNotEmpty(columnMap)){
if(StringUtils.isNotEmpty(columnMap.toString())){
result.append(" --map-column-hive ")
.append(columnMap.substring(0,columnMap.length()-1));
}
@ -98,12 +98,12 @@ public class MysqlSourceGenerator implements ISourceGenerator {
List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava();
if(mapColumnJava != null && !mapColumnJava.isEmpty()){
String columnMap = "";
StringBuilder columnMap = new StringBuilder();
for(Property item:mapColumnJava){
columnMap = item.getProp()+"="+ item.getValue()+",";
columnMap.append(item.getProp()).append("=").append(item.getValue()).append(",");
}
if(StringUtils.isNotEmpty(columnMap)){
if(StringUtils.isNotEmpty(columnMap.toString())){
result.append(" --map-column-java ")
.append(columnMap.substring(0,columnMap.length()-1));
}

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java

@ -78,7 +78,7 @@ public class MysqlTargetGenerator implements ITargetGenerator {
result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'");
}
if(targetMysqlParameter.isUpdate()
if(targetMysqlParameter.getIsUpdate()
&& StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())
&& StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){
result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java

@ -49,6 +49,7 @@ public class FlinkArgsUtilsTest {
public String mainArgs = "testArgs";
public String queue = "queue1";
public String others = "--input file:///home";
public String flinkVersion = "<1.10";
@Before
@ -79,6 +80,7 @@ public class FlinkArgsUtilsTest {
param.setMainArgs(mainArgs);
param.setQueue(queue);
param.setOthers(others);
param.setFlinkVersion(flinkVersion);
//Invoke buildArgs
List<String> result = FlinkArgsUtils.buildArgs(param);
@ -128,4 +130,4 @@ public class FlinkArgsUtilsTest {
assertEquals(5, result.size());
}
}
}

208
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTaskTest.java

@ -0,0 +1,208 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task.http;
import static org.apache.dolphinscheduler.common.enums.CommandType.*;
import java.io.IOException;
import java.util.Date;
import org.apache.dolphinscheduler.common.enums.HttpCheckCondition;
import org.apache.dolphinscheduler.common.enums.HttpMethod;
import org.apache.dolphinscheduler.common.task.http.HttpParameters;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor;
import org.apache.dolphinscheduler.server.worker.task.TaskProps;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import com.alibaba.fastjson.JSON;
@RunWith(PowerMockRunner.class)
@PrepareForTest(OSUtils.class)
@PowerMockIgnore({"javax.management.*","javax.net.ssl.*"})
public class HttpTaskTest {
private static final Logger logger = LoggerFactory.getLogger(HttpTaskTest.class);
private HttpTask httpTask;
private ProcessService processService;
private ShellCommandExecutor shellCommandExecutor;
private ApplicationContext applicationContext;
private TaskExecutionContext taskExecutionContext;
@Before
public void before() throws Exception {
taskExecutionContext = new TaskExecutionContext();
PowerMockito.mockStatic(OSUtils.class);
processService = PowerMockito.mock(ProcessService.class);
shellCommandExecutor = PowerMockito.mock(ShellCommandExecutor.class);
applicationContext = PowerMockito.mock(ApplicationContext.class);
SpringApplicationContext springApplicationContext = new SpringApplicationContext();
springApplicationContext.setApplicationContext(applicationContext);
PowerMockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService);
TaskProps props = new TaskProps();
props.setExecutePath("/tmp");
props.setTaskAppId(String.valueOf(System.currentTimeMillis()));
props.setTaskInstanceId(1);
props.setTenantCode("1");
props.setEnvFile(".dolphinscheduler_env.sh");
props.setTaskStartTime(new Date());
props.setTaskTimeout(0);
props.setTaskParams(
"{\"localParams\":[],\"httpParams\":[],\"url\":\"https://github.com/\",\"httpMethod\":\"GET\"," +
"\"httpCheckCondition\":\"STATUS_CODE_DEFAULT\",\"condition\":\"https://github.com/\"," +
"\"connectTimeout\":\"1000\",\"socketTimeout\":\"1000\"}");
taskExecutionContext = Mockito.mock(TaskExecutionContext.class);
Mockito.when(taskExecutionContext.getTaskParams()).thenReturn(props.getTaskParams());
Mockito.when(taskExecutionContext.getExecutePath()).thenReturn("/tmp");
Mockito.when(taskExecutionContext.getTaskAppId()).thenReturn("1");
Mockito.when(taskExecutionContext.getTenantCode()).thenReturn("root");
Mockito.when(taskExecutionContext.getStartTime()).thenReturn(new Date());
Mockito.when(taskExecutionContext.getTaskTimeout()).thenReturn(10000);
Mockito.when(taskExecutionContext.getLogPath()).thenReturn("/tmp/dx");
httpTask = new HttpTask(taskExecutionContext, logger);
httpTask.init();
}
@Test
public void testGetParameters() {
Assert.assertNotNull(httpTask.getParameters());
}
@Test
public void testCheckParameters() {
Assert.assertTrue(httpTask.getParameters().checkParameters());
}
@Test
public void testGenerator(){
String paramJson = "{\"localParams\":[],\"httpParams\":[],\"url\":\"https://github.com/\"," +
"\"httpMethod\":\"GET\",\"httpCheckCondition\":\"STATUS_CODE_DEFAULT\",\"condition\":\"\",\"connectTimeout\":\"10000\",\"socketTimeout\":\"10000\"}";
HttpParameters httpParameters = JSON.parseObject(paramJson, HttpParameters.class);
Assert.assertEquals(10000,httpParameters.getConnectTimeout() );
Assert.assertEquals(10000,httpParameters.getSocketTimeout());
Assert.assertEquals("https://github.com/",httpParameters.getUrl());
Assert.assertEquals(HttpMethod.GET,httpParameters.getHttpMethod());
Assert.assertEquals(HttpCheckCondition.STATUS_CODE_DEFAULT,httpParameters.getHttpCheckCondition());
Assert.assertEquals("",httpParameters.getCondition());
}
@Test
public void testHandle(){
boolean flag = true;
try {
httpTask.handle();
} catch (Exception e) {
flag = false;
e.printStackTrace();
}
Assert.assertTrue(flag);
}
@Test
public void testSendRequest(){
CloseableHttpClient client = httpTask.createHttpClient();
String statusCode = null;
String body = null;
try {
CloseableHttpResponse response = httpTask.sendRequest(client) ;
statusCode = String.valueOf(httpTask.getStatusCode(response));
body = httpTask.getResponseBody(response);
int exitStatusCode = httpTask.validResponse(body, statusCode);
Assert.assertNotEquals(-1,exitStatusCode);
} catch (IOException e) {
e.printStackTrace();
}
}
@Test
public void testValidResponse(){
String body = "body";
String statusCode = "200" ;
int exitStatusCode = httpTask.validResponse(body,statusCode);
Assert.assertNotEquals(-1,exitStatusCode);
}
@Test
public void testAppendMessage(){
httpTask.appendMessage("message");
Assert.assertEquals("message",httpTask.getOutput());
}
@Test
public void testCreateHttpClient(){
Assert.assertNotNull(httpTask.createHttpClient());
}
@Test
public void testCreateRequestBuilder(){
RequestBuilder requestBuilder = httpTask.createRequestBuilder();
Assert.assertEquals(RequestBuilder.get().getMethod(),requestBuilder.getMethod());
}
private ProcessInstance getProcessInstance() {
ProcessInstance processInstance = new ProcessInstance();
processInstance.setCommandType(START_PROCESS);
processInstance.setScheduleTime(new Date());
return processInstance;
}
}

126
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java

@ -17,11 +17,9 @@
package org.apache.dolphinscheduler.server.worker.task.sqoop;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.TaskProps;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
@ -35,7 +33,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import java.util.*;
import java.util.Date;
/**
* sqoop task test
@ -52,64 +50,98 @@ public class SqoopTaskTest {
@Before
public void before() throws Exception{
processService = Mockito.mock(ProcessService.class);
Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource());
applicationContext = Mockito.mock(ApplicationContext.class);
SpringApplicationContext springApplicationContext = new SpringApplicationContext();
springApplicationContext.setApplicationContext(applicationContext);
Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService);
TaskProps props = new TaskProps();
props.setTaskAppId(String.valueOf(System.currentTimeMillis()));
props.setTenantCode("1");
props.setEnvFile(".dolphinscheduler_env.sh");
props.setTaskStartTime(new Date());
props.setTaskTimeout(0);
props.setTaskParams("{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}");
sqoopTask = new SqoopTask(new TaskExecutionContext(),logger);
TaskExecutionContext taskExecutionContext = new TaskExecutionContext();
taskExecutionContext.setTaskAppId(String.valueOf(System.currentTimeMillis()));
taskExecutionContext.setTenantCode("1");
taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh");
taskExecutionContext.setStartTime(new Date());
taskExecutionContext.setTaskTimeout(0);
taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," +
"\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," +
"\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," +
"\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[]," +
"\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" +
",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," +
"\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," +
"\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}");
sqoopTask = new SqoopTask(taskExecutionContext,logger);
//test sqoop task init method
sqoopTask.init();
}
/**
* test SqoopJobGenerator
*/
@Test
public void testGenerator(){
String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters1 = JSON.parseObject(data1,SqoopParameters.class);
TaskExecutionContext mysqlTaskExecutionContext = getMysqlTaskExecutionContext();
//sqoop TEMPLATE job
//import mysql to HDFS with hadoop custom params
String mysqlToHdfs = "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}],\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}]," +
"\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters mysqlToHdfsParams = JSON.parseObject(mysqlToHdfs,SqoopParameters.class);
SqoopJobGenerator generator = new SqoopJobGenerator();
String script = generator.generateSqoopJob(sqoopParameters1,new TaskExecutionContext());
String expected = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'";
Assert.assertEquals(expected, script);
String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters2 = JSON.parseObject(data2,SqoopParameters.class);
String script2 = generator.generateSqoopJob(sqoopParameters2,new TaskExecutionContext());
String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
Assert.assertEquals(expected2, script2);
String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters3 = JSON.parseObject(data3,SqoopParameters.class);
String script3 = generator.generateSqoopJob(sqoopParameters3,new TaskExecutionContext());
String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
Assert.assertEquals(expected3, script3);
String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters4 = JSON.parseObject(data4,SqoopParameters.class);
String script4 = generator.generateSqoopJob(sqoopParameters4,new TaskExecutionContext());
String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
Assert.assertEquals(expected4, script4);
String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams,mysqlTaskExecutionContext);
String mysqlToHdfsExpected = "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'";
Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript);
//export hdfs to mysql using update mode
String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," +
"\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," +
"\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters hdfsToMysqlParams = JSON.parseObject(hdfsToMysql,SqoopParameters.class);
String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams,mysqlTaskExecutionContext);
String hdfsToMysqlScriptExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript);
//export hive to mysql
String hiveToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters hiveToMysqlParams = JSON.parseObject(hiveToMysql,SqoopParameters.class);
String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams,mysqlTaskExecutionContext);
String hiveToMysqlExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript);
//import mysql to hive
String mysqlToHive = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
SqoopParameters mysqlToHiveParams = JSON.parseObject(mysqlToHive,SqoopParameters.class);
String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams,mysqlTaskExecutionContext);
String mysqlToHiveExpected = "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript);
//sqoop CUSTOM job
String sqoopCustomString = "{\"jobType\":\"CUSTOM\",\"localParams\":[],\"customShell\":\"sqoop import\"}";
SqoopParameters sqoopCustomParams = JSON.parseObject(sqoopCustomString, SqoopParameters.class);
String sqoopCustomScript = generator.generateSqoopJob(sqoopCustomParams, new TaskExecutionContext());
String sqoopCustomExpected = "sqoop import";
Assert.assertEquals(sqoopCustomExpected, sqoopCustomScript);
}
private DataSource getDataSource() {
DataSource dataSource = new DataSource();
dataSource.setType(DbType.MYSQL);
dataSource.setConnectionParams(
"{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}");
dataSource.setUserId(1);
return dataSource;
/**
 * get a TaskExecutionContext that includes a mysql datasource
 * @return TaskExecutionContext
 */
private TaskExecutionContext getMysqlTaskExecutionContext() {
TaskExecutionContext taskExecutionContext = new TaskExecutionContext();
SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext();
String mysqlSourceConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
String mysqlTargetConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
sqoopTaskExecutionContext.setDataSourceId(2);
sqoopTaskExecutionContext.setDataTargetId(2);
sqoopTaskExecutionContext.setSourcetype(0);
sqoopTaskExecutionContext.setTargetConnectionParams(mysqlTargetConnectionParams);
sqoopTaskExecutionContext.setSourceConnectionParams(mysqlSourceConnectionParams);
sqoopTaskExecutionContext.setTargetType(0);
taskExecutionContext.setSqoopTaskExecutionContext(sqoopTaskExecutionContext);
return taskExecutionContext;
}
@Test

2
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue

@ -131,7 +131,7 @@
},
created () {
let supportType = this.supportType || []
this.typeList = _.cloneDeep(this.store.state.dag.dsTypeListS)
this.typeList = this.data.typeList || _.cloneDeep(this.store.state.dag.dsTypeListS)
// Have a specified data source
if (supportType.length) {
let is = (type) => {

67
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue

@ -62,56 +62,73 @@
</x-radio-group>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Flink Version')}}</div>
<div slot="content">
<x-select
style="width: 100px;"
v-model="flinkVersion"
:disabled="isDetails">
<x-option
v-for="version in flinkVersionList"
:key="version.code"
:value="version.code"
:label="version.code">
</x-option>
</x-select>
</div>
</m-list-box>
<div class="list-box-4p">
<div class="clearfix list">
<span class="sp1">{{$t('slot')}}</span>
<span class="sp1" style="word-break:break-all">{{$t('jobManagerMemory')}}</span>
<span class="sp2">
<x-input
:disabled="isDetails"
type="input"
v-model="slot"
:placeholder="$t('Please enter driver core number')"
style="width: 200px;"
autocomplete="off">
:disabled="isDetails"
type="input"
v-model="jobManagerMemory"
:placeholder="$t('Please enter the number of Executor')"
style="width: 200px;"
autocomplete="off">
</x-input>
</span>
<span class="sp1 sp3">{{$t('taskManager')}}</span>
<span class="sp1 sp3">{{$t('taskManagerMemory')}}</span>
<span class="sp2">
<x-input
:disabled="isDetails"
type="input"
v-model="taskManager"
:placeholder="$t('Please enter driver memory use')"
style="width: 186px;"
autocomplete="off">
:disabled="isDetails"
type="input"
v-model="taskManagerMemory"
:placeholder="$t('Please enter the Executor memory')"
style="width: 186px;"
autocomplete="off">
</x-input>
</span>
</div>
<div class="clearfix list">
<span class="sp1" style="word-break:break-all">{{$t('jobManagerMemory')}}</span>
<span class="sp1">{{$t('slot')}}</span>
<span class="sp2">
<x-input
:disabled="isDetails"
type="input"
v-model="jobManagerMemory"
:placeholder="$t('Please enter the number of Executor')"
v-model="slot"
:placeholder="$t('Please enter driver core number')"
style="width: 200px;"
autocomplete="off">
</x-input>
</span>
<span class="sp1 sp3">{{$t('taskManagerMemory')}}</span>
<div v-if="flinkVersion !== '>=1.10'">
<span class="sp1 sp3">{{$t('taskManager')}}</span>
<span class="sp2">
<x-input
:disabled="isDetails"
type="input"
v-model="taskManagerMemory"
:placeholder="$t('Please enter the Executor memory')"
v-model="taskManager"
:placeholder="$t('Please enter driver memory use')"
style="width: 186px;"
autocomplete="off">
</x-input>
</span>
</div>
</div>
</div>
<m-list-box>
<div slot="text">{{$t('Command-line parameters')}}</div>
@ -207,6 +224,11 @@
programType: 'SCALA',
// Program type(List)
programTypeList: [{ code: 'JAVA' }, { code: 'SCALA' }, { code: 'PYTHON' }],
flinkVersion:'<1.10',
// Flink Versions(List)
flinkVersionList: [{ code: '<1.10' }, { code: '>=1.10' }],
normalizer(node) {
return {
label: node.name
@ -324,6 +346,7 @@
return {id: v}
}),
localParams: this.localParams,
flinkVersion: this.flinkVersion,
slot: this.slot,
taskManager: this.taskManager,
jobManagerMemory: this.jobManagerMemory,
@ -485,11 +508,13 @@
this.mainJar = o.params.mainJar.id || ''
}
this.deployMode = o.params.deployMode || ''
this.flinkVersion = o.params.flinkVersion || '<1.10'
this.slot = o.params.slot || 1
this.taskManager = o.params.taskManager || '2'
this.jobManagerMemory = o.params.jobManagerMemory || '1G'
this.taskManagerMemory = o.params.taskManagerMemory || '2G'
this.mainArgs = o.params.mainArgs || ''
this.others = o.params.others
this.programType = o.params.programType || 'SCALA'

58
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue

@ -85,6 +85,39 @@
</x-input>
</div>
</m-list-box>
<m-list-box >
<div slot="text">{{$t('Timeout Settings')}}</div>
<div slot="content">
<label class="label-box">
<div style="padding-top: 5px;">
<x-switch
v-model="timeoutSettings"
:disabled="isDetails"
></x-switch>
</div>
</label>
</div>
</m-list-box>
<div class="clearfix list" v-if = "timeoutSettings" >
<div class="text-box">
<span>{{$t('Connect Timeout')}}</span>
</div>
<div class="cont-box">
<span class="label-box" style="width: 193px;display: inline-block;" >
<x-input v-model='connectTimeout' maxlength="7" />
</span>
<span>{{$t('ms')}}</span>
<span class="text-b">{{$t('Socket Timeout')}}</span>
<span class="label-box" style="width: 193px;display: inline-block;" >
<x-input v-model='socketTimeout' maxlength="7" />
</span>
<span>{{$t('ms')}}</span>
</div>
</div>
<m-list-box>
<div slot="text">{{$t('Custom Parameters')}}</div>
<div slot="content">
@ -110,6 +143,10 @@
name: 'http',
data () {
return {
timeoutSettings: false,
connectTimeout: 60000,
socketTimeout: 60000,
url: '',
condition: '',
localParams: [],
@ -152,6 +189,14 @@
if (!this.$refs.refHttpParams._verifValue()) {
return false
}
// _.isNumber(parseInt(x)) is true even for NaN, so check the parsed value directly
if (!(parseInt(this.socketTimeout) > 0)) {
this.$message.warning(`${i18n.$t('Socket Timeout be a positive integer')}`)
return false
}
if (!(parseInt(this.connectTimeout) > 0)) {
this.$message.warning(`${i18n.$t('Connect timeout be a positive integer')}`)
return false
}
// storage
this.$emit('on-params', {
localParams: this.localParams,
@ -159,7 +204,9 @@
url: this.url,
httpMethod: this.httpMethod,
httpCheckCondition: this.httpCheckCondition,
condition: this.condition
condition: this.condition,
connectTimeout: this.connectTimeout,
socketTimeout: this.socketTimeout
})
return true
}
@ -172,7 +219,9 @@
url: this.url,
httpMethod: this.httpMethod,
httpCheckCondition: this.httpCheckCondition,
condition: this.condition
condition: this.condition,
connectTimeout: this.connectTimeout,
socketTimeout: this.socketTimeout
}
}
},
@ -193,6 +242,11 @@
this.httpMethod = o.params.httpMethod || 'GET'
this.httpCheckCondition = o.params.httpCheckCondition || 'DEFAULT'
this.condition = o.params.condition || ''
this.connectTimeout = o.params.connectTimeout || 60000
this.socketTimeout = o.params.socketTimeout || 60000
if (this.connectTimeout != 60000 || this.socketTimeout != 60000) {
this.timeoutSettings = true
}
// backfill localParams
let localParams = o.params.localParams || []
if (localParams.length) {

962
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue

File diff suppressed because it is too large

16
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@ -98,6 +98,7 @@ export default {
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Flink Version':'Flink Version',
'Driver core number': 'Driver core number',
'Please enter driver core number': 'Please enter driver core number',
'Driver memory use': 'Driver memory use',
@ -540,6 +541,9 @@ export default {
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
@ -554,6 +558,8 @@ export default {
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
@ -587,10 +593,18 @@ export default {
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'Successful branch flow and failed branch flow are required',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
'Enable': 'Enable',
'Disable': 'Disable'
'Disable': 'Disable',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout':'Connect Timeout',
'Socket Timeout':'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout must be a positive integer',
'Socket Timeout be a positive integer': 'Socket timeout must be a positive integer',
'ms':'ms'
}

16
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js

@ -99,6 +99,7 @@ export default {
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Flink Version': 'Flink版本',
'Driver core number': 'Driver内核数',
'Please enter driver core number': '请输入Driver内核数',
'Driver memory use': 'Driver内存数',
@ -540,6 +541,9 @@ export default {
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Hadoop Custom Params': 'Hadoop参数',
'Sqoop Advanced Parameters': 'Sqoop参数',
'Sqoop Job Name': '任务名称',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名 , 隔开',
@ -554,6 +558,8 @@ export default {
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
'Please enter Job Name(required)': '请输入任务名称(必填)',
'Please enter Custom Shell(required)': '请输入自定义脚本',
Direct: '流向',
Type: '类型',
ModelType: '模式',
@ -587,10 +593,18 @@ export default {
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Custom Job': '自定义任务',
'Custom Script': '自定义脚本',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': '成功分支流转和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'Enable': '启用',
'Disable': '停用'
'Disable': '停用',
'Timeout Settings': '超时设置',
'Connect Timeout':'连接超时',
'Socket Timeout':'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
'ms':'毫秒'
}

34
e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java

@ -23,7 +23,7 @@ import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import redis.clients.jedis.Jedis;
import org.openqa.selenium.JavascriptExecutor;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@ -115,6 +115,7 @@ public class BrowserCommon {
/**
* Click button element
*
* @param locator By
* @return clickButton
*/
@ -128,12 +129,13 @@ public class BrowserCommon {
/**
* Click Navigation Bar element
*
* @param locator By
* @return clickButton
*/
public void clickTopElement(By locator) {
WebElement element = driver.findElement(locator);
JavascriptExecutor executor = (JavascriptExecutor)driver;
JavascriptExecutor executor = (JavascriptExecutor) driver;
executor.executeScript("arguments[0].click();", element);
}
@ -164,6 +166,7 @@ public class BrowserCommon {
inputElement.sendKeys(content);
return inputElement;
}
/**
* clear element
*
@ -182,9 +185,8 @@ public class BrowserCommon {
*
* @param codeMirrorLocator By codeMirror
* @param codeMirrorLineLocator By codeMirrorLine
*/
public void inputCodeMirror(By codeMirrorLocator,By codeMirrorLineLocator,String content) {
public void inputCodeMirror(By codeMirrorLocator, By codeMirrorLineLocator, String content) {
WebElement codeMirrorElement = locateElement(codeMirrorLocator);
WebElement codeMirrorLineElement = locateElement(codeMirrorLineLocator);
codeMirrorElement.click();
@ -193,10 +195,11 @@ public class BrowserCommon {
/**
* move to element
*
* @param locator BY
* @return actions
*/
public Actions moveToElement(By locator){
public Actions moveToElement(By locator) {
return actions.moveToElement(locateElement(locator));
}
@ -206,14 +209,14 @@ public class BrowserCommon {
* @param source_locator BY
* @param target_locator BY
*/
public void dragAndDrop(By source_locator, By target_locator){
public void dragAndDrop(By source_locator, By target_locator) {
WebElement sourceElement = locateElement(source_locator);
WebElement targetElement = locateElement(target_locator);
actions.dragAndDrop(sourceElement, targetElement).perform();
actions.release();
}
public void moveToDragElement(By target_locator, int X, int Y){
public void moveToDragElement(By target_locator, int X, int Y) {
WebElement targetElement = locateElement(target_locator);
actions.dragAndDropBy(targetElement, X, Y).perform();
actions.release();
@ -249,9 +252,22 @@ public class BrowserCommon {
return driver;
}
/**
* select time
*
* @return WebElement
*/
public WebElement selectDate(String js, By locator_time, String date) {
JavascriptExecutor removeAttribute = (JavascriptExecutor) driver;
removeAttribute.executeScript("var setDate=" + js + ";setDate.removeAttribute('readonly');");
WebElement dateElement = locateElement(locator_time);
//input date
dateElement.clear();
dateElement.sendKeys(Keys.HOME,Keys.chord(Keys.SHIFT,Keys.END));
dateElement.sendKeys(date);
return dateElement;
}
/**
* Multi-window switch handle, according to the handle number passed in

36
e2e/src/test/java/org/apache/dolphinscheduler/data/security/AlertManageData.java

@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.data.security;
public class AlertManageData {
/**
* Alert Name
*/
public static final String ALERT_NAME = "selenium_alert_Name";
/**
* Alert Type
*/
public static final String ALERT_TYPE = "邮件";
/**
* Alert Description
*/
public static final String DESCRIPTION = "create alert test";
public static final String ALERT_MANAGE = "告警组管理 - DolphinScheduler";
}

42
e2e/src/test/java/org/apache/dolphinscheduler/data/security/QueueManageData.java

@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.data.security;
public class QueueManageData {
/**
* Create Queue Name
*/
public static final String QUEUE_NAME = "selenium_queue_name";
/**
* Create Queue Value
*/
public static final String QUEUE_VALUE = "selenium_queue_value";
/**
* Edit Queue Name
*/
public static final String EDIT_QUEUE_NAME = "_edit";
/**
* Edit Queue Value
*/
public static final String EDIT_QUEUE_VALUE = "_edit";
public static final String QUEUE_MANAGE = "队列管理 - DolphinScheduler";
}

24
e2e/src/test/java/org/apache/dolphinscheduler/data/security/TokenManageData.java

@ -0,0 +1,24 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.data.security;
public class TokenManageData {
public static final String TOKEN_MANAGE = "令牌管理 - DolphinScheduler";
public static final String DATE = "2038-06-10 00:00:00";
}

35
e2e/src/test/java/org/apache/dolphinscheduler/locator/security/AlertManageLocator.java

@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.locator.security;
import org.openqa.selenium.By;
public class AlertManageLocator {
//create alert locator
public static final By CLICK_ALERT_MANAGE = By.xpath("//div[4]/div/a/div/a/span");
public static final By CLICK_CREATE_ALERT = By.xpath("//div[1]/div[2]/div/div[2]/div[2]/div/div[1]/button/span");
public static final By INPUT_ALERT_NAME = By.xpath("//div[2]/div/div[1]/div[2]/div/input");
public static final By CLICK_ALERT_TYPE = By.xpath("//div[2]/div/div[2]/div/div[2]/div[2]/div/div[1]/div/input");
public static final By SELECT_ALERT_EMAIL = By.xpath("//div[2]/div/div[2]/div/div[2]/div[2]/div/div[2]/div/div/div/ul/li[1]/span");
public static final By INPUT_ALERT_DESCRIPTION = By.xpath("//textarea");
public static final By SUBMIT_ALERT = By.xpath("//div[3]/button[2]/span");
//delete alert locator
public static final By DELETE_ALERT_BUTTON = By.xpath("//span/button");
public static final By CONFIRM_DELETE_ALERT_BUTTON = By.xpath("//div[2]/div/button[2]/span");
}

31
e2e/src/test/java/org/apache/dolphinscheduler/locator/security/QueueManageLocator.java

@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.locator.security;
import org.openqa.selenium.By;
public class QueueManageLocator {
//create queue locator
public static final By CLICK_QUEUE_MANAGE = By.xpath("//div[6]/div/a/div/a/span");
public static final By CLICK_CREATE_QUEUE = By.xpath("//button/span");
public static final By INPUT_QUEUE_NAME = By.xpath("//div[2]/div/div/div[2]/div/input");
public static final By INPUT_QUEUE_VALUE = By.xpath("//div[2]/div[2]/div/input");
public static final By SUBMIT_QUEUE = By.xpath("//button[2]/span");
//edit queue locator
public static final By CLICK_EDIT_QUEUE = By.xpath("//td[6]/button/i");
}

41
e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TokenManageLocator.java

@@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.locator.security;
import org.openqa.selenium.By;
public class TokenManageLocator {
//create token
public static final By CLICK_TOKEN_MANAGE = By.xpath("//div[7]/div/a/div/a/span");
public static final By CLICK_CREATE_TOKEN = By.xpath("//div[2]/div/div[2]/div[2]/div/div[1]/button/span");
public static final By SELECT_USER = By.xpath("//div[2]/div[2]/div/div/div/span/i");
public static final By CLICK_GENERATE_TOKEN_BUTTON = By.xpath("//div[3]/div[2]/button/span");
public static final By CLICK_SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span");
//edit token
public static final By CLICK_EDIT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[7]/button/i");
//delete token
public static final By CLICK_DELETE_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[7]/span/button");
public static final By CLICK_CONFIRM_DELETE_BUTTON = By.xpath("//div[2]/div/button[2]/span");
}

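The locators above are absolute XPath chains tied to DOM nesting, which break easily when the page layout shifts. A minimal sketch of a more resilient style, assuming the frontend exposed stable ids or classes (the selectors below are illustrative, not taken from the real DolphinScheduler DOM):

import org.openqa.selenium.By;

public class TokenManageLocatorSketch {
    // Hypothetical hooks: the page would need to expose these ids/classes.
    public static final By CLICK_TOKEN_MANAGE = By.id("token-manage-menu");
    public static final By CLICK_CREATE_TOKEN = By.cssSelector(".create-token-button");
}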
77
e2e/src/test/java/org/apache/dolphinscheduler/page/security/AlertManagePage.java

@@ -0,0 +1,77 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.page.security;
import org.apache.dolphinscheduler.common.PageCommon;
import org.apache.dolphinscheduler.data.security.AlertManageData;
import org.apache.dolphinscheduler.locator.security.AlertManageLocator;
import org.openqa.selenium.WebDriver;
public class AlertManagePage extends PageCommon {
/**
* Constructor
*
* @param driver the WebDriver instance
*/
public AlertManagePage(WebDriver driver) {
super(driver);
}
/**
* create alert
*
* @return Whether to enter the specified page after creating the alert
*/
public boolean createAlert() throws InterruptedException {
// click alert manage
clickElement(AlertManageLocator.CLICK_ALERT_MANAGE);
Thread.sleep(1000);
// click create alert button
clickElement(AlertManageLocator.CLICK_CREATE_ALERT);
Thread.sleep(1000);
// input alert data
sendInput(AlertManageLocator.INPUT_ALERT_NAME, AlertManageData.ALERT_NAME);
clickElement(AlertManageLocator.CLICK_ALERT_TYPE);
clickElement(AlertManageLocator.SELECT_ALERT_EMAIL);
sendInput(AlertManageLocator.INPUT_ALERT_DESCRIPTION, AlertManageData.DESCRIPTION);
// click button
clickButton(AlertManageLocator.SUBMIT_ALERT);
// Whether to enter the specified page after submit
return ifTitleContains(AlertManageData.ALERT_MANAGE);
}
public boolean deleteAlert() throws InterruptedException {
// click alert manage
clickElement(AlertManageLocator.CLICK_ALERT_MANAGE);
// click delete alert button
clickButton(AlertManageLocator.DELETE_ALERT_BUTTON);
// click confirm delete button
clickButton(AlertManageLocator.CONFIRM_DELETE_ALERT_BUTTON);
// Whether to enter the specified page after submit
return ifTitleContains(AlertManageData.ALERT_MANAGE);
}
}

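The page objects above pace themselves with fixed Thread.sleep(1000) pauses, which are both slow and flaky. A minimal sketch of an explicit-wait helper that clickElement in PageCommon could delegate to, assuming the Selenium 3 WebDriverWait(driver, seconds) constructor (the helper name and placement are assumptions, not part of this change):

import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

public class WaitSketch {
    private final WebDriver driver;

    public WaitSketch(WebDriver driver) {
        this.driver = driver;
    }

    // Poll up to 10 seconds for the element to become clickable, then click it,
    // instead of sleeping for a fixed interval.
    public void clickWhenReady(By locator) {
        new WebDriverWait(driver, 10)
                .until(ExpectedConditions.elementToBeClickable(locator))
                .click();
    }
}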
85
e2e/src/test/java/org/apache/dolphinscheduler/page/security/QueueManagePage.java

@@ -0,0 +1,85 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.page.security;
import org.apache.dolphinscheduler.common.PageCommon;
import org.apache.dolphinscheduler.data.security.QueueManageData;
import org.apache.dolphinscheduler.locator.security.QueueManageLocator;
import org.openqa.selenium.WebDriver;
public class QueueManagePage extends PageCommon {
/**
* Constructor
*
* @param driver the WebDriver instance
*/
public QueueManagePage(WebDriver driver) {
super(driver);
}
/**
* create queue
*
* @return Whether to enter the specified page after creating the queue
*/
public boolean createQueue() throws InterruptedException {
// click queue manage
clickElement(QueueManageLocator.CLICK_QUEUE_MANAGE);
Thread.sleep(1000);
// click create queue button
clickElement(QueueManageLocator.CLICK_CREATE_QUEUE);
Thread.sleep(1000);
// input queue data
sendInput(QueueManageLocator.INPUT_QUEUE_NAME, QueueManageData.QUEUE_NAME);
sendInput(QueueManageLocator.INPUT_QUEUE_VALUE, QueueManageData.QUEUE_VALUE);
// click button
clickButton(QueueManageLocator.SUBMIT_QUEUE);
// Whether to enter the specified page after submit
return ifTitleContains(QueueManageData.QUEUE_MANAGE);
}
/**
* edit queue
*
* @return Whether to enter the specified page after editing the queue
*/
public boolean editQueue() throws InterruptedException {
// click queue manage
Thread.sleep(1000);
clickElement(QueueManageLocator.CLICK_QUEUE_MANAGE);
Thread.sleep(1000);
// click edit queue button
clickElement(QueueManageLocator.CLICK_EDIT_QUEUE);
Thread.sleep(1000);
// input queue data
sendInput(QueueManageLocator.INPUT_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME);
sendInput(QueueManageLocator.INPUT_QUEUE_VALUE, QueueManageData.EDIT_QUEUE_VALUE);
// click button
clickButton(QueueManageLocator.SUBMIT_QUEUE);
// Whether to enter the specified page after submit
return ifTitleContains(QueueManageData.QUEUE_MANAGE);
}
}

94
e2e/src/test/java/org/apache/dolphinscheduler/page/security/TokenManagePage.java

@@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.page.security;
import org.apache.dolphinscheduler.common.PageCommon;
import org.apache.dolphinscheduler.data.security.TokenManageData;
import org.apache.dolphinscheduler.locator.security.TokenManageLocator;
import org.openqa.selenium.WebDriver;
public class TokenManagePage extends PageCommon {
public TokenManagePage(WebDriver driver) {
super(driver);
}
/**
* create token
*
* @return Whether to enter the specified page after creating the token
*/
public boolean createToken() throws InterruptedException {
//create token
Thread.sleep(1000);
clickElement(TokenManageLocator.CLICK_TOKEN_MANAGE);
Thread.sleep(1000);
// click create token button
clickButton(TokenManageLocator.CLICK_CREATE_TOKEN);
Thread.sleep(1000);
//selectDate(TokenManageLocator.js, TokenManageLocator.CLICK_TIME, TokenManageData.DATE);
clickButton(TokenManageLocator.SELECT_USER);
clickButton(TokenManageLocator.CLICK_GENERATE_TOKEN_BUTTON);
Thread.sleep(2500);
// click button
clickButton(TokenManageLocator.CLICK_SUBMIT_BUTTON);
// Whether to enter the specified page after submit
return ifTitleContains(TokenManageData.TOKEN_MANAGE);
}
//edit token
public boolean editToken() throws InterruptedException {
// click token manage
clickElement(TokenManageLocator.CLICK_TOKEN_MANAGE);
Thread.sleep(1000);
// click edit token button
clickButton(TokenManageLocator.CLICK_EDIT_BUTTON);
Thread.sleep(1000);
clickButton(TokenManageLocator.SELECT_USER);
clickButton(TokenManageLocator.CLICK_GENERATE_TOKEN_BUTTON);
Thread.sleep(2500);
// click button
clickButton(TokenManageLocator.CLICK_SUBMIT_BUTTON);
// Whether to enter the specified page after submit
return ifTitleContains(TokenManageData.TOKEN_MANAGE);
}
//delete token
public boolean deleteToken() throws InterruptedException {
// click token manage
clickElement(TokenManageLocator.CLICK_TOKEN_MANAGE);
Thread.sleep(1000);
clickButton(TokenManageLocator.CLICK_DELETE_BUTTON);
clickButton(TokenManageLocator.CLICK_CONFIRM_DELETE_BUTTON);
return ifTitleContains(TokenManageData.TOKEN_MANAGE);
}
}

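Each page object reports success through ifTitleContains, which is inherited from PageCommon and not shown in this diff. A plausible sketch of such a helper, assuming it polls the browser title via WebDriverWait (the body is an assumption, not the actual PageCommon code):

import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

public class TitleCheckSketch {
    private final WebDriver driver;

    public TitleCheckSketch(WebDriver driver) {
        this.driver = driver;
    }

    // Wait up to 10 seconds for the title to contain the expected text;
    // returns true on success, throws TimeoutException otherwise.
    public boolean ifTitleContains(String expected) {
        return new WebDriverWait(driver, 10)
                .until(ExpectedConditions.titleContains(expected));
    }
}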
4
e2e/src/test/java/org/apache/dolphinscheduler/testcase/LoginTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/TestLogin.java

@@ -22,10 +22,10 @@ import org.testng.annotations.Test;
import static org.apache.dolphinscheduler.base.BaseTest.driver;
@Test(groups={"functionTests","login"})
public class LoginTest {
public class TestLogin {
private LoginPage loginPage;
@Test(description = "LoginTest", priority = 1)
@Test(description = "TestLogin")
public void testLogin() throws InterruptedException {
loginPage = new LoginPage(driver);
System.out.println("===================================");

42
e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteAlert.java

@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.testDeleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.AlertManagePage;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.testng.annotations.Test;
public class TestDeleteAlert extends BaseTest {
private AlertManagePage alertManagePage;
private TenantManagePage tenantManagePage;
@Test(groups={"functionTests"},dependsOnGroups = { "login","alert" },description = "TestDeleteAlert",priority=8)
public void testDeleteAlert() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
System.out.println("jump to testSecurity to delete alert");
tenantManagePage.jumpSecurity();
alertManagePage = new AlertManagePage(driver);
//assert alert manage page
System.out.println("start delete alert");
assert alertManagePage.deleteAlert();
System.out.println("end delete alert");
System.out.println("===================================");
}
}

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteProjectTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteProject.java

@@ -14,16 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
package org.apache.dolphinscheduler.testcase.testDeleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.project.CreateProjectPage;
import org.testng.annotations.Test;
public class DeleteProjectTest extends BaseTest {
public class TestDeleteProject extends BaseTest {
private CreateProjectPage createProjectPage;
@Test(groups={"functionTests"},dependsOnGroups = { "login","project"},description = "DeleteProjectTest",priority=7)
@Test(groups={"functionTests"},dependsOnGroups = { "login","project"},description = "TestDeleteProject")
public void testDeleteProject() throws InterruptedException {
createProjectPage = new CreateProjectPage(driver);
//jump to project manage page

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteTenantTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteTenant.java

@@ -14,16 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
package org.apache.dolphinscheduler.testcase.testDeleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.testng.annotations.Test;
public class DeleteTenantTest extends BaseTest {
public class TestDeleteTenant extends BaseTest {
private TenantManagePage tenantManagePage;
@Test(groups={"functionTests"},dependsOnGroups = { "login","createTenant"},description = "DeleteTenantTest",priority=9)
@Test(groups={"functionTests"},dependsOnGroups = { "login","createTenant"},description = "TestDeleteTenant")
public void testDeleteTenant() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
//assert tenant manage page

42
e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteToken.java

@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.testDeleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.apache.dolphinscheduler.page.security.TokenManagePage;
import org.testng.annotations.Test;
public class TestDeleteToken extends BaseTest {
private TokenManagePage tokenManagePage;
private TenantManagePage tenantManagePage;
@Test(groups = {"functionTests"}, dependsOnGroups = {"login", "token"}, description = "TestDeleteToken")
public void testDeleteToken() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
System.out.println("jump to security to delete token");
tenantManagePage.jumpSecurity();
tokenManagePage = new TokenManagePage(driver);
//delete token
System.out.println("start delete token");
assert tokenManagePage.deleteToken();
System.out.println("end delete token");
System.out.println("===================================");
}
}

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteUserTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteUser.java

@@ -14,18 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
package org.apache.dolphinscheduler.testcase.testDeleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.apache.dolphinscheduler.page.security.UserManagePage;
import org.testng.annotations.Test;
public class DeleteUserTest extends BaseTest {
public class TestDeleteUser extends BaseTest {
private UserManagePage userManagePage;
private TenantManagePage tenantManagePage;
@Test(groups={"functionTests"},dependsOnGroups = { "login","user" },description = "DeleteUserTest",priority=8)
@Test(groups={"functionTests"},dependsOnGroups = { "login","user" },description = "TestDeleteUser")
public void testDeleteUser() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
System.out.println("jump to security to delete user");

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteWorkflowTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testDeleteData/TestDeleteWorkflow.java

@@ -14,18 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
package org.apache.dolphinscheduler.testcase.testDeleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.project.CreateProjectPage;
import org.apache.dolphinscheduler.page.project.CreateWorkflowPage;
import org.testng.annotations.Test;
public class DeleteWorkflowTest extends BaseTest {
public class TestDeleteWorkflow extends BaseTest {
private CreateWorkflowPage createWorkflowPage;
private CreateProjectPage createProjectPage;
@Test(groups={"functionTests"},dependsOnGroups = { "login","workflow"},description = "DeleteWorkflowTest",priority=6)
@Test(groups={"functionTests"},dependsOnGroups = { "login","workflow"},description = "TestDeleteWorkflow")
public void testDeleteWorkflow() throws InterruptedException {
createProjectPage = new CreateProjectPage(driver);
//jump to project manage page

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateProjectTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testProject/TestCreateProject.java

@@ -14,16 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.project;
package org.apache.dolphinscheduler.testcase.testProject;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.project.CreateProjectPage;
import org.testng.annotations.Test;
public class CreateProjectTest extends BaseTest {
public class TestCreateProject extends BaseTest {
private CreateProjectPage createProjectPage;
@Test(groups={"functionTests","project"},dependsOnGroups = { "login" },description = "CreateProjectTest",priority=4)
@Test(groups={"functionTests","project"},dependsOnGroups = { "login" },description = "CreateProjectTest")
public void testCreateProject() throws InterruptedException {
createProjectPage = new CreateProjectPage(driver);
// enter user manage page

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateWorkflowTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testProject/TestCreateWorkflow.java

@@ -14,19 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.project;
package org.apache.dolphinscheduler.testcase.testProject;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.project.CreateProjectPage;
import org.apache.dolphinscheduler.page.project.CreateWorkflowPage;
import org.testng.annotations.Test;
public class CreateWorkflowTest extends BaseTest {
public class TestCreateWorkflow extends BaseTest {
private CreateWorkflowPage createWorkflowPage;
private CreateProjectPage createProjectPage;
@Test(groups={"functionTests","workflow"},dependsOnGroups = { "login" },description = "CreateWorkflowTest",priority=5)
@Test(groups={"functionTests","workflow"},dependsOnGroups = { "login" },description = "TestCreateWorkflow")
public void testCreateWorkflow() throws InterruptedException {
createProjectPage = new CreateProjectPage(driver);
System.out.println("jump to the projectManage page to create workflow");

36
e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestAlertManage.java

@@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.testSecurity;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.AlertManagePage;
import org.testng.annotations.Test;
public class TestAlertManage extends BaseTest {
private AlertManagePage alertManagePage;
@Test(groups={"functionTests","alert"},dependsOnGroups = { "login" },description = "AlertManagePage")
public void testAlertManage() throws InterruptedException {
alertManagePage = new AlertManagePage(driver);
//assert alert manage page
System.out.println("start create alert");
assert alertManagePage.createAlert();
System.out.println("end create alert");
System.out.println("===================================");
}
}

42
e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestQueueManage.java

@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.testSecurity;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.QueueManagePage;
import org.testng.annotations.Test;
public class TestQueueManage extends BaseTest {
private QueueManagePage queueManagePage;
@Test(groups={"functionTests","queue"},dependsOnGroups = { "login" },description = "TestQueueManage")
public void testQueueManage() throws InterruptedException {
queueManagePage = new QueueManagePage(driver);
//create queue
System.out.println("start create queue");
assert queueManagePage.createQueue();
System.out.println("end create queue");
System.out.println("===================================");
//edit queue
System.out.println("start edit queue");
assert queueManagePage.editQueue();
System.out.println("end edit queue");
System.out.println("===================================");
}
}

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/TenantManageTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTenantManage.java

@@ -14,17 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.security;
package org.apache.dolphinscheduler.testcase.testSecurity;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.testng.annotations.Test;
public class TenantManageTest extends BaseTest {
public class TestTenantManage extends BaseTest {
private TenantManagePage tenantManagePage;
@Test(groups={"functionTests","createTenant"},dependsOnGroups = { "login" },description = "TenantManageTest",priority=2)
@Test(groups={"functionTests","createTenant"},dependsOnGroups = { "login" },description = "TestTenantManage")
public void testTenantManage() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
//assert tenant manage page

43
e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTokenManage.java

@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.testSecurity;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TokenManagePage;
import org.testng.annotations.Test;
public class TestTokenManage extends BaseTest {
private TokenManagePage tokenManagePage;
@Test(groups={"functionTests","token"},dependsOnGroups = { "login" },description = "TestTokenManage")
public void testTokenManage() throws InterruptedException {
tokenManagePage = new TokenManagePage(driver);
//generate token
System.out.println("start create token");
assert tokenManagePage.createToken();
System.out.println("end create token");
System.out.println("===================================");
//edit token
System.out.println("start edit token");
assert tokenManagePage.editToken();
System.out.println("end edit token");
System.out.println("===================================");
}
}

6
e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/UserManageTest.java → e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestUserManage.java

@@ -14,16 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.security;
package org.apache.dolphinscheduler.testcase.testSecurity;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.UserManagePage;
import org.testng.annotations.Test;
public class UserManageTest extends BaseTest {
public class TestUserManage extends BaseTest {
private UserManagePage userManagePage;
@Test(groups={"functionTests","user"},dependsOnGroups = { "login" },description = "UserManageTest",priority=3)
@Test(groups={"functionTests","user"},dependsOnGroups = { "login" },description = "TestUserManage")
public void testUserManage() throws InterruptedException {
userManagePage = new UserManagePage(driver);
//assert user manage page

28
e2e/testng.xml

@@ -16,24 +16,30 @@
~ limitations under the License.
-->
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
<suite name="dolphinscheduler_e2e" parallel="true">
<suite name="dolphinscheduler_e2e" verbose="1">
<test name="dolphinscheduler_test" preserve-order="true">
<groups>
<run>
<include name="functionTests"/>
</run>
</groups>
<classes>
<class name="org.apache.dolphinscheduler.testcase.LoginTest"></class>
<class name="org.apache.dolphinscheduler.testcase.security.TenantManageTest"></class>
<class name="org.apache.dolphinscheduler.testcase.security.UserManageTest"></class>
<class name="org.apache.dolphinscheduler.testcase.project.CreateProjectTest"></class>
<class name="org.apache.dolphinscheduler.testcase.project.CreateWorkflowTest"></class>
<class name="org.apache.dolphinscheduler.testcase.deleteData.DeleteWorkflowTest"></class>
<class name="org.apache.dolphinscheduler.testcase.deleteData.DeleteProjectTest"></class>
<class name="org.apache.dolphinscheduler.testcase.deleteData.DeleteUserTest"></class>
<class name="org.apache.dolphinscheduler.testcase.deleteData.DeleteTenantTest"></class>
<class name="org.apache.dolphinscheduler.testcase.TestLogin"></class>
<class name="org.apache.dolphinscheduler.testcase.testSecurity.TestTenantManage"></class>
<class name="org.apache.dolphinscheduler.testcase.testSecurity.TestUserManage"></class>
<class name="org.apache.dolphinscheduler.testcase.testSecurity.TestAlertManage"></class>
<class name="org.apache.dolphinscheduler.testcase.testSecurity.TestQueueManage"></class>
<class name="org.apache.dolphinscheduler.testcase.testSecurity.TestTokenManage"></class>
<class name="org.apache.dolphinscheduler.testcase.testProject.TestCreateProject"></class>
<class name="org.apache.dolphinscheduler.testcase.testProject.TestCreateWorkflow"></class>
<class name="org.apache.dolphinscheduler.testcase.testDeleteData.TestDeleteWorkflow"></class>
<class name="org.apache.dolphinscheduler.testcase.testDeleteData.TestDeleteProject"></class>
<class name="org.apache.dolphinscheduler.testcase.testDeleteData.TestDeleteAlert"></class>
<class name="org.apache.dolphinscheduler.testcase.testDeleteData.TestDeleteToken"></class>
<class name="org.apache.dolphinscheduler.testcase.testDeleteData.TestDeleteUser"></class>
<class name="org.apache.dolphinscheduler.testcase.testDeleteData.TestDeleteTenant"></class>
</classes>
</test>

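With the per-test priority attributes removed, execution order now comes from dependsOnGroups together with preserve-order="true" in the suite above. A minimal sketch of the convention:

import org.testng.annotations.Test;

public class OrderingSketch {
    @Test(groups = {"login"})
    public void login() {
        // Runs first: the other groups declare a dependency on "login".
    }

    @Test(groups = {"functionTests"}, dependsOnGroups = {"login"})
    public void afterLogin() {
        // Runs only after every test in the "login" group has passed.
    }
}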
4
pom.xml

@@ -749,6 +749,7 @@
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
@@ -821,6 +822,8 @@
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/http/HttpTaskTest.java</include>
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
@@ -1010,5 +1013,6 @@
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-plugin-api</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
