
[Improvement] Add remote task model (#11767)

3.2.0-release
caishunfeng authored 2 years ago, committed by GitHub
commit 2e61c76c22
67 changed files (changed line counts in parentheses):

1. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java (12)
2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (66)
3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java (86)
4. dolphinscheduler-bom/pom.xml (2)
5. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (5)
6. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java (18)
7. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java (9)
8. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java (3)
9. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java (20)
10. dolphinscheduler-log-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java (7)
11. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java (1)
12. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskEvent.java (3)
13. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java (36)
14. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java (46)
15. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java (11)
16. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/MasterFailoverService.java (25)
17. dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutionContextTestUtils.java (1)
18. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java (23)
19. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractRemoteTask.java (86)
20. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTask.java (141)
21. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java (97)
22. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java (54)
23. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskCallBack.java (25)
24. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/TaskExecutionStatus.java (6)
25. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTask.java (25)
26. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/BaseLoopTaskExecutor.java (24)
27. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/model/ApplicationInfo.java (30)
28. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/stream/StreamTask.java (2)
29. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtils.java (25)
30. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtilsTest.java (9)
31. dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTask.java (22)
32. dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java (18)
33. dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java (30)
34. dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTask.java (22)
35. dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java (5)
36. dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java (32)
37. dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java (43)
38. dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/test/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTaskTest.java (27)
39. dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/test/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTaskTest.java (33)
40. dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTask.java (8)
41. dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java (8)
42. dolphinscheduler-task-plugin/dolphinscheduler-task-hivecli/src/main/java/org/apache/dolphinscheduler/plugin/task/hivecli/HiveCliTask.java (32)
43. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java (14)
44. dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/test/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskTest.java (28)
45. dolphinscheduler-task-plugin/dolphinscheduler-task-jupyter/src/main/java/org/apache/dolphinscheduler/plugin/task/jupyter/JupyterTask.java (33)
46. dolphinscheduler-task-plugin/dolphinscheduler-task-k8s/src/main/java/org/apache/dolphinscheduler/plugin/task/k8s/K8sTask.java (7)
47. dolphinscheduler-task-plugin/dolphinscheduler-task-mlflow/src/main/java/org/apache/dolphinscheduler/plugin/task/mlflow/MlflowTask.java (14)
48. dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java (38)
49. dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/test/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskTest.java (2)
50. dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java (12)
51. dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java (16)
52. dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/PytorchTask.java (13)
53. dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/PipelineUtils.java (81)
54. dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTask.java (96)
55. dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/test/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTaskTest.java (23)
56. dolphinscheduler-task-plugin/dolphinscheduler-task-seatunnel/src/main/java/org/apache/dolphinscheduler/plugin/task/seatunnel/SeatunnelTask.java (33)
57. dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java (15)
58. dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java (12)
59. dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTask.java (33)
60. dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/test/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTaskTest.java (17)
61. dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/UpgradeDao.java (112)
62. dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/message/TaskExecuteRunningMessageSender.java (9)
63. dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java (2)
64. dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/DefaultWorkerDelayTaskExecuteRunnable.java (5)
65. dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskCallbackImpl.java (62)
66. dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerTaskExecuteRunnable.java (8)
67. dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/utils/TaskExecutionCheckerUtils.java (24)
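The commit title and the touched files sketch the shape of the change before the per-file diffs below: a new AbstractRemoteTask base class and supporting types (TaskCallBack, ApplicationInfo) in dolphinscheduler-task-api, plus reworked plugins that drive external services (EMR, SageMaker, Zeppelin, Dinky, HTTP, Pigeon and others). The short Java sketch below only illustrates what such a remote task model generally looks like; the class and method names (RemoteTaskSketch, submitApplication, isApplicationFinished, killApplication) are assumptions made for the example and are not the signatures added by this commit.

// Illustrative sketch only: not the AbstractRemoteTask added here, just the general shape
// of a task that submits work to an external system and then tracks it by application id.
public abstract class RemoteTaskSketch {

    /** Handle of the remotely running application, e.g. a YARN, EMR or SageMaker job id. */
    protected String appId;

    /** Set when the scheduler asks the task to stop. */
    private volatile boolean cancelled;

    /** Submit the job to the remote system and return its application id. */
    protected abstract String submitApplication() throws Exception;

    /** Return true once the remote application has reached a terminal state. */
    protected abstract boolean isApplicationFinished(String applicationId) throws Exception;

    /** Ask the remote system to kill the application. */
    protected abstract void killApplication(String applicationId) throws Exception;

    /** Generic driver loop: submit once, then poll by application id until done or cancelled. */
    public final void run(long pollIntervalMillis) throws Exception {
        appId = submitApplication();
        while (!cancelled && !isApplicationFinished(appId)) {
            Thread.sleep(pollIntervalMillis);
        }
        if (cancelled) {
            killApplication(appId);
        }
    }

    public void cancel() {
        cancelled = true;
    }

    // Minimal usage example against a fake remote system that "finishes" after three polls.
    public static void main(String[] args) throws Exception {
        RemoteTaskSketch task = new RemoteTaskSketch() {
            private int polls;
            @Override
            protected String submitApplication() {
                return "application_0001";
            }
            @Override
            protected boolean isApplicationFinished(String applicationId) {
                return ++polls >= 3;
            }
            @Override
            protected void killApplication(String applicationId) {
                // nothing to kill in the fake system
            }
        };
        task.run(10L);
        System.out.println("remote application " + task.appId + " finished");
    }
}

Read this way, the per-file diffs are easier to follow: the application id is what gets threaded through the system, which is presumably why TaskExecutionContextBuilder now copies taskInstance.getAppLink() into the context and TaskEvent now carries command.getAppIds() (see those hunks below).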

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java (12)

@@ -18,6 +18,7 @@
 package org.apache.dolphinscheduler.api.service.impl;
 import org.apache.dolphinscheduler.api.enums.Status;
+import org.apache.dolphinscheduler.api.k8s.K8sClientService;
 import org.apache.dolphinscheduler.api.service.K8sNamespaceService;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
@@ -29,7 +30,6 @@ import org.apache.dolphinscheduler.dao.entity.User;
 import org.apache.dolphinscheduler.dao.mapper.ClusterMapper;
 import org.apache.dolphinscheduler.dao.mapper.K8sNamespaceMapper;
 import org.apache.dolphinscheduler.remote.exceptions.RemotingException;
-import org.apache.dolphinscheduler.api.k8s.K8sClientService;
 import org.apache.commons.lang3.StringUtils;
@@ -118,7 +118,8 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
 * @return
 */
 @Override
-public Map<String, Object> createK8sNamespace(User loginUser, String namespace, Long clusterCode, Double limitsCpu, Integer limitsMemory) {
+public Map<String, Object> createK8sNamespace(User loginUser, String namespace, Long clusterCode, Double limitsCpu,
+        Integer limitsMemory) {
 Map<String, Object> result = new HashMap<>();
 if (isNotAdmin(loginUser, result)) {
 return result;
@@ -209,7 +210,8 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
 * @return
 */
 @Override
-public Map<String, Object> updateK8sNamespace(User loginUser, int id, String userName, Double limitsCpu, Integer limitsMemory) {
+public Map<String, Object> updateK8sNamespace(User loginUser, int id, String userName, Double limitsCpu,
+        Integer limitsMemory) {
 Map<String, Object> result = new HashMap<>();
 if (isNotAdmin(loginUser, result)) {
 return result;
@@ -360,7 +362,6 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
 return result;
 }
 /**
 * query unauthorized namespace
 *
@@ -432,7 +433,8 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
 * @param authedNamespaceList authed namespace list
 * @return namespace list that authorization
 */
-private List<K8sNamespace> getUnauthorizedNamespaces(Set<K8sNamespace> namespaceSet, List<K8sNamespace> authedNamespaceList) {
+private List<K8sNamespace> getUnauthorizedNamespaces(Set<K8sNamespace> namespaceSet,
+        List<K8sNamespace> authedNamespaceList) {
 List<K8sNamespace> resultList = new ArrayList<>();
 for (K8sNamespace k8sNamespace : namespaceSet) {
 boolean existAuth = false;

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (66)

@@ -17,14 +17,28 @@
 package org.apache.dolphinscheduler.api.service.impl;
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_DEFINITION_MOVE;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.VERSION_DELETE;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.VERSION_LIST;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_BATCH_COPY;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_CREATE;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION_DELETE;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION_EXPORT;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_EXPORT;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_IMPORT;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_ONLINE_OFFLINE;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_SWITCH_TO_THIS_VERSION;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_TREE_VIEW;
+import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_UPDATE;
+import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE;
+import static org.apache.dolphinscheduler.common.Constants.COPY_SUFFIX;
+import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
+import static org.apache.dolphinscheduler.common.Constants.EMPTY_STRING;
+import static org.apache.dolphinscheduler.common.Constants.IMPORT_SUFFIX;
+import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMPLEX_TASK_TYPES;
+import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SQL;
 import org.apache.dolphinscheduler.api.dto.DagDataSchedule;
 import org.apache.dolphinscheduler.api.dto.ScheduleParam;
 import org.apache.dolphinscheduler.api.dto.treeview.Instance;
@@ -92,16 +106,10 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode;
 import org.apache.dolphinscheduler.plugin.task.api.parameters.SqlParameters;
 import org.apache.dolphinscheduler.service.process.ProcessService;
 import org.apache.dolphinscheduler.service.task.TaskPluginManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.MediaType;
-import org.springframework.stereotype.Service;
-import org.springframework.transaction.annotation.Transactional;
-import org.springframework.web.multipart.MultipartFile;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -124,27 +132,6 @@ import java.util.stream.Collectors;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_DEFINITION_MOVE;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.VERSION_DELETE;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.VERSION_LIST;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_BATCH_COPY;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_CREATE;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION_DELETE;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION_EXPORT;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_EXPORT;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_IMPORT;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_ONLINE_OFFLINE;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_SWITCH_TO_THIS_VERSION;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_TREE_VIEW;
-import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_UPDATE;
-import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE;
-import static org.apache.dolphinscheduler.common.Constants.COPY_SUFFIX;
-import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
-import static org.apache.dolphinscheduler.common.Constants.EMPTY_STRING;
-import static org.apache.dolphinscheduler.common.Constants.IMPORT_SUFFIX;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMPLEX_TASK_TYPES;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SQL;
 import javax.servlet.ServletOutputStream;
 import javax.servlet.http.HttpServletResponse;
@@ -773,7 +760,8 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
 * @return true if process definition name not exists, otherwise false
 */
 @Override
-public Map<String, Object> verifyProcessDefinitionName(User loginUser, long projectCode, String name, long processDefinitionCode) {
+public Map<String, Object> verifyProcessDefinitionName(User loginUser, long projectCode, String name,
+        long processDefinitionCode) {
 Project project = projectMapper.queryByCode(projectCode);
 // check user access for project
 Map<String, Object> result =

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java (86)

@@ -98,7 +98,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 */
 @Transactional
 @Override
-public Map<String, Object> createProcessTaskRelation(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) {
+public Map<String, Object> createProcessTaskRelation(User loginUser, long projectCode, long processDefinitionCode,
+        long preTaskCode, long postTaskCode) {
 Project project = projectMapper.queryByCode(projectCode);
 // check user access for project
 Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
@@ -115,11 +116,14 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 return result;
 }
 updateProcessDefiniteVersion(loginUser, result, processDefinition);
-List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
+List<ProcessTaskRelation> processTaskRelationList =
+        processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
 List<ProcessTaskRelation> processTaskRelations = Lists.newArrayList(processTaskRelationList);
 if (!processTaskRelations.isEmpty()) {
-Map<Long, ProcessTaskRelation> preTaskCodeMap = processTaskRelations.stream().filter(r -> r.getPostTaskCode() == postTaskCode)
-.collect(Collectors.toMap(ProcessTaskRelation::getPreTaskCode, processTaskRelation -> processTaskRelation));
+Map<Long, ProcessTaskRelation> preTaskCodeMap =
+        processTaskRelations.stream().filter(r -> r.getPostTaskCode() == postTaskCode)
+                .collect(Collectors.toMap(ProcessTaskRelation::getPreTaskCode,
+                        processTaskRelation -> processTaskRelation));
 if (!preTaskCodeMap.isEmpty()) {
 if (preTaskCodeMap.containsKey(preTaskCode) || (!preTaskCodeMap.containsKey(0L) && preTaskCode == 0L)) {
 putMsg(result, Status.PROCESS_TASK_RELATION_EXIST, String.valueOf(processDefinitionCode));
@@ -135,7 +139,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 ProcessTaskRelation processTaskRelation = setRelation(processDefinition, postTaskDefinition);
 if (preTaskCode != 0L) {
 TaskDefinition preTaskDefinition = taskDefinitionMapper.queryByCode(preTaskCode);
-List<ProcessTaskRelation> upstreamTaskRelationList = processTaskRelations.stream().filter(r -> r.getPostTaskCode() == preTaskCode).collect(Collectors.toList());
+List<ProcessTaskRelation> upstreamTaskRelationList = processTaskRelations.stream()
+        .filter(r -> r.getPostTaskCode() == preTaskCode).collect(Collectors.toList());
 // upstream is or not exist
 if (upstreamTaskRelationList.isEmpty()) {
 ProcessTaskRelation preProcessTaskRelation = setRelation(processDefinition, preTaskDefinition);
@@ -169,7 +174,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 return processTaskRelation;
 }
-private void updateProcessDefiniteVersion(User loginUser, Map<String, Object> result, ProcessDefinition processDefinition) {
+private void updateProcessDefiniteVersion(User loginUser, Map<String, Object> result,
+        ProcessDefinition processDefinition) {
 int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
 if (insertVersion <= 0) {
 putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
@@ -189,7 +195,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 */
 @Transactional
 @Override
-public Map<String, Object> deleteTaskProcessRelation(User loginUser, long projectCode, long processDefinitionCode, long taskCode) {
+public Map<String, Object> deleteTaskProcessRelation(User loginUser, long projectCode, long processDefinitionCode,
+        long taskCode) {
 Project project = projectMapper.queryByCode(projectCode);
 // check user access for project
 Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
@@ -210,7 +217,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode));
 return result;
 }
-List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
+List<ProcessTaskRelation> processTaskRelations =
+        processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
 List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
 if (CollectionUtils.isEmpty(processTaskRelationList)) {
 putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
@@ -246,8 +254,10 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 private void updateRelation(User loginUser, Map<String, Object> result, ProcessDefinition processDefinition,
 List<ProcessTaskRelation> processTaskRelationList) {
-List<ProcessTaskRelationLog> relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
-int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(),
+List<ProcessTaskRelationLog> relationLogs =
+        processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
+int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
+        processDefinition.getCode(),
 processDefinition.getVersion(), relationLogs, Lists.newArrayList(), Boolean.TRUE);
 if (insertResult == Constants.EXIT_CODE_SUCCESS) {
 putMsg(result, Status.SUCCESS);
@@ -269,7 +279,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 */
 @Transactional
 @Override
-public Map<String, Object> deleteUpstreamRelation(User loginUser, long projectCode, String preTaskCodes, long taskCode) {
+public Map<String, Object> deleteUpstreamRelation(User loginUser, long projectCode, String preTaskCodes,
+        long taskCode) {
 Project project = projectMapper.queryByCode(projectCode);
 // check user access for project
 Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
@@ -286,12 +297,14 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 return result;
 }
-List<Long> preTaskCodeList = Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toList());
+List<Long> preTaskCodeList = Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream()
+        .map(Long::parseLong).collect(Collectors.toList());
 if (preTaskCodeList.contains(0L)) {
 putMsg(result, Status.DATA_IS_NULL, "preTaskCodes");
 return result;
 }
-List<Long> currentUpstreamList = upstreamList.stream().map(ProcessTaskRelation::getPreTaskCode).collect(Collectors.toList());
+List<Long> currentUpstreamList =
+        upstreamList.stream().map(ProcessTaskRelation::getPreTaskCode).collect(Collectors.toList());
 if (currentUpstreamList.contains(0L)) {
 putMsg(result, Status.DATA_IS_NOT_VALID, "currentUpstreamList");
 return result;
@@ -303,12 +316,15 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 putMsg(result, Status.DATA_IS_NOT_VALID, StringUtils.join(preTaskCodeList, Constants.COMMA));
 return result;
 }
-ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode());
+ProcessDefinition processDefinition =
+        processDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode());
 if (processDefinition == null) {
-putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(upstreamList.get(0).getProcessDefinitionCode()));
+putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,
+        String.valueOf(upstreamList.get(0).getProcessDefinitionCode()));
 return result;
 }
-List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode());
+List<ProcessTaskRelation> processTaskRelations =
+        processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode());
 List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
 List<ProcessTaskRelation> processTaskRelationWaitRemove = Lists.newArrayList();
 for (ProcessTaskRelation processTaskRelation : processTaskRelationList) {
@@ -318,7 +334,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 processTaskRelationWaitRemove.add(processTaskRelation);
 }
 } else {
-if (processTaskRelation.getPostTaskCode() == taskCode && (currentUpstreamList.isEmpty() || tmpCurrent.isEmpty())) {
+if (processTaskRelation.getPostTaskCode() == taskCode
+        && (currentUpstreamList.isEmpty() || tmpCurrent.isEmpty())) {
 processTaskRelation.setPreTaskVersion(0);
 processTaskRelation.setPreTaskCode(0L);
 }
@@ -341,7 +358,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 */
 @Transactional
 @Override
-public Map<String, Object> deleteDownstreamRelation(User loginUser, long projectCode, String postTaskCodes, long taskCode) {
+public Map<String, Object> deleteDownstreamRelation(User loginUser, long projectCode, String postTaskCodes,
+        long taskCode) {
 Project project = projectMapper.queryByCode(projectCode);
 // check user access for project
 Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
@@ -352,24 +370,31 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
 return result;
 }
-List<ProcessTaskRelation> downstreamList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
+List<ProcessTaskRelation> downstreamList =
+        processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
 if (CollectionUtils.isEmpty(downstreamList)) {
 putMsg(result, Status.DATA_IS_NULL, "taskCode");
 return result;
 }
-List<Long> postTaskCodeList = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toList());
+List<Long> postTaskCodeList = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream()
+        .map(Long::parseLong).collect(Collectors.toList());
 if (postTaskCodeList.contains(0L)) {
 putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
 return result;
 }
-ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode());
+ProcessDefinition processDefinition =
+        processDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode());
 if (processDefinition == null) {
-putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(downstreamList.get(0).getProcessDefinitionCode()));
+putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,
+        String.valueOf(downstreamList.get(0).getProcessDefinitionCode()));
 return result;
 }
-List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode());
+List<ProcessTaskRelation> processTaskRelations =
+        processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode());
 List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
-processTaskRelationList.removeIf(processTaskRelation -> postTaskCodeList.contains(processTaskRelation.getPostTaskCode()) && processTaskRelation.getPreTaskCode() == taskCode);
+processTaskRelationList
+        .removeIf(processTaskRelation -> postTaskCodeList.contains(processTaskRelation.getPostTaskCode())
+                && processTaskRelation.getPreTaskCode() == taskCode);
 updateProcessDefiniteVersion(loginUser, result, processDefinition);
 updateRelation(loginUser, result, processDefinition, processTaskRelationList);
 return result;
@@ -391,7 +416,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 if (result.get(Constants.STATUS) != Status.SUCCESS) {
 return result;
 }
-List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
+List<ProcessTaskRelation> processTaskRelationList =
+        processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
 List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
 if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
 Set<TaskDefinition> taskDefinitions = processTaskRelationList
@@ -427,7 +453,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 if (result.get(Constants.STATUS) != Status.SUCCESS) {
 return result;
 }
-List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
+List<ProcessTaskRelation> processTaskRelationList =
+        processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
 List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
 if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
 Set<TaskDefinition> taskDefinitions = processTaskRelationList
@@ -459,7 +486,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 */
 @Transactional
 @Override
-public Map<String, Object> deleteEdge(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) {
+public Map<String, Object> deleteEdge(User loginUser, long projectCode, long processDefinitionCode,
+        long preTaskCode, long postTaskCode) {
 Project project = projectMapper.queryByCode(projectCode);
 // check user access for project
 Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
@@ -471,7 +499,8 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode));
 return result;
 }
-List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
+List<ProcessTaskRelation> processTaskRelations =
+        processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
 List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
 if (CollectionUtils.isEmpty(processTaskRelationList)) {
 putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
@@ -522,6 +551,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
 private TaskDefinition buildTaskDefinition() {
 return new TaskDefinition() {
 @Override
 public boolean equals(Object o) {
 if (this == o) {

dolphinscheduler-bom/pom.xml (2)

@@ -275,8 +275,8 @@
 <groupId>io.grpc</groupId>
 <artifactId>grpc-bom</artifactId>
 <version>${io.grpc.version}</version>
-<scope>import</scope>
 <type>pom</type>
+<scope>import</scope>
 </dependency>
 <dependency>

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (5)

@@ -17,11 +17,12 @@
 package org.apache.dolphinscheduler.common;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.SystemUtils;
 import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
 import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.SystemUtils;
 import java.time.Duration;
 import java.util.regex.Pattern;

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java (18)

@@ -19,8 +19,13 @@ package org.apache.dolphinscheduler.common.utils;
 import org.apache.dolphinscheduler.common.shell.ShellExecutor;
-import org.apache.commons.lang3.SystemUtils;
+import oshi.SystemInfo;
+import oshi.hardware.CentralProcessor;
+import oshi.hardware.GlobalMemory;
+import oshi.hardware.HardwareAbstractionLayer;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.SystemUtils;
 import java.io.BufferedReader;
 import java.io.File;
@@ -42,11 +47,6 @@ import java.util.regex.Pattern;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import oshi.SystemInfo;
-import oshi.hardware.CentralProcessor;
-import oshi.hardware.GlobalMemory;
-import oshi.hardware.HardwareAbstractionLayer;
 /**
 * os utils
 */
@@ -198,7 +198,8 @@ public class OSUtils {
 private static List<String> getUserListFromLinux() throws IOException {
 List<String> userList = new ArrayList<>();
-try (BufferedReader bufferedReader = new BufferedReader(
+try (
+        BufferedReader bufferedReader = new BufferedReader(
 new InputStreamReader(new FileInputStream("/etc/passwd")))) {
 String line;
@@ -472,7 +473,8 @@
 // system available physical memory
 double availablePhysicalMemorySize = availablePhysicalMemorySize();
 if (loadAverage > maxCpuLoadAvg || availablePhysicalMemorySize < reservedMemory) {
-logger.warn("Current cpu load average {} is too high or available memory {}G is too low, under max.cpuLoad.avg={} and reserved.memory={}G",
+logger.warn(
+        "Current cpu load average {} is too high or available memory {}G is too low, under max.cpuLoad.avg={} and reserved.memory={}G",
 loadAverage, availablePhysicalMemorySize, maxCpuLoadAvg, reservedMemory);
 return true;
 }

dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java (9)

@@ -16,18 +16,17 @@
 */
 package org.apache.dolphinscheduler.common.os;
+import org.apache.dolphinscheduler.common.utils.OSUtils;
 import org.apache.commons.lang3.SystemUtils;
-import org.apache.dolphinscheduler.common.utils.OSUtils;
+import java.util.List;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.util.List;
 /**
 * OSUtilsTest
 */
@@ -97,6 +96,4 @@
 }
 }

dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java (3)

@@ -34,6 +34,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import lombok.Data;
 import com.baomidou.mybatisplus.annotation.FieldStrategy;
 import com.baomidou.mybatisplus.annotation.IdType;
 import com.baomidou.mybatisplus.annotation.TableField;
@@ -43,7 +45,6 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 import com.google.common.base.Strings;
-import lombok.Data;
 @Data
 @TableName("t_ds_task_definition")

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java (20)

@@ -17,17 +17,18 @@
 package org.apache.dolphinscheduler.plugin.datasource.api.utils;
+import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.*;
+import static org.apache.dolphinscheduler.spi.utils.Constants.RESOURCE_STORAGE_TYPE;
 import org.apache.dolphinscheduler.spi.enums.ResUploadType;
 import org.apache.dolphinscheduler.spi.utils.PropertyUtils;
 import org.apache.dolphinscheduler.spi.utils.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import java.io.IOException;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.*;
-import static org.apache.dolphinscheduler.spi.utils.Constants.RESOURCE_STORAGE_TYPE;
 /**
 * common utils
 */
@@ -70,7 +71,8 @@ public class CommonUtils {
 * @param loginUserKeytabPath loginUserKeytabPath
 * @throws IOException errors
 */
-public static void loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) throws IOException {
+public static void loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername,
+        String loginUserKeytabPath) throws IOException {
 Configuration configuration = new Configuration();
 configuration.setClassLoader(configuration.getClass().getClassLoader());
 loadKerberosConf(javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath, configuration);
@@ -86,12 +88,16 @@
 * @return load kerberos config return true
 * @throws IOException errors
 */
-public static boolean loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath, Configuration configuration) throws IOException {
+public static boolean loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername,
+        String loginUserKeytabPath, Configuration configuration) throws IOException {
 if (CommonUtils.getKerberosStartupState()) {
-System.setProperty(JAVA_SECURITY_KRB5_CONF, StringUtils.defaultIfBlank(javaSecurityKrb5Conf, PropertyUtils.getString(JAVA_SECURITY_KRB5_CONF_PATH)));
+System.setProperty(JAVA_SECURITY_KRB5_CONF, StringUtils.defaultIfBlank(javaSecurityKrb5Conf,
+        PropertyUtils.getString(JAVA_SECURITY_KRB5_CONF_PATH)));
 configuration.set(HADOOP_SECURITY_AUTHENTICATION, KERBEROS);
 UserGroupInformation.setConfiguration(configuration);
-UserGroupInformation.loginUserFromKeytab(StringUtils.defaultIfBlank(loginUserKeytabUsername, PropertyUtils.getString(LOGIN_USER_KEY_TAB_USERNAME)),
+UserGroupInformation.loginUserFromKeytab(
+        StringUtils.defaultIfBlank(loginUserKeytabUsername,
+                PropertyUtils.getString(LOGIN_USER_KEY_TAB_USERNAME)),
 StringUtils.defaultIfBlank(loginUserKeytabPath, PropertyUtils.getString(LOGIN_USER_KEY_TAB_PATH)));
 return true;
 }

dolphinscheduler-log-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java (7)

@@ -46,8 +46,10 @@ import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.stream.Collectors;
@@ -166,8 +168,9 @@ public class LoggerRequestProcessor implements NettyRequestProcessor {
 if (!checkPathSecurity(logPath)) {
 throw new IllegalArgumentException("Illegal path");
 }
-List<String> appIds = LogUtils.getAppIdsFromLogFile(logPath);
-channel.writeAndFlush(new GetAppIdResponseCommand(appIds).convert2Command(command.getOpaque()));
+Set<String> appIds = LogUtils.getAppIdsFromLogFile(logPath);
+channel.writeAndFlush(
+        new GetAppIdResponseCommand(new ArrayList<>(appIds)).convert2Command(command.getOpaque()));
 break;
 default:
 throw new IllegalArgumentException("unknown commandType: " + commandType);

dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java (1)

@@ -68,6 +68,7 @@ public class TaskExecutionContextBuilder {
 taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.SUBMITTED_SUCCESS);
 taskExecutionContext.setCpuQuota(taskInstance.getCpuQuota());
 taskExecutionContext.setMemoryMax(taskInstance.getMemoryMax());
+taskExecutionContext.setAppIds(taskInstance.getAppLink());
 return this;
 }

dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskEvent.java (3)

@@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.remote.command.TaskRejectCommand;
 import java.util.Date;
-import io.netty.channel.Channel;
 import lombok.Data;
+import io.netty.channel.Channel;
 /**
 * task event
@@ -113,6 +113,7 @@
 event.setStartTime(command.getStartTime());
 event.setExecutePath(command.getExecutePath());
 event.setLogPath(command.getLogPath());
+event.setAppIds(command.getAppIds());
 event.setChannel(channel);
 event.setWorkerAddress(workerAddress);
 event.setEvent(TaskEventType.RUNNING);

dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java (36)

@@ -17,8 +17,9 @@
 package org.apache.dolphinscheduler.server.master.registry;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
+import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS;
+import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.NodeType;
 import org.apache.dolphinscheduler.common.model.Server;
@@ -34,13 +35,10 @@ import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
 import org.apache.dolphinscheduler.server.master.config.MasterConfig;
 import org.apache.dolphinscheduler.service.queue.MasterPriorityQueue;
 import org.apache.dolphinscheduler.service.registry.RegistryClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.InitializingBean;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-import javax.annotation.PreDestroy;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -57,8 +55,13 @@
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
-import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS;
-import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS;
+import javax.annotation.PreDestroy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
 /**
 * server node manager
@@ -74,7 +77,6 @@ public class ServerNodeManager implements InitializingBean {
 private final ReentrantReadWriteLock.ReadLock workerGroupReadLock = workerGroupLock.readLock();
 private final ReentrantReadWriteLock.WriteLock workerGroupWriteLock = workerGroupLock.writeLock();
 private final ReentrantReadWriteLock workerNodeInfoLock = new ReentrantReadWriteLock();
 private final ReentrantReadWriteLock.ReadLock workerNodeInfoReadLock = workerNodeInfoLock.readLock();
 private final ReentrantReadWriteLock.WriteLock workerNodeInfoWriteLock = workerNodeInfoLock.writeLock();
@@ -138,7 +140,6 @@ public class ServerNodeManager implements InitializingBean {
 return MASTER_SIZE;
 }
 /**
 * init listener
 *
@@ -153,7 +154,8 @@
 /**
 * init executor service
 */
-executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("ServerNodeManagerExecutor"));
+executorService =
+        Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("ServerNodeManagerExecutor"));
 executorService.scheduleWithFixedDelay(new WorkerNodeInfoAndGroupDbSyncTask(), 0, 10, TimeUnit.SECONDS);
 /*
 * init MasterNodeListener listener
@@ -212,7 +214,6 @@
 }
 }
 protected Set<String> getWorkerAddressByWorkerGroup(Map<String, String> newWorkerNodeInfo, WorkerGroup wg) {
 Set<String> nodes = new HashSet<>();
 String[] addrs = wg.getAddrList().split(Constants.COMMA);
@@ -238,7 +239,8 @@
 try {
 String[] parts = path.split("/");
 if (parts.length < WORKER_LISTENER_CHECK_LENGTH) {
-throw new IllegalArgumentException(String.format("worker group path : %s is not valid, ignore", path));
+throw new IllegalArgumentException(
+        String.format("worker group path : %s is not valid, ignore", path));
 }
 final String workerGroupName = parts[parts.length - 2];
 final String workerAddress = parts[parts.length - 1];
@@ -272,6 +274,7 @@
 }
 }
 class MasterDataListener implements SubscribeListener {
 @Override
 public void notify(Event event) {
 final String path = event.path();
@@ -330,7 +333,8 @@
 } else {
 logger.warn("current addr:{} is not in active master list", masterConfig.getMasterAddress());
 }
-logger.info("update master nodes, master size: {}, slot: {}, addr: {}", MASTER_SIZE, MASTER_SLOT, masterConfig.getMasterAddress());
+logger.info("update master nodes, master size: {}, slot: {}, addr: {}", MASTER_SIZE, MASTER_SLOT,
+        masterConfig.getMasterAddress());
 } finally {
 masterLock.unlock();
 }

46
dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java

@@ -73,11 +73,11 @@ import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-import lombok.NonNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import lombok.NonNull;
/** /**
* stream task execute * stream task execute
*/ */
@ -118,7 +118,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
this.dispatcher = SpringApplicationContext.getBean(ExecutorDispatcher.class); this.dispatcher = SpringApplicationContext.getBean(ExecutorDispatcher.class);
this.taskPluginManager = SpringApplicationContext.getBean(TaskPluginManager.class); this.taskPluginManager = SpringApplicationContext.getBean(TaskPluginManager.class);
this.processTaskRelationMapper = SpringApplicationContext.getBean(ProcessTaskRelationMapper.class); this.processTaskRelationMapper = SpringApplicationContext.getBean(ProcessTaskRelationMapper.class);
this.streamTaskInstanceExecCacheManager = SpringApplicationContext.getBean(StreamTaskInstanceExecCacheManager.class); this.streamTaskInstanceExecCacheManager =
SpringApplicationContext.getBean(StreamTaskInstanceExecCacheManager.class);
this.taskDefinition = taskDefinition; this.taskDefinition = taskDefinition;
this.taskExecuteStartCommand = taskExecuteStartCommand; this.taskExecuteStartCommand = taskExecuteStartCommand;
} }
@ -137,7 +138,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
// add cache // add cache
streamTaskInstanceExecCacheManager.cache(taskInstance.getId(), this); streamTaskInstanceExecCacheManager.cache(taskInstance.getId(), this);
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryByTaskCode(taskDefinition.getCode()); List<ProcessTaskRelation> processTaskRelationList =
processTaskRelationMapper.queryByTaskCode(taskDefinition.getCode());
long processDefinitionCode = processTaskRelationList.get(0).getProcessDefinitionCode(); long processDefinitionCode = processTaskRelationList.get(0).getProcessDefinitionCode();
int processDefinitionVersion = processTaskRelationList.get(0).getProcessDefinitionVersion(); int processDefinitionVersion = processTaskRelationList.get(0).getProcessDefinitionVersion();
processDefinition = processService.findProcessDefinition(processDefinitionCode, processDefinitionVersion); processDefinition = processService.findProcessDefinition(processDefinitionCode, processDefinitionVersion);
@ -155,7 +157,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
taskExecutionContext.getHost(), taskExecutionContext.getHost(),
System.currentTimeMillis()); System.currentTimeMillis());
ExecutionContext executionContext = new ExecutionContext(dispatchCommand.convert2Command(), ExecutorType.WORKER, taskExecutionContext.getWorkerGroup(), taskInstance); ExecutionContext executionContext = new ExecutionContext(dispatchCommand.convert2Command(), ExecutorType.WORKER,
taskExecutionContext.getWorkerGroup(), taskInstance);
Boolean dispatchSuccess = false; Boolean dispatchSuccess = false;
try { try {
dispatchSuccess = dispatcher.dispatch(executionContext); dispatchSuccess = dispatcher.dispatch(executionContext);
@ -190,7 +193,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
public boolean addTaskEvent(TaskEvent taskEvent) { public boolean addTaskEvent(TaskEvent taskEvent) {
if (taskInstance.getId() != taskEvent.getTaskInstanceId()) { if (taskInstance.getId() != taskEvent.getTaskInstanceId()) {
logger.info("state event would be abounded, taskInstanceId:{}, eventType:{}, state:{}", taskEvent.getTaskInstanceId(), taskEvent.getEvent(), taskEvent.getState()); logger.info("state event would be abounded, taskInstanceId:{}, eventType:{}, state:{}",
taskEvent.getTaskInstanceId(), taskEvent.getEvent(), taskEvent.getState());
return false; return false;
} }
taskEvents.add(taskEvent); taskEvents.add(taskEvent);
@ -231,7 +235,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
stateEventHandleException); stateEventHandleException);
ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS);
} catch (Exception e) { } catch (Exception e) {
// we catch the exception here, since if the state event handle failed, the state event will still keep in the stateEvents queue. // we catch the exception here, since if the state event handle failed, the state event will still keep
// in the stateEvents queue.
logger.error("State event handle error, get a unknown exception, will retry this event: {}", logger.error("State event handle error, get a unknown exception, will retry this event: {}",
taskEvent, taskEvent,
e); e);
@ -290,8 +295,10 @@ public class StreamTaskExecuteRunnable implements Runnable {
// task dry run flag // task dry run flag
taskInstance.setDryRun(taskExecuteStartCommand.getDryRun()); taskInstance.setDryRun(taskExecuteStartCommand.getDryRun());
-        taskInstance.setWorkerGroup(StringUtils.isBlank(taskDefinition.getWorkerGroup()) ? DEFAULT_WORKER_GROUP : taskDefinition.getWorkerGroup());
-        taskInstance.setEnvironmentCode(taskDefinition.getEnvironmentCode() == 0 ? -1 : taskDefinition.getEnvironmentCode());
+        taskInstance.setWorkerGroup(StringUtils.isBlank(taskDefinition.getWorkerGroup()) ? DEFAULT_WORKER_GROUP
+                : taskDefinition.getWorkerGroup());
+        taskInstance.setEnvironmentCode(
+                taskDefinition.getEnvironmentCode() == 0 ? -1 : taskDefinition.getEnvironmentCode());
if (!taskInstance.getEnvironmentCode().equals(-1L)) { if (!taskInstance.getEnvironmentCode().equals(-1L)) {
Environment environment = processService.findEnvironmentByCode(taskInstance.getEnvironmentCode()); Environment environment = processService.findEnvironmentByCode(taskInstance.getEnvironmentCode());
@ -335,7 +342,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
TaskChannel taskChannel = taskPluginManager.getTaskChannel(taskInstance.getTaskType()); TaskChannel taskChannel = taskPluginManager.getTaskChannel(taskInstance.getTaskType());
ResourceParametersHelper resources = taskChannel.getResources(taskInstance.getTaskParams()); ResourceParametersHelper resources = taskChannel.getResources(taskInstance.getTaskParams());
AbstractParameters baseParam = taskPluginManager.getParameters(ParametersNode.builder().taskType(taskInstance.getTaskType()).taskParams(taskInstance.getTaskParams()).build()); AbstractParameters baseParam = taskPluginManager.getParameters(ParametersNode.builder()
.taskType(taskInstance.getTaskType()).taskParams(taskInstance.getTaskParams()).build());
Map<String, Property> propertyMap = paramParsingPreparation(taskInstance, baseParam); Map<String, Property> propertyMap = paramParsingPreparation(taskInstance, baseParam);
TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get()
.buildTaskInstanceRelatedInfo(taskInstance) .buildTaskInstanceRelatedInfo(taskInstance)
@ -360,15 +368,18 @@ public class StreamTaskExecuteRunnable implements Runnable {
*/ */
protected Map<String, String> getResourceFullNames(TaskInstance taskInstance) { protected Map<String, String> getResourceFullNames(TaskInstance taskInstance) {
Map<String, String> resourcesMap = new HashMap<>(); Map<String, String> resourcesMap = new HashMap<>();
AbstractParameters baseParam = taskPluginManager.getParameters(ParametersNode.builder().taskType(taskInstance.getTaskType()).taskParams(taskInstance.getTaskParams()).build()); AbstractParameters baseParam = taskPluginManager.getParameters(ParametersNode.builder()
.taskType(taskInstance.getTaskType()).taskParams(taskInstance.getTaskParams()).build());
if (baseParam != null) { if (baseParam != null) {
List<ResourceInfo> projectResourceFiles = baseParam.getResourceFilesList(); List<ResourceInfo> projectResourceFiles = baseParam.getResourceFilesList();
if (CollectionUtils.isNotEmpty(projectResourceFiles)) { if (CollectionUtils.isNotEmpty(projectResourceFiles)) {
// filter the resources that the resource id equals 0 // filter the resources that the resource id equals 0
Set<ResourceInfo> oldVersionResources = projectResourceFiles.stream().filter(t -> t.getId() == null).collect(Collectors.toSet()); Set<ResourceInfo> oldVersionResources =
projectResourceFiles.stream().filter(t -> t.getId() == null).collect(Collectors.toSet());
if (CollectionUtils.isNotEmpty(oldVersionResources)) { if (CollectionUtils.isNotEmpty(oldVersionResources)) {
oldVersionResources.forEach(t -> resourcesMap.put(t.getRes(), processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE))); oldVersionResources.forEach(t -> resourcesMap.put(t.getRes(),
processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE)));
} }
// get the resource id in order to get the resource names in batch // get the resource id in order to get the resource names in batch
@ -379,7 +390,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
Integer[] resourceIds = resourceIdsSet.toArray(new Integer[resourceIdsSet.size()]); Integer[] resourceIds = resourceIdsSet.toArray(new Integer[resourceIdsSet.size()]);
List<Resource> resources = processService.listResourceByIds(resourceIds); List<Resource> resources = processService.listResourceByIds(resourceIds);
resources.forEach(t -> resourcesMap.put(t.getFullName(), processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE))); resources.forEach(t -> resourcesMap.put(t.getFullName(),
processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE)));
} }
} }
} }
@ -411,7 +423,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
if (taskInstance.getState().isFinished()) { if (taskInstance.getState().isFinished()) {
streamTaskInstanceExecCacheManager.removeByTaskInstanceId(taskInstance.getId()); streamTaskInstanceExecCacheManager.removeByTaskInstanceId(taskInstance.getId());
logger.info("The stream task instance is finish, taskInstanceId:{}, state:{}", taskInstance.getId(), taskEvent.getState()); logger.info("The stream task instance is finish, taskInstanceId:{}, state:{}", taskInstance.getId(),
taskEvent.getState());
} }
return true; return true;
@ -441,7 +454,8 @@ public class StreamTaskExecuteRunnable implements Runnable {
} }
} }
public Map<String, Property> paramParsingPreparation(@NonNull TaskInstance taskInstance, @NonNull AbstractParameters parameters) { public Map<String, Property> paramParsingPreparation(@NonNull TaskInstance taskInstance,
@NonNull AbstractParameters parameters) {
// assign value to definedParams here // assign value to definedParams here
Map<String, String> globalParamsMap = taskExecuteStartCommand.getStartParams(); Map<String, String> globalParamsMap = taskExecuteStartCommand.getStartParams();
Map<String, Property> globalParams = ParamUtils.getUserDefParamsMap(globalParamsMap); Map<String, Property> globalParams = ParamUtils.getUserDefParamsMap(globalParamsMap);

11
dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java

@ -386,7 +386,7 @@ public class WorkflowExecuteRunnable implements Callable<WorkflowSubmitStatue> {
if (!processInstance.isBlocked()) { if (!processInstance.isBlocked()) {
submitPostNode(Long.toString(taskInstance.getTaskCode())); submitPostNode(Long.toString(taskInstance.getTaskCode()));
} }
} else if (taskInstance.taskCanRetry() && processInstance.getState().isReadyStop()) { } else if (taskInstance.taskCanRetry() && !processInstance.getState().isReadyStop()) {
// retry task // retry task
logger.info("Retry taskInstance taskInstance state: {}", taskInstance.getState()); logger.info("Retry taskInstance taskInstance state: {}", taskInstance.getState());
retryTaskInstance(taskInstance); retryTaskInstance(taskInstance);
@ -847,6 +847,9 @@ public class WorkflowExecuteRunnable implements Callable<WorkflowSubmitStatue> {
} }
if (task.taskCanRetry()) { if (task.taskCanRetry()) {
if (task.getState().isNeedFaultTolerance()) { if (task.getState().isNeedFaultTolerance()) {
task.setFlag(Flag.NO);
processService.updateTaskInstance(task);
// tolerantTaskInstance add to standby list directly // tolerantTaskInstance add to standby list directly
TaskInstance tolerantTaskInstance = cloneTolerantTaskInstance(task); TaskInstance tolerantTaskInstance = cloneTolerantTaskInstance(task);
addTaskToStandByList(tolerantTaskInstance); addTaskToStandByList(tolerantTaskInstance);
@ -1085,6 +1088,11 @@ public class WorkflowExecuteRunnable implements Callable<WorkflowSubmitStatue> {
// todo relative funtion: TaskInstance.retryTaskIntervalOverTime // todo relative funtion: TaskInstance.retryTaskIntervalOverTime
newTaskInstance.setState(taskInstance.getState()); newTaskInstance.setState(taskInstance.getState());
newTaskInstance.setEndTime(taskInstance.getEndTime()); newTaskInstance.setEndTime(taskInstance.getEndTime());
if (taskInstance.getState() == TaskExecutionStatus.NEED_FAULT_TOLERANCE) {
newTaskInstance.setAppLink(taskInstance.getAppLink());
}
return newTaskInstance; return newTaskInstance;
} }
@ -1105,6 +1113,7 @@ public class WorkflowExecuteRunnable implements Callable<WorkflowSubmitStatue> {
newTaskInstance.setProcessInstance(processInstance); newTaskInstance.setProcessInstance(processInstance);
newTaskInstance.setRetryTimes(taskInstance.getRetryTimes()); newTaskInstance.setRetryTimes(taskInstance.getRetryTimes());
newTaskInstance.setState(taskInstance.getState()); newTaskInstance.setState(taskInstance.getState());
newTaskInstance.setAppLink(taskInstance.getAppLink());
return newTaskInstance; return newTaskInstance;
} }

25
dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/MasterFailoverService.java

@@ -17,13 +17,7 @@
 package org.apache.dolphinscheduler.server.master.service;
-import io.micrometer.core.annotation.Counted;
-import io.micrometer.core.annotation.Timed;
-import lombok.NonNull;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.time.StopWatch;
 import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.enums.Flag;
 import org.apache.dolphinscheduler.common.enums.NodeType;
 import org.apache.dolphinscheduler.common.model.Server;
 import org.apache.dolphinscheduler.common.utils.LoggerUtils;
@@ -47,9 +41,9 @@ import org.apache.dolphinscheduler.server.utils.ProcessUtils;
 import org.apache.dolphinscheduler.service.process.ProcessService;
 import org.apache.dolphinscheduler.service.registry.RegistryClient;
 import org.apache.dolphinscheduler.spi.utils.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.time.StopWatch;
 import java.util.Date;
 import java.util.List;
@@ -57,6 +51,15 @@ import java.util.Optional;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
+import lombok.NonNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+import io.micrometer.core.annotation.Counted;
+import io.micrometer.core.annotation.Timed;
@Service @Service
public class MasterFailoverService { public class MasterFailoverService {
@ -241,7 +244,6 @@ public class MasterFailoverService {
} }
taskInstance.setState(TaskExecutionStatus.NEED_FAULT_TOLERANCE); taskInstance.setState(TaskExecutionStatus.NEED_FAULT_TOLERANCE);
taskInstance.setFlag(Flag.NO);
processService.saveTaskInstance(taskInstance); processService.saveTaskInstance(taskInstance);
} }
@ -285,7 +287,8 @@ public class MasterFailoverService {
// The processInstance is newly created // The processInstance is newly created
return false; return false;
} }
if (processInstance.getRestartTime() != null && processInstance.getRestartTime().after(beFailoveredMasterStartupTime)) { if (processInstance.getRestartTime() != null
&& processInstance.getRestartTime().after(beFailoveredMasterStartupTime)) {
// the processInstance is already be failovered. // the processInstance is already be failovered.
return false; return false;
} }

1
dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutionContextTestUtils.java

@ -36,6 +36,7 @@ import org.mockito.Mockito;
* for test use only * for test use only
*/ */
public class ExecutionContextTestUtils { public class ExecutionContextTestUtils {
public static ExecutionContext getExecutionContext(int port) { public static ExecutionContext getExecutionContext(int port) {
TaskInstance taskInstance = Mockito.mock(TaskInstance.class); TaskInstance taskInstance = Mockito.mock(TaskInstance.class);
ProcessDefinition processDefinition = Mockito.mock(ProcessDefinition.class); ProcessDefinition processDefinition = Mockito.mock(ProcessDefinition.class);

23
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java

@@ -17,7 +17,6 @@
 package org.apache.dolphinscheduler.service.log;
-import lombok.NonNull;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.LoggerUtils;
 import org.apache.dolphinscheduler.common.utils.NetUtils;
@@ -37,12 +36,17 @@ import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand;
 import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
 import org.apache.dolphinscheduler.remote.exceptions.RemotingException;
 import org.apache.dolphinscheduler.remote.utils.Host;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.List;
+import javax.annotation.Nullable;
+import lombok.NonNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/** /**
* log client * log client
*/ */
@ -92,7 +96,8 @@ public class LogClientService implements AutoCloseable {
* @return log content * @return log content
*/ */
public String rollViewLog(String host, int port, String path, int skipLineNum, int limit) { public String rollViewLog(String host, int port, String path, int skipLineNum, int limit) {
logger.info("roll view log, host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, skipLineNum, limit); logger.info("roll view log, host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path,
skipLineNum, limit);
RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit); RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit);
String result = ""; String result = "";
final Host address = new Host(host, port); final Host address = new Host(host, port);
@ -202,18 +207,20 @@ public class LogClientService implements AutoCloseable {
return result; return result;
} }
public @Nullable List<String> getAppIds(@NonNull String host, int port, @NonNull String taskLogFilePath) throws RemotingException, InterruptedException { public @Nullable List<String> getAppIds(@NonNull String host, int port,
@NonNull String taskLogFilePath) throws RemotingException, InterruptedException {
logger.info("Begin to get appIds from worker: {}:{} taskLogPath: {}", host, port, taskLogFilePath); logger.info("Begin to get appIds from worker: {}:{} taskLogPath: {}", host, port, taskLogFilePath);
final Host workerAddress = new Host(host, port); final Host workerAddress = new Host(host, port);
List<String> appIds = null; List<String> appIds = null;
try { try {
if (NetUtils.getHost().equals(host)) { if (NetUtils.getHost().equals(host)) {
appIds = LogUtils.getAppIdsFromLogFile(taskLogFilePath); appIds = new ArrayList<>(LogUtils.getAppIdsFromLogFile(taskLogFilePath));
} else { } else {
final Command command = new GetAppIdRequestCommand(taskLogFilePath).convert2Command(); final Command command = new GetAppIdRequestCommand(taskLogFilePath).convert2Command();
Command response = this.client.sendSync(workerAddress, command, LOG_REQUEST_TIMEOUT); Command response = this.client.sendSync(workerAddress, command, LOG_REQUEST_TIMEOUT);
if (response != null) { if (response != null) {
GetAppIdResponseCommand responseCommand = JSONUtils.parseObject(response.getBody(), GetAppIdResponseCommand.class); GetAppIdResponseCommand responseCommand =
JSONUtils.parseObject(response.getBody(), GetAppIdResponseCommand.class);
appIds = responseCommand.getAppIds(); appIds = responseCommand.getAppIds();
} }
} }

86
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractRemoteTask.java

@ -0,0 +1,86 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.api.model.ApplicationInfo;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.util.Set;
public abstract class AbstractRemoteTask extends AbstractTask {
/**
* constructor
*
* @param taskExecutionContext taskExecutionContext
*/
protected AbstractRemoteTask(TaskExecutionContext taskExecutionContext) {
super(taskExecutionContext);
}
@Override
public void cancel() throws TaskException {
this.cancelApplication();
}
public abstract Set<String> getApplicationIds() throws TaskException;
public abstract void cancelApplication() throws TaskException;
/**
* If appIds is empty, submit a new remote application; otherwise, just track application status.
*
* @param taskCallBack
* @throws TaskException
*/
@Override
public void handle(TaskCallBack taskCallBack) throws TaskException {
// if appIds is not empty, just track application status, avoid resubmitting remote task
if (StringUtils.isNotEmpty(taskRequest.getAppIds())) {
setAppIds(taskRequest.getAppIds());
trackApplicationStatus();
return;
}
// submit a remote application
submitApplication();
if (StringUtils.isNotEmpty(getAppIds())) {
taskRequest.setAppIds(getAppIds());
// callback to update remote application info
taskCallBack.updateRemoteApplicationInfo(taskRequest.getTaskInstanceId(), new ApplicationInfo(getAppIds()));
}
// keep tracking application status
trackApplicationStatus();
}
/**
* submit a new remote application and get application info
*
* @return
* @throws TaskException
*/
public abstract void submitApplication() throws TaskException;
/**
* keep checking application status
* @throws TaskException
*/
public abstract void trackApplicationStatus() throws TaskException;
}
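The new AbstractRemoteTask splits the old blocking handle() into submitApplication(), trackApplicationStatus() and cancelApplication(), and resumes tracking instead of resubmitting whenever taskRequest already carries appIds. A minimal sketch of a task plugin written against this contract; DemoRemoteTask and DemoEngineClient are hypothetical names used only for illustration and are not part of this patch:

import java.util.Collections;
import java.util.Set;

import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;

public class DemoRemoteTask extends AbstractRemoteTask {

    // hypothetical remote engine client, stands in for whatever SDK a real plugin would use
    public interface DemoEngineClient {
        String submit(String taskParams);
        boolean waitForCompletion(String appId);
        void kill(String appId);
    }

    private final DemoEngineClient client;

    public DemoRemoteTask(TaskExecutionContext taskExecutionContext, DemoEngineClient client) {
        super(taskExecutionContext);
        this.client = client;
    }

    @Override
    public void submitApplication() throws TaskException {
        // remember the remote application id so handle() can publish it through TaskCallBack
        setAppIds(client.submit(taskRequest.getTaskParams()));
    }

    @Override
    public void trackApplicationStatus() throws TaskException {
        // poll the already-submitted application until it reaches a terminal state
        setExitStatusCode(client.waitForCompletion(getAppIds()) ? 0 : 1);
    }

    @Override
    public void cancelApplication() throws TaskException {
        client.kill(getAppIds());
    }

    @Override
    public Set<String> getApplicationIds() throws TaskException {
        return getAppIds() == null ? Collections.emptySet() : Collections.singleton(getAppIds());
    }

    @Override
    public AbstractParameters getParameters() {
        return null; // parameter parsing omitted in this sketch
    }
}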

141
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTask.java

@@ -18,6 +18,7 @@
 package org.apache.dolphinscheduler.plugin.task.api;
 import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.plugin.task.api.model.Property;
 import org.apache.dolphinscheduler.plugin.task.api.model.TaskAlertInfo;
 import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
 import org.apache.dolphinscheduler.spi.utils.StringUtils;
@ -29,19 +30,28 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
+import java.util.Map;
 import java.util.Set;
+import java.util.StringJoiner;
+import java.util.concurrent.LinkedBlockingQueue;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
 /**
  * executive task
  */
 public abstract class AbstractTask {
-    /**
-     * rules for extracting application ID
-     */
-    protected static final Pattern YARN_APPLICATION_REGEX = Pattern.compile(TaskConstants.YARN_APPLICATION_REGEX);
+    public static final Marker FINALIZE_SESSION_MARKER = MarkerFactory.getMarker("FINALIZE_SESSION");
+    protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass()));
+    public String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
/** /**
* varPool string * varPool string
@ -68,11 +78,6 @@ public abstract class AbstractTask {
*/ */
protected String appIds; protected String appIds;
/**
* cancel flag
*/
protected volatile boolean cancel = false;
/** /**
* exit code * exit code
*/ */
@ -101,60 +106,9 @@ public abstract class AbstractTask {
return null; return null;
} }
public abstract void handle() throws TaskException; public abstract void handle(TaskCallBack taskCallBack) throws TaskException;
/**
* cancel application
*
* @param status status
* @throws Exception exception
*/
public void cancelApplication(boolean status) throws Exception {
this.cancel = status;
}
/**
* get application ids
* @return
* @throws IOException
*/
public Set<String> getApplicationIds() throws IOException {
Set<String> appIds = new HashSet<>();
File file = new File(taskRequest.getLogPath()); public abstract void cancel() throws TaskException;
if (!file.exists()) {
return appIds;
}
/*
* analysis log? get submitted yarn application id
*/
try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(taskRequest.getLogPath()), StandardCharsets.UTF_8))) {
String line;
while ((line = br.readLine()) != null) {
String appId = findAppId(line);
if (StringUtils.isNotEmpty(appId)) {
appIds.add(appId);
}
}
}
return appIds;
}
/**
* find app id
*
* @param line line
* @return appid
*/
protected String findAppId(String line) {
Matcher matcher = YARN_APPLICATION_REGEX.matcher(line);
if (matcher.find()) {
return matcher.group();
}
return null;
}
public void setVarPool(String varPool) { public void setVarPool(String varPool) {
this.varPool = varPool; this.varPool = varPool;
@ -177,14 +131,6 @@ public abstract class AbstractTask {
this.exitStatusCode = exitStatusCode; this.exitStatusCode = exitStatusCode;
} }
public String getAppIds() {
return appIds;
}
public void setAppIds(String appIds) {
this.appIds = appIds;
}
public int getProcessId() { public int getProcessId() {
return processId; return processId;
} }
@ -201,6 +147,14 @@ public abstract class AbstractTask {
this.resultString = resultString; this.resultString = resultString;
} }
public String getAppIds() {
return appIds;
}
public void setAppIds(String appIds) {
this.appIds = appIds;
}
public boolean getNeedAlert() { public boolean getNeedAlert() {
return needAlert; return needAlert;
} }
@ -245,4 +199,51 @@ public abstract class AbstractTask {
return status; return status;
} }
/**
* log handle
*
* @param logs log list
*/
public void logHandle(LinkedBlockingQueue<String> logs) {
// note that the "new line" is added here to facilitate log parsing
if (logs.contains(FINALIZE_SESSION_MARKER.toString())) {
logger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString());
} else {
StringJoiner joiner = new StringJoiner("\n\t");
while (!logs.isEmpty()) {
joiner.add(logs.poll());
}
logger.info(" -> {}", joiner);
}
}
/**
* regular expressions match the contents between two specified strings
*
* @param content content
* @param rgex rgex
* @param sqlParamsMap sql params map
* @param paramsPropsMap params props map
*/
public void setSqlParamsMap(String content, String rgex, Map<Integer, Property> sqlParamsMap,
Map<String, Property> paramsPropsMap,int taskInstanceId) {
Pattern pattern = Pattern.compile(rgex);
Matcher m = pattern.matcher(content);
int index = 1;
while (m.find()) {
String paramName = m.group(1);
Property prop = paramsPropsMap.get(paramName);
if (prop == null) {
logger.error("setSqlParamsMap: No Property with paramName: {} is found in paramsPropsMap of task instance"
+ " with id: {}. So couldn't put Property in sqlParamsMap.", paramName, taskInstanceId);
} else {
sqlParamsMap.put(index, prop);
index++;
logger.info("setSqlParamsMap: Property with paramName: {} put in sqlParamsMap of content {} successfully.", paramName, content);
}
}
}
} }
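The rgex field that moved into AbstractTask matches ${...} placeholders, optionally wrapped in single or double quotes, and setSqlParamsMap records each captured name in order. A standalone check of that matching behaviour (example content only, not part of the patch):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RgexDemo {
    public static void main(String[] args) {
        String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
        String content = "select * from t where id = '${id}' and name = ${name}";
        Matcher m = Pattern.compile(rgex).matcher(content);
        while (m.find()) {
            // prints "id" then "name" -- group(1) is the parameter name looked up in paramsPropsMap
            System.out.println(m.group(1));
        }
    }
}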

97
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java

@ -1,97 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import java.util.Map;
import java.util.StringJoiner;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
public abstract class AbstractTaskExecutor extends AbstractTask {
public static final Marker FINALIZE_SESSION_MARKER = MarkerFactory.getMarker("FINALIZE_SESSION");
protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass()));
public String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
/**
* constructor
*
* @param taskRequest taskRequest
*/
protected AbstractTaskExecutor(TaskExecutionContext taskRequest) {
super(taskRequest);
}
/**
* log handle
*
* @param logs log list
*/
public void logHandle(LinkedBlockingQueue<String> logs) {
// note that the "new line" is added here to facilitate log parsing
if (logs.contains(FINALIZE_SESSION_MARKER.toString())) {
logger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString());
} else {
StringJoiner joiner = new StringJoiner("\n\t");
while (!logs.isEmpty()) {
joiner.add(logs.poll());
}
logger.info(" -> {}", joiner);
}
}
/**
* regular expressions match the contents between two specified strings
*
* @param content content
* @param rgex rgex
* @param sqlParamsMap sql params map
* @param paramsPropsMap params props map
*/
public void setSqlParamsMap(String content, String rgex, Map<Integer, Property> sqlParamsMap,
Map<String, Property> paramsPropsMap,int taskInstanceId) {
Pattern pattern = Pattern.compile(rgex);
Matcher m = pattern.matcher(content);
int index = 1;
while (m.find()) {
String paramName = m.group(1);
Property prop = paramsPropsMap.get(paramName);
if (prop == null) {
logger.error("setSqlParamsMap: No Property with paramName: {} is found in paramsPropsMap of task instance"
+ " with id: {}. So couldn't put Property in sqlParamsMap.", paramName, taskInstanceId);
} else {
sqlParamsMap.put(index, prop);
index++;
logger.info("setSqlParamsMap: Property with paramName: {} put in sqlParamsMap of content {} successfully.", paramName, content);
}
}
}
}

54
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java

@@ -19,13 +19,17 @@ package org.apache.dolphinscheduler.plugin.task.api;
 import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
 import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse;
+import org.apache.dolphinscheduler.plugin.task.api.utils.LogUtils;
+import java.util.Set;
+import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 /**
  * abstract yarn task
  */
-public abstract class AbstractYarnTask extends AbstractTaskExecutor {
+public abstract class AbstractYarnTask extends AbstractRemoteTask {
/** /**
* process task * process task
*/ */
@ -48,8 +52,9 @@ public abstract class AbstractYarnTask extends AbstractTaskExecutor {
logger); logger);
} }
// todo split handle to submit and track
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
// SHELL task exit code // SHELL task exit code
TaskResponse response = shellCommandExecutor.run(buildCommand()); TaskResponse response = shellCommandExecutor.run(buildCommand());
@ -69,17 +74,54 @@ public abstract class AbstractYarnTask extends AbstractTaskExecutor {
} }
} }
// todo
@Override
public void submitApplication() throws TaskException {
}
// todo
@Override
public void trackApplicationStatus() throws TaskException {
}
/** /**
* cancel application * cancel application
* *
* @param status status * @throws TaskException exception
* @throws Exception exception
*/ */
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
cancel = true;
// cancel process // cancel process
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
}
/**
* get application ids
* @return
* @throws TaskException
*/
public Set<String> getApplicationIds() throws TaskException {
return LogUtils.getAppIdsFromLogFile(taskRequest.getLogPath(), logger);
}
/**
* find app id
*
* @param line line
* @return appid
*/
protected String findAppId(String line) {
Matcher matcher = YARN_APPLICATION_REGEX.matcher(line);
if (matcher.find()) {
return matcher.group();
}
return null;
} }
/** /**

25
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskCallBack.java

@ -0,0 +1,25 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.api.model.ApplicationInfo;
public interface TaskCallBack {
public void updateRemoteApplicationInfo(int taskInstanceId, ApplicationInfo applicationInfo);
}
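TaskCallBack is the hook a remote task uses inside handle(TaskCallBack) to push the remote application id back to the caller as soon as it is known. A minimal illustrative implementation that only records the id in memory; the real worker-side callback is outside this diff:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.model.ApplicationInfo;

public class InMemoryTaskCallBack implements TaskCallBack {

    private final Map<Integer, ApplicationInfo> appInfoByTaskInstanceId = new ConcurrentHashMap<>();

    @Override
    public void updateRemoteApplicationInfo(int taskInstanceId, ApplicationInfo applicationInfo) {
        // a real callback would persist or report this, so that a failover can re-attach to the
        // running remote application via its appIds instead of resubmitting it
        appInfoByTaskInstanceId.put(taskInstanceId, applicationInfo);
    }
}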

6
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/TaskExecutionStatus.java

@@ -17,11 +17,11 @@
 package org.apache.dolphinscheduler.plugin.task.api.enums;
-import com.baomidou.mybatisplus.annotation.EnumValue;
 import java.util.HashMap;
 import java.util.Map;
+import com.baomidou.mybatisplus.annotation.EnumValue;
public enum TaskExecutionStatus { public enum TaskExecutionStatus {
SUBMITTED_SUCCESS(0, "submit success"), SUBMITTED_SUCCESS(0, "submit success"),
@ -80,7 +80,7 @@ public enum TaskExecutionStatus {
} }
public boolean isFailure() { public boolean isFailure() {
return this == TaskExecutionStatus.FAILURE; return this == TaskExecutionStatus.FAILURE || this == NEED_FAULT_TOLERANCE;
} }
public boolean isPause() { public boolean isPause() {

25
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTask.java

@@ -17,13 +17,15 @@
 package org.apache.dolphinscheduler.plugin.task.api.k8s;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
 import org.apache.dolphinscheduler.plugin.task.api.TaskException;
 import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
 import org.apache.dolphinscheduler.plugin.task.api.k8s.impl.K8sTaskExecutor;
 import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse;
-public abstract class AbstractK8sTask extends AbstractTaskExecutor {
+public abstract class AbstractK8sTask extends AbstractRemoteTask {
/** /**
* process task * process task
*/ */
@ -38,8 +40,9 @@ public abstract class AbstractK8sTask extends AbstractTaskExecutor {
this.abstractK8sTaskExecutor = new K8sTaskExecutor(logger,taskRequest); this.abstractK8sTaskExecutor = new K8sTaskExecutor(logger,taskRequest);
} }
// todo split handle to submit and track
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
TaskResponse response = abstractK8sTaskExecutor.run(buildCommand()); TaskResponse response = abstractK8sTaskExecutor.run(buildCommand());
setExitStatusCode(response.getExitStatusCode()); setExitStatusCode(response.getExitStatusCode());
@ -50,15 +53,25 @@ public abstract class AbstractK8sTask extends AbstractTaskExecutor {
} }
} }
// todo
@Override
public void submitApplication() throws TaskException {
}
// todo
@Override
public void trackApplicationStatus() throws TaskException {
}
/** /**
* cancel application * cancel application
* *
* @param status status
* @throws Exception exception * @throws Exception exception
*/ */
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
cancel = true;
// cancel process // cancel process
abstractK8sTaskExecutor.cancelApplication(buildCommand()); abstractK8sTaskExecutor.cancelApplication(buildCommand());
} }

24
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/BaseLoopTaskExecutor.java

@@ -17,10 +17,12 @@
 package org.apache.dolphinscheduler.plugin.task.api.loop;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
 import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
 import org.apache.dolphinscheduler.plugin.task.api.TaskException;
 import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
+import org.apache.dolphinscheduler.plugin.task.api.model.ApplicationInfo;
 import org.apache.dolphinscheduler.plugin.task.api.utils.RetryUtils;
 import java.time.Duration;
@ -34,7 +36,12 @@ import lombok.NonNull;
* <p> * <p>
* The loop task type means, we will submit a task, and loop the task status until the task is finished. * The loop task type means, we will submit a task, and loop the task status until the task is finished.
*/ */
public abstract class BaseLoopTaskExecutor extends AbstractTaskExecutor { public abstract class BaseLoopTaskExecutor extends AbstractRemoteTask {
/**
* cancel flag
*/
protected volatile boolean cancel = false;
/** /**
* The task instance info will be set when task has submitted successful. * The task instance info will be set when task has submitted successful.
@ -46,11 +53,13 @@ public abstract class BaseLoopTaskExecutor extends AbstractTaskExecutor {
} }
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
final long loopInterval = getTaskInstanceStatusQueryInterval().toMillis(); final long loopInterval = getTaskInstanceStatusQueryInterval().toMillis();
loopTaskInstanceInfo = submitLoopTask(); loopTaskInstanceInfo = submitLoopTask();
this.appIds = loopTaskInstanceInfo.getTaskInstanceId(); this.setAppIds(loopTaskInstanceInfo.getTaskInstanceId());
taskCallBack.updateRemoteApplicationInfo(taskRequest.getTaskInstanceId(), new ApplicationInfo(getAppIds()));
// loop the task status until the task is finished or task has been canceled. // loop the task status until the task is finished or task has been canceled.
// we use retry utils here to avoid the task status query failure due to network failure. // we use retry utils here to avoid the task status query failure due to network failure.
// the default retry policy is 3 times, and the interval is 1 second. // the default retry policy is 3 times, and the interval is 1 second.
@ -94,8 +103,7 @@ public abstract class BaseLoopTaskExecutor extends AbstractTaskExecutor {
/** /**
* Query the loop task status, if query failed, directly throw exception * Query the loop task status, if query failed, directly throw exception
*/ */
public abstract @NonNull LoopTaskInstanceStatus queryTaskInstanceStatus(@NonNull LoopTaskInstanceInfo taskInstanceInfo) public abstract @NonNull LoopTaskInstanceStatus queryTaskInstanceStatus(@NonNull LoopTaskInstanceInfo taskInstanceInfo) throws TaskException;
throws TaskException;
/** /**
* Get the interval time to query the loop task status * Get the interval time to query the loop task status
@ -110,8 +118,8 @@ public abstract class BaseLoopTaskExecutor extends AbstractTaskExecutor {
public abstract void cancelLoopTaskInstance(@Nullable LoopTaskInstanceInfo taskInstanceInfo) throws TaskException; public abstract void cancelLoopTaskInstance(@Nullable LoopTaskInstanceInfo taskInstanceInfo) throws TaskException;
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
this.cancel = true;
cancelLoopTaskInstance(loopTaskInstanceInfo); cancelLoopTaskInstance(loopTaskInstanceInfo);
super.cancelApplication(status);
} }
} }

30
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/model/ApplicationInfo.java

@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ApplicationInfo {
String appIds;
}

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/stream/StreamTask.java

@@ -17,8 +17,6 @@
 package org.apache.dolphinscheduler.plugin.task.api.stream;
-import java.io.IOException;
 public interface StreamTask {
     public void savePoint() throws Exception;
 }

25
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtils.java

@@ -17,47 +17,46 @@
 package org.apache.dolphinscheduler.plugin.task.api.utils;
-import lombok.NonNull;
-import lombok.experimental.UtilityClass;
-import lombok.extern.slf4j.Slf4j;
 import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
-import org.slf4j.Logger;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Stream;
+import lombok.NonNull;
+import lombok.experimental.UtilityClass;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
@Slf4j @Slf4j
@UtilityClass @UtilityClass
public class LogUtils { public class LogUtils {
private static final Pattern APPLICATION_REGEX = Pattern.compile(TaskConstants.YARN_APPLICATION_REGEX); private static final Pattern APPLICATION_REGEX = Pattern.compile(TaskConstants.YARN_APPLICATION_REGEX);
public List<String> getAppIdsFromLogFile(@NonNull String logPath) { public Set<String> getAppIdsFromLogFile(@NonNull String logPath) {
return getAppIdsFromLogFile(logPath, log); return getAppIdsFromLogFile(logPath, log);
} }
public List<String> getAppIdsFromLogFile(@NonNull String logPath, Logger logger) { public Set<String> getAppIdsFromLogFile(@NonNull String logPath, Logger logger) {
File logFile = new File(logPath); File logFile = new File(logPath);
if (!logFile.exists() || !logFile.isFile()) { if (!logFile.exists() || !logFile.isFile()) {
return Collections.emptyList(); return Collections.emptySet();
} }
Set<String> appIds = new HashSet<>(); Set<String> appIds = new HashSet<>();
try (Stream<String> stream = Files.lines(Paths.get(logPath))) { try (Stream<String> stream = Files.lines(Paths.get(logPath))) {
stream.filter(line -> { stream.filter(line -> {
Matcher matcher = APPLICATION_REGEX.matcher(line); Matcher matcher = APPLICATION_REGEX.matcher(line);
return matcher.find(); return matcher.find();
} }).forEach(line -> {
).forEach(line -> {
Matcher matcher = APPLICATION_REGEX.matcher(line); Matcher matcher = APPLICATION_REGEX.matcher(line);
if (matcher.find()) { if (matcher.find()) {
String appId = matcher.group(); String appId = matcher.group();
@ -66,10 +65,10 @@ public class LogUtils {
} }
} }
}); });
return new ArrayList<>(appIds); return appIds;
} catch (IOException e) { } catch (IOException e) {
logger.error("Get appId from log file erro, logPath: {}", logPath, e); logger.error("Get appId from log file erro, logPath: {}", logPath, e);
return Collections.emptyList(); return Collections.emptySet();
} }
} }
} }
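getAppIdsFromLogFile now returns a de-duplicated Set<String> instead of a List<String>, so call sites that still need list semantics (for example LogClientService.getAppIds above) wrap the result explicitly. A small usage sketch with an illustrative log path:

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.apache.dolphinscheduler.plugin.task.api.utils.LogUtils;

public class AppIdLookupDemo {
    public static void main(String[] args) {
        // hypothetical task log path
        Set<String> appIds = LogUtils.getAppIdsFromLogFile("/tmp/dolphinscheduler/task.log");
        // wrap in a list where ordering-based APIs are still in use
        List<String> asList = new ArrayList<>(appIds);
        asList.forEach(System.out::println);
    }
}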

9
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtilsTest.java

@@ -17,11 +17,12 @@
 package org.apache.dolphinscheduler.plugin.task.api.utils;
-import com.google.common.collect.Lists;
+import java.util.Set;
 import org.junit.Assert;
 import org.junit.Test;
-import java.util.List;
+import com.google.common.collect.Sets;
public class LogUtilsTest { public class LogUtilsTest {
@ -30,7 +31,7 @@ public class LogUtilsTest {
@Test @Test
public void getAppIdsFromLogFile() { public void getAppIdsFromLogFile() {
List<String> appIds = LogUtils.getAppIdsFromLogFile(APP_ID_FILE); Set<String> appIds = LogUtils.getAppIdsFromLogFile(APP_ID_FILE);
Assert.assertEquals(Lists.newArrayList("application_1548381669007_1234"), appIds); Assert.assertEquals(Sets.newHashSet("application_1548381669007_1234"), appIds);
} }
} }

22
dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTask.java

@@ -18,9 +18,11 @@
 package org.apache.dolphinscheduler.plugin.task.chunjun;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.SystemUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.commons.lang.SystemUtils;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
 import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
 import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
 import org.apache.dolphinscheduler.plugin.task.api.TaskException;
 import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@ -41,6 +43,7 @@ import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions; import java.nio.file.attribute.PosixFilePermissions;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@ -51,7 +54,7 @@ import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.RWXR_XR_
/** /**
* chunjun task * chunjun task
*/ */
public class ChunJunTask extends AbstractTaskExecutor { public class ChunJunTask extends AbstractTask {
/** /**
* chunjun path * chunjun path
*/ */
@ -104,7 +107,7 @@ public class ChunJunTask extends AbstractTaskExecutor {
* @throws TaskException exception * @throws TaskException exception
*/ */
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap(); Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap();
@ -113,7 +116,9 @@ public class ChunJunTask extends AbstractTaskExecutor {
TaskResponse commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath); TaskResponse commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath);
setExitStatusCode(commandExecuteResult.getExitStatusCode()); setExitStatusCode(commandExecuteResult.getExitStatusCode());
setAppIds(String.join(TaskConstants.COMMA, getApplicationIds()));
// todo get applicationId
setAppIds(String.join(TaskConstants.COMMA, Collections.emptySet()));
setProcessId(commandExecuteResult.getProcessId()); setProcessId(commandExecuteResult.getProcessId());
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
@ -251,13 +256,16 @@ public class ChunJunTask extends AbstractTaskExecutor {
/** /**
* cancel ChunJun process * cancel ChunJun process
* *
* @param cancelApplication cancelApplication
* @throws Exception if error throws Exception * @throws Exception if error throws Exception
*/ */
@Override @Override
public void cancelApplication(boolean cancelApplication) throws Exception { public void cancel() throws TaskException {
// cancel process // cancel process
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
} }
} }
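For the local task plugins touched in this commit (ChunJun, DataX, DVC, MLflow, HTTP), the migration pattern is the same: extend AbstractTask instead of AbstractTaskExecutor, accept a TaskCallBack in handle(), and replace cancelApplication(boolean) with cancel(), wrapping any failure in TaskException. A minimal sketch of that shape follows; the class name, command string and constructor signature are illustrative assumptions, not code from this patch, and other hooks such as init() are omitted.

    import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
    import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
    import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
    import org.apache.dolphinscheduler.plugin.task.api.TaskException;
    import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
    import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse;
    import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;

    public class ExampleLocalTask extends AbstractTask {

        // created during init() in a real plugin
        private ShellCommandExecutor shellCommandExecutor;

        protected ExampleLocalTask(TaskExecutionContext taskExecutionContext) {
            super(taskExecutionContext); // assumed: AbstractTask keeps the old constructor shape
        }

        @Override
        public void handle(TaskCallBack taskCallBack) throws TaskException {
            try {
                // run the locally built command and record the result on the task
                TaskResponse result = shellCommandExecutor.run("echo example");
                setExitStatusCode(result.getExitStatusCode());
                setProcessId(result.getProcessId());
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new TaskException("Execute task failed", e);
            } catch (Exception e) {
                throw new TaskException("Execute task failed", e);
            }
        }

        @Override
        public void cancel() throws TaskException {
            // cancel the local process; checked exceptions are wrapped, as in the real plugins
            try {
                shellCommandExecutor.cancelApplication();
            } catch (Exception e) {
                throw new TaskException("cancel application error", e);
            }
        }

        @Override
        public AbstractParameters getParameters() {
            return null; // a real plugin returns its parsed parameters object
        }
    }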

18
dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java

@ -33,8 +33,9 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.SystemUtils; import org.apache.commons.lang3.SystemUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils; import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@ -75,7 +76,7 @@ import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUt
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.RWXR_XR_X; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.RWXR_XR_X;
public class DataxTask extends AbstractTaskExecutor { public class DataxTask extends AbstractTask {
/** /**
* jvm parameters * jvm parameters
*/ */
@ -147,7 +148,7 @@ public class DataxTask extends AbstractTaskExecutor {
* @throws Exception if error throws Exception * @throws Exception if error throws Exception
*/ */
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
// replace placeholder,and combine local and global parameters // replace placeholder,and combine local and global parameters
Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap(); Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap();
@ -158,7 +159,6 @@ public class DataxTask extends AbstractTaskExecutor {
TaskResponse commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath); TaskResponse commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath);
setExitStatusCode(commandExecuteResult.getExitStatusCode()); setExitStatusCode(commandExecuteResult.getExitStatusCode());
setAppIds(String.join(TaskConstants.COMMA, getApplicationIds()));
setProcessId(commandExecuteResult.getProcessId()); setProcessId(commandExecuteResult.getProcessId());
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
@ -175,14 +175,16 @@ public class DataxTask extends AbstractTaskExecutor {
/** /**
* cancel DataX process * cancel DataX process
* *
* @param cancelApplication cancelApplication * @throws TaskException if error throws Exception
* @throws Exception if error throws Exception
*/ */
@Override @Override
public void cancelApplication(boolean cancelApplication) public void cancel() throws TaskException {
throws Exception {
// cancel process // cancel process
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
} }
/** /**

30
dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java

@ -21,7 +21,10 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.MissingNode; import com.fasterxml.jackson.databind.node.MissingNode;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@ -40,12 +43,14 @@ import org.apache.http.util.EntityUtils;
import java.net.URI; import java.net.URI;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Set;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
public class DinkyTask extends AbstractTaskExecutor { public class DinkyTask extends AbstractRemoteTask {
/** /**
* taskExecutionContext * taskExecutionContext
@ -67,6 +72,11 @@ public class DinkyTask extends AbstractTaskExecutor {
this.taskExecutionContext = taskExecutionContext; this.taskExecutionContext = taskExecutionContext;
} }
@Override
public Set<String> getApplicationIds() throws TaskException {
return Collections.emptySet();
}
@Override @Override
public void init() { public void init() {
final String taskParams = taskExecutionContext.getTaskParams(); final String taskParams = taskExecutionContext.getTaskParams();
@ -77,8 +87,9 @@ public class DinkyTask extends AbstractTaskExecutor {
} }
} }
// todo split handle to submit and track
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
String address = this.dinkyParameters.getAddress(); String address = this.dinkyParameters.getAddress();
@ -130,6 +141,16 @@ public class DinkyTask extends AbstractTaskExecutor {
} }
} }
@Override
public void submitApplication() throws TaskException {
}
@Override
public void trackApplicationStatus() throws TaskException {
}
/** /**
* map dinky task status to exitStatusCode * map dinky task status to exitStatusCode
* *
@ -166,8 +187,7 @@ public class DinkyTask extends AbstractTaskExecutor {
} }
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
super.cancelApplication(status);
String address = this.dinkyParameters.getAddress(); String address = this.dinkyParameters.getAddress();
String taskId = this.dinkyParameters.getTaskId(); String taskId = this.dinkyParameters.getTaskId();
logger.info("trying terminate dinky task, taskId: {}, address: {}, taskId: {}", logger.info("trying terminate dinky task, taskId: {}, address: {}, taskId: {}",

22
dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTask.java

@ -17,9 +17,11 @@
package org.apache.dolphinscheduler.plugin.task.dvc; package org.apache.dolphinscheduler.plugin.task.dvc;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse; import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse;
@ -29,12 +31,10 @@ import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
/** /**
* shell task * shell task
*/ */
public class DvcTask extends AbstractTaskExecutor { public class DvcTask extends AbstractTask {
/** /**
* dvc parameters * dvc parameters
@ -75,13 +75,12 @@ public class DvcTask extends AbstractTaskExecutor {
} }
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
// construct process // construct process
String command = buildCommand(); String command = buildCommand();
TaskResponse commandExecuteResult = shellCommandExecutor.run(command); TaskResponse commandExecuteResult = shellCommandExecutor.run(command);
setExitStatusCode(commandExecuteResult.getExitStatusCode()); setExitStatusCode(commandExecuteResult.getExitStatusCode());
setAppIds(String.join(TaskConstants.COMMA, getApplicationIds()));
setProcessId(commandExecuteResult.getProcessId()); setProcessId(commandExecuteResult.getProcessId());
parameters.dealOutParam(shellCommandExecutor.getVarPool()); parameters.dealOutParam(shellCommandExecutor.getVarPool());
} catch (InterruptedException e) { } catch (InterruptedException e) {
@ -97,9 +96,13 @@ public class DvcTask extends AbstractTaskExecutor {
} }
@Override @Override
public void cancelApplication(boolean cancelApplication) throws Exception { public void cancel() throws TaskException {
// cancel process // cancel process
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
} }
public String buildCommand() { public String buildCommand() {
@ -159,12 +162,9 @@ public class DvcTask extends AbstractTaskExecutor {
} }
@Override @Override
public AbstractParameters getParameters() { public AbstractParameters getParameters() {
return parameters; return parameters;
} }
} }

5
dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java

@ -22,7 +22,8 @@ import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKN
import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL;
import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
@ -44,7 +45,7 @@ import com.fasterxml.jackson.databind.PropertyNamingStrategy;
* *
* @since v3.1.0 * @since v3.1.0
*/ */
public abstract class AbstractEmrTask extends AbstractTaskExecutor { public abstract class AbstractEmrTask extends AbstractRemoteTask {
final TaskExecutionContext taskExecutionContext; final TaskExecutionContext taskExecutionContext;
EmrParameters emrParameters; EmrParameters emrParameters;

32
dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java

@ -30,11 +30,15 @@ import com.amazonaws.services.elasticmapreduce.model.StepState;
import com.amazonaws.services.elasticmapreduce.model.StepStatus; import com.amazonaws.services.elasticmapreduce.model.StepStatus;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
/** /**
@ -62,7 +66,12 @@ public class EmrAddStepsTask extends AbstractEmrTask {
} }
@Override @Override
public void handle() throws TaskException { public Set<String> getApplicationIds() throws TaskException {
return Collections.emptySet();
}
@Override
public void submitApplication() throws TaskException {
StepStatus stepStatus = null; StepStatus stepStatus = null;
try { try {
AddJobFlowStepsRequest addJobFlowStepsRequest = createAddJobFlowStepsRequest(); AddJobFlowStepsRequest addJobFlowStepsRequest = createAddJobFlowStepsRequest();
@ -77,13 +86,27 @@ public class EmrAddStepsTask extends AbstractEmrTask {
stepStatus = getStepStatus(); stepStatus = getStepStatus();
} catch (EmrTaskException | SdkBaseException e) {
logger.error("emr task submit failed with error", e);
throw new TaskException("emr task submit fail", e);
} finally {
final int exitStatusCode = calculateExitStatusCode(stepStatus);
setExitStatusCode(exitStatusCode);
logger.info("emr task finished with step status : {}", stepStatus);
}
}
@Override
public void trackApplicationStatus() throws TaskException {
StepStatus stepStatus = getStepStatus();
try {
while (waitingStateSet.contains(stepStatus.getState())) { while (waitingStateSet.contains(stepStatus.getState())) {
TimeUnit.SECONDS.sleep(10); TimeUnit.SECONDS.sleep(10);
stepStatus = getStepStatus(); stepStatus = getStepStatus();
} }
} catch (EmrTaskException | SdkBaseException e) { } catch (EmrTaskException | SdkBaseException e) {
logger.error("emr task submit failed with error", e); logger.error("emr task failed with error", e);
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
throw new TaskException("Execute emr task failed", e); throw new TaskException("Execute emr task failed", e);
@ -154,8 +177,7 @@ public class EmrAddStepsTask extends AbstractEmrTask {
} }
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
super.cancelApplication(status);
logger.info("trying cancel emr step, taskId:{}, clusterId:{}, stepId:{}", this.taskExecutionContext.getTaskInstanceId(), clusterId, stepId); logger.info("trying cancel emr step, taskId:{}, clusterId:{}, stepId:{}", this.taskExecutionContext.getTaskInstanceId(), clusterId, stepId);
CancelStepsRequest cancelStepsRequest = new CancelStepsRequest().withClusterId(clusterId).withStepIds(stepId); CancelStepsRequest cancelStepsRequest = new CancelStepsRequest().withClusterId(clusterId).withStepIds(stepId);
CancelStepsResult cancelStepsResult = emrClient.cancelSteps(cancelStepsRequest); CancelStepsResult cancelStepsResult = emrClient.cancelSteps(cancelStepsRequest);
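The EMR tasks are the first plugins to actually split the old handle() body along the new seams: submitApplication() fires the request and records the initial status, while trackApplicationStatus() owns the 10-second polling loop. The snippet below is only a rough mental model of how the split lifecycle composes, not the master/worker driver code from this patch; the class and method names are placeholders, and the call order is an assumption inferred from the EMR refactor.

    import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
    import org.apache.dolphinscheduler.plugin.task.api.TaskException;

    final class RemoteTaskLifecycleExample {

        static int run(AbstractRemoteTask task) throws TaskException {
            task.init();
            task.submitApplication();          // fire the remote job and record its id / initial status
            try {
                task.trackApplicationStatus(); // poll (every 10 seconds for the EMR tasks) until terminal
            } catch (TaskException e) {
                task.cancelApplication();      // best-effort cleanup if tracking fails or is interrupted
                throw e;
            }
            return task.getExitStatusCode();   // EXIT_CODE_SUCCESS, EXIT_CODE_KILL or EXIT_CODE_FAILURE
        }
    }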

43
dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java

@ -21,7 +21,9 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import com.amazonaws.SdkBaseException; import com.amazonaws.SdkBaseException;
@ -43,8 +45,7 @@ public class EmrJobFlowTask extends AbstractEmrTask {
private final HashSet<String> waitingStateSet = Sets.newHashSet( private final HashSet<String> waitingStateSet = Sets.newHashSet(
ClusterState.STARTING.toString(), ClusterState.STARTING.toString(),
ClusterState.BOOTSTRAPPING.toString(), ClusterState.BOOTSTRAPPING.toString(),
ClusterState.RUNNING.toString() ClusterState.RUNNING.toString());
);
/** /**
* constructor * constructor
@ -56,7 +57,12 @@ public class EmrJobFlowTask extends AbstractEmrTask {
} }
@Override @Override
public void handle() throws TaskException { public Set<String> getApplicationIds() throws TaskException {
return Collections.emptySet();
}
@Override
public void submitApplication() throws TaskException {
ClusterStatus clusterStatus = null; ClusterStatus clusterStatus = null;
try { try {
RunJobFlowRequest runJobFlowRequest = createRunJobFlowRequest(); RunJobFlowRequest runJobFlowRequest = createRunJobFlowRequest();
@ -65,18 +71,34 @@ public class EmrJobFlowTask extends AbstractEmrTask {
RunJobFlowResult result = emrClient.runJobFlow(runJobFlowRequest); RunJobFlowResult result = emrClient.runJobFlow(runJobFlowRequest);
clusterId = result.getJobFlowId(); clusterId = result.getJobFlowId();
// Failover on EMR Task type has not been implemented. In this time, DS only supports failover on yarn task type . Other task type, such as EMR task, k8s task not ready yet. // Failover on EMR Task type has not been implemented. In this time, DS only supports failover on yarn task
// type . Other task type, such as EMR task, k8s task not ready yet.
setAppIds(clusterId); setAppIds(clusterId);
clusterStatus = getClusterStatus(); clusterStatus = getClusterStatus();
} catch (EmrTaskException | SdkBaseException e) {
logger.error("emr task submit failed with error", e);
throw new TaskException("emr task submit failed", e);
} finally {
final int exitStatusCode = calculateExitStatusCode(clusterStatus);
setExitStatusCode(exitStatusCode);
logger.info("emr task finished with cluster status : {}", clusterStatus);
}
}
@Override
public void trackApplicationStatus() throws TaskException {
ClusterStatus clusterStatus = null;
try {
clusterStatus = getClusterStatus();
while (waitingStateSet.contains(clusterStatus.getState())) { while (waitingStateSet.contains(clusterStatus.getState())) {
TimeUnit.SECONDS.sleep(10); TimeUnit.SECONDS.sleep(10);
clusterStatus = getClusterStatus(); clusterStatus = getClusterStatus();
} }
} catch (EmrTaskException | SdkBaseException e) { } catch (EmrTaskException | SdkBaseException e) {
logger.error("emr task submit failed with error", e); logger.error("emr task failed with error", e);
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
throw new TaskException("Execute emr task failed", e); throw new TaskException("Execute emr task failed", e);
@ -123,7 +145,8 @@ public class EmrJobFlowTask extends AbstractEmrTask {
case TERMINATED: case TERMINATED:
case TERMINATING: case TERMINATING:
String code = stateChangeReason.getCode(); String code = stateChangeReason.getCode();
if (code != null && code.equalsIgnoreCase(ClusterStateChangeReasonCode.ALL_STEPS_COMPLETED.toString())) { if (code != null
&& code.equalsIgnoreCase(ClusterStateChangeReasonCode.ALL_STEPS_COMPLETED.toString())) {
return TaskConstants.EXIT_CODE_SUCCESS; return TaskConstants.EXIT_CODE_SUCCESS;
} else { } else {
return TaskConstants.EXIT_CODE_KILL; return TaskConstants.EXIT_CODE_KILL;
@ -148,9 +171,9 @@ public class EmrJobFlowTask extends AbstractEmrTask {
} }
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
super.cancelApplication(status); logger.info("trying terminate job flow, taskId:{}, clusterId:{}", this.taskExecutionContext.getTaskInstanceId(),
logger.info("trying terminate job flow, taskId:{}, clusterId:{}", this.taskExecutionContext.getTaskInstanceId(), clusterId); clusterId);
TerminateJobFlowsRequest terminateJobFlowsRequest = new TerminateJobFlowsRequest().withJobFlowIds(clusterId); TerminateJobFlowsRequest terminateJobFlowsRequest = new TerminateJobFlowsRequest().withJobFlowIds(clusterId);
TerminateJobFlowsResult terminateJobFlowsResult = emrClient.terminateJobFlows(terminateJobFlowsRequest); TerminateJobFlowsResult terminateJobFlowsResult = emrClient.terminateJobFlows(terminateJobFlowsRequest);
logger.info("the result of terminate job flow is:{}", terminateJobFlowsResult); logger.info("the result of terminate job flow is:{}", terminateJobFlowsResult);

27
dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/test/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTaskTest.java

@ -28,6 +28,8 @@ import static org.powermock.api.mockito.PowerMockito.mockStatic;
import static org.powermock.api.mockito.PowerMockito.spy; import static org.powermock.api.mockito.PowerMockito.spy;
import static org.powermock.api.mockito.PowerMockito.when; import static org.powermock.api.mockito.PowerMockito.when;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils;
@ -88,6 +90,9 @@ public class EmrAddStepsTaskTest {
private EmrAddStepsTask emrAddStepsTask; private EmrAddStepsTask emrAddStepsTask;
private AmazonElasticMapReduce emrClient; private AmazonElasticMapReduce emrClient;
private Step step; private Step step;
private TaskCallBack taskCallBack = (taskInstanceId, appIds) -> {
};
@Before @Before
public void before() throws Exception { public void before() throws Exception {
@ -116,15 +121,14 @@ public class EmrAddStepsTaskTest {
emrAddStepsTask.init(); emrAddStepsTask.init();
} }
@Test @Test(expected = TaskException.class)
public void testCanNotParseJson() throws Exception { public void testCanNotParseJson() throws Exception {
mockStatic(JSONUtils.class); mockStatic(JSONUtils.class);
when(emrAddStepsTask, "createAddJobFlowStepsRequest").thenThrow(new EmrTaskException("can not parse AddJobFlowStepsRequest from json", new Exception("error"))); when(emrAddStepsTask, "createAddJobFlowStepsRequest").thenThrow(new EmrTaskException("can not parse AddJobFlowStepsRequest from json", new Exception("error")));
emrAddStepsTask.handle(); emrAddStepsTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrAddStepsTask.getExitStatusCode());
} }
@Test @Test(expected = TaskException.class)
public void testDefineJsonStepNotOne() throws Exception { public void testDefineJsonStepNotOne() throws Exception {
// mock EmrParameters and EmrAddStepsTask // mock EmrParameters and EmrAddStepsTask
EmrParameters emrParameters = buildErrorEmrTaskParameters(); EmrParameters emrParameters = buildErrorEmrTaskParameters();
@ -134,16 +138,14 @@ public class EmrAddStepsTaskTest {
emrAddStepsTask = spy(new EmrAddStepsTask(taskExecutionContext)); emrAddStepsTask = spy(new EmrAddStepsTask(taskExecutionContext));
doReturn(emrClient).when(emrAddStepsTask, "createEmrClient"); doReturn(emrClient).when(emrAddStepsTask, "createEmrClient");
emrAddStepsTask.init(); emrAddStepsTask.init();
emrAddStepsTask.handle(); emrAddStepsTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrAddStepsTask.getExitStatusCode());
} }
@Test @Test
public void testHandle() throws Exception { public void testHandle() throws Exception {
when(step.getStatus()).thenReturn(pendingState, runningState, completedState); when(step.getStatus()).thenReturn(pendingState, runningState, completedState);
emrAddStepsTask.handle(); emrAddStepsTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_SUCCESS, emrAddStepsTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, emrAddStepsTask.getExitStatusCode());
} }
@ -151,19 +153,18 @@ public class EmrAddStepsTaskTest {
public void testHandleUserRequestTerminate() throws Exception { public void testHandleUserRequestTerminate() throws Exception {
when(step.getStatus()).thenReturn(pendingState, runningState, cancelledState); when(step.getStatus()).thenReturn(pendingState, runningState, cancelledState);
emrAddStepsTask.handle(); emrAddStepsTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_KILL, emrAddStepsTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_KILL, emrAddStepsTask.getExitStatusCode());
} }
@Test @Test(expected = TaskException.class)
public void testHandleError() throws Exception { public void testHandleError() throws Exception {
when(step.getStatus()).thenReturn(pendingState, runningState, failedState); when(step.getStatus()).thenReturn(pendingState, runningState, failedState);
emrAddStepsTask.handle(); emrAddStepsTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrAddStepsTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_FAILURE, emrAddStepsTask.getExitStatusCode());
when(emrClient.addJobFlowSteps(any())).thenThrow(new AmazonElasticMapReduceException("error"), new EmrTaskException()); when(emrClient.addJobFlowSteps(any())).thenThrow(new AmazonElasticMapReduceException("error"), new EmrTaskException());
emrAddStepsTask.handle(); emrAddStepsTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrAddStepsTask.getExitStatusCode());
} }
private EmrParameters buildEmrTaskParameters() { private EmrParameters buildEmrTaskParameters() {
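Both EMR test classes stub the new handle() argument with a bare lambda, which shows that TaskCallBack is effectively a functional interface receiving the task instance id and the application ids a task reports back. The name of its single method is not visible in this diff, so the lambda form is the safest way to construct one; the logging callback below is purely illustrative.

    import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TaskCallBackExamples {

        private static final Logger LOGGER = LoggerFactory.getLogger(TaskCallBackExamples.class);

        // the no-op callback used by EmrAddStepsTaskTest and EmrJobFlowTaskTest
        static final TaskCallBack NO_OP = (taskInstanceId, appIds) -> {
        };

        // a hypothetical callback that just records what the task reports back
        static final TaskCallBack LOGGING = (taskInstanceId, appIds) ->
                LOGGER.info("task instance {} reported application ids: {}", taskInstanceId, appIds);
    }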

33
dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/test/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTaskTest.java

@ -28,6 +28,8 @@ import static org.powermock.api.mockito.PowerMockito.mockStatic;
import static org.powermock.api.mockito.PowerMockito.spy; import static org.powermock.api.mockito.PowerMockito.spy;
import static org.powermock.api.mockito.PowerMockito.when; import static org.powermock.api.mockito.PowerMockito.when;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils;
@ -117,6 +119,9 @@ public class EmrJobFlowTaskTest {
private EmrJobFlowTask emrJobFlowTask; private EmrJobFlowTask emrJobFlowTask;
private AmazonElasticMapReduce emrClient; private AmazonElasticMapReduce emrClient;
private Cluster cluster; private Cluster cluster;
private TaskCallBack taskCallBack = (taskInstanceId, appIds) -> {
};
@Before @Before
public void before() throws Exception { public void before() throws Exception {
@ -146,7 +151,7 @@ public class EmrJobFlowTaskTest {
when(cluster.getStatus()).thenReturn(startingStatus, softwareConfigStatus, runningStatus, terminatingStatus); when(cluster.getStatus()).thenReturn(startingStatus, softwareConfigStatus, runningStatus, terminatingStatus);
emrJobFlowTask.handle(); emrJobFlowTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_SUCCESS, emrJobFlowTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, emrJobFlowTask.getExitStatusCode());
} }
@ -155,7 +160,7 @@ public class EmrJobFlowTaskTest {
public void testHandleAliveWhenNoSteps() throws Exception { public void testHandleAliveWhenNoSteps() throws Exception {
when(cluster.getStatus()).thenReturn(startingStatus, softwareConfigStatus, runningStatus, waitingStatus); when(cluster.getStatus()).thenReturn(startingStatus, softwareConfigStatus, runningStatus, waitingStatus);
emrJobFlowTask.handle(); emrJobFlowTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_SUCCESS, emrJobFlowTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, emrJobFlowTask.getExitStatusCode());
} }
@ -163,7 +168,7 @@ public class EmrJobFlowTaskTest {
public void testHandleUserRequestTerminate() throws Exception { public void testHandleUserRequestTerminate() throws Exception {
when(cluster.getStatus()).thenReturn(startingStatus, userRequestTerminateStatus); when(cluster.getStatus()).thenReturn(startingStatus, userRequestTerminateStatus);
emrJobFlowTask.handle(); emrJobFlowTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_KILL, emrJobFlowTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_KILL, emrJobFlowTask.getExitStatusCode());
} }
@ -171,36 +176,28 @@ public class EmrJobFlowTaskTest {
public void testHandleTerminatedWithError() throws Exception { public void testHandleTerminatedWithError() throws Exception {
when(cluster.getStatus()).thenReturn(startingStatus, softwareConfigStatus, runningStatus, terminatedWithErrorsStatus); when(cluster.getStatus()).thenReturn(startingStatus, softwareConfigStatus, runningStatus, terminatedWithErrorsStatus);
emrJobFlowTask.handle(); emrJobFlowTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrJobFlowTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_FAILURE, emrJobFlowTask.getExitStatusCode());
} }
@Test @Test(expected = TaskException.class)
public void testCanNotParseJson() throws Exception { public void testCanNotParseJson() throws Exception {
mockStatic(JSONUtils.class); mockStatic(JSONUtils.class);
when(emrJobFlowTask, "createRunJobFlowRequest").thenThrow(new EmrTaskException("can not parse RunJobFlowRequest from json", new Exception("error"))); when(emrJobFlowTask, "createRunJobFlowRequest").thenThrow(new EmrTaskException("can not parse RunJobFlowRequest from json", new Exception("error")));
emrJobFlowTask.handle(); emrJobFlowTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrJobFlowTask.getExitStatusCode());
} }
@Test @Test(expected = TaskException.class)
public void testClusterStatusNull() throws Exception { public void testClusterStatusNull() throws Exception {
when(emrClient.describeCluster(any())).thenReturn(null); when(emrClient.describeCluster(any())).thenReturn(null);
emrJobFlowTask.handle(taskCallBack);
emrJobFlowTask.handle();
Assert.assertEquals(EXIT_CODE_FAILURE, emrJobFlowTask.getExitStatusCode());
} }
@Test @Test(expected = TaskException.class)
public void testRunJobFlowError() throws Exception { public void testRunJobFlowError() throws Exception {
when(emrClient.runJobFlow(any())).thenThrow(new AmazonElasticMapReduceException("error"), new EmrTaskException()); when(emrClient.runJobFlow(any())).thenThrow(new AmazonElasticMapReduceException("error"), new EmrTaskException());
emrJobFlowTask.handle(); emrJobFlowTask.handle(taskCallBack);
Assert.assertEquals(EXIT_CODE_FAILURE, emrJobFlowTask.getExitStatusCode());
emrJobFlowTask.handle();
Assert.assertEquals(EXIT_CODE_FAILURE, emrJobFlowTask.getExitStatusCode());
} }
private String buildEmrTaskParameters() { private String buildEmrTaskParameters() {

8
dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTask.java

@ -18,6 +18,7 @@
package org.apache.dolphinscheduler.plugin.task.flink; package org.apache.dolphinscheduler.plugin.task.flink;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
@ -97,7 +98,7 @@ public class FlinkStreamTask extends FlinkTask implements StreamTask {
} }
@Override @Override
public void cancelApplication(boolean status) throws Exception { public void cancelApplication() throws TaskException {
Set<String> appIds = getApplicationIds(); Set<String> appIds = getApplicationIds();
if (CollectionUtils.isEmpty(appIds)) { if (CollectionUtils.isEmpty(appIds)) {
logger.error("can not get appId, taskInstanceId:{}", taskExecutionContext.getTaskInstanceId()); logger.error("can not get appId, taskInstanceId:{}", taskExecutionContext.getTaskInstanceId());
@ -110,8 +111,11 @@ public class FlinkStreamTask extends FlinkTask implements StreamTask {
ProcessBuilder processBuilder = new ProcessBuilder(); ProcessBuilder processBuilder = new ProcessBuilder();
processBuilder.command(args); processBuilder.command(args);
try {
processBuilder.start(); processBuilder.start();
super.cancelApplication(status); } catch (IOException e) {
throw new TaskException("cancel application error", e);
}
} }
@Override @Override

8
dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.plugin.task.flink;
import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask; import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
@ -32,6 +33,7 @@ import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
@ -112,7 +114,7 @@ public class FlinkTask extends AbstractYarnTask {
} }
@Override @Override
public Set<String> getApplicationIds() throws IOException { public Set<String> getApplicationIds() throws TaskException {
Set<String> appIds = new HashSet<>(); Set<String> appIds = new HashSet<>();
File file = new File(taskRequest.getLogPath()); File file = new File(taskRequest.getLogPath());
@ -131,6 +133,10 @@ public class FlinkTask extends AbstractYarnTask {
appIds.add(appId); appIds.add(appId);
} }
} }
} catch (FileNotFoundException e) {
throw new TaskException("get application id error, file not found, path:" + taskRequest.getLogPath());
} catch (IOException e) {
throw new TaskException("get application id error, path:" + taskRequest.getLogPath(), e);
} }
return appIds; return appIds;
} }
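FlinkTask keeps scraping the yarn application ids out of the task's own log file, but getApplicationIds() now declares TaskException and converts the I/O failures itself instead of leaking IOException to the caller. The helper below sketches that pattern; the class name and the application-id regex are assumptions for illustration (FlinkTask has its own matching logic), and only the exception wrapping mirrors the patch.

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    import org.apache.dolphinscheduler.plugin.task.api.TaskException;

    final class LogApplicationIdScraper {

        // assumed yarn id format, e.g. application_1548381669007_1234
        private static final Pattern APPLICATION_ID = Pattern.compile("application_\\d+_\\d+");

        static Set<String> getApplicationIds(String logPath) throws TaskException {
            Set<String> appIds = new HashSet<>();
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(logPath), StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    Matcher matcher = APPLICATION_ID.matcher(line);
                    while (matcher.find()) {
                        appIds.add(matcher.group());
                    }
                }
            } catch (FileNotFoundException e) {
                throw new TaskException("get application id error, file not found, path:" + logPath);
            } catch (IOException e) {
                throw new TaskException("get application id error, path:" + logPath, e);
            }
            return appIds;
        }

        private LogApplicationIdScraper() {
        }
    }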

32
dolphinscheduler-task-plugin/dolphinscheduler-task-hivecli/src/main/java/org/apache/dolphinscheduler/plugin/task/hivecli/HiveCliTask.java

@ -19,8 +19,10 @@ package org.apache.dolphinscheduler.plugin.task.hivecli;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.Property;
@ -34,10 +36,12 @@ import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
public class HiveCliTask extends AbstractTaskExecutor { public class HiveCliTask extends AbstractRemoteTask {
private HiveCliParameters hiveCliParameters; private HiveCliParameters hiveCliParameters;
@ -54,6 +58,11 @@ public class HiveCliTask extends AbstractTaskExecutor {
logger); logger);
} }
@Override
public Set<String> getApplicationIds() throws TaskException {
return Collections.emptySet();
}
@Override @Override
public void init() { public void init() {
logger.info("hiveCli task params {}", taskExecutionContext.getTaskParams()); logger.info("hiveCli task params {}", taskExecutionContext.getTaskParams());
@ -65,8 +74,9 @@ public class HiveCliTask extends AbstractTaskExecutor {
} }
} }
// todo split handle to submit and track
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
final TaskResponse taskResponse = shellCommandExecutor.run(buildCommand()); final TaskResponse taskResponse = shellCommandExecutor.run(buildCommand());
setExitStatusCode(taskResponse.getExitStatusCode()); setExitStatusCode(taskResponse.getExitStatusCode());
@ -85,6 +95,16 @@ public class HiveCliTask extends AbstractTaskExecutor {
} }
} }
@Override
public void submitApplication() throws TaskException {
}
@Override
public void trackApplicationStatus() throws TaskException {
}
protected String buildCommand() { protected String buildCommand() {
final List<String> args = new ArrayList<>(); final List<String> args = new ArrayList<>();
@ -126,8 +146,12 @@ public class HiveCliTask extends AbstractTaskExecutor {
} }
@Override @Override
public void cancelApplication(boolean cancelApplication) throws Exception { public void cancelApplication() throws TaskException {
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
} }
} }

14
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java

@ -19,14 +19,14 @@ package org.apache.dolphinscheduler.plugin.task.http;
import static org.apache.dolphinscheduler.plugin.task.http.HttpTaskConstants.APPLICATION_JSON; import static org.apache.dolphinscheduler.plugin.task.http.HttpTaskConstants.APPLICATION_JSON;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils; import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParameterUtils; import org.apache.dolphinscheduler.plugin.task.api.parser.ParameterUtils;
import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils;
import org.apache.dolphinscheduler.spi.utils.DateUtils; import org.apache.dolphinscheduler.spi.utils.DateUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils;
@ -48,13 +48,12 @@ import org.apache.http.util.EntityUtils;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.ObjectNode;
public class HttpTask extends AbstractTaskExecutor { public class HttpTask extends AbstractTask {
/** /**
* output * output
@ -90,7 +89,7 @@ public class HttpTask extends AbstractTaskExecutor {
} }
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
long startTime = System.currentTimeMillis(); long startTime = System.currentTimeMillis();
String formatTimeStamp = DateUtils.formatTimeStamp(startTime); String formatTimeStamp = DateUtils.formatTimeStamp(startTime);
String statusCode = null; String statusCode = null;
@ -114,6 +113,11 @@ public class HttpTask extends AbstractTaskExecutor {
} }
@Override
public void cancel() throws TaskException {
}
/** /**
* send request * send request
* *

28
dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/test/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskTest.java

@ -76,11 +76,11 @@ public class HttpTaskTest {
HttpTask headHttpTask = generateHttpTask(HttpMethod.HEAD, HttpStatus.SC_OK); HttpTask headHttpTask = generateHttpTask(HttpMethod.HEAD, HttpStatus.SC_OK);
HttpTask putHttpTask = generateHttpTask(HttpMethod.PUT, HttpStatus.SC_OK); HttpTask putHttpTask = generateHttpTask(HttpMethod.PUT, HttpStatus.SC_OK);
HttpTask deleteHttpTask = generateHttpTask(HttpMethod.DELETE, HttpStatus.SC_OK); HttpTask deleteHttpTask = generateHttpTask(HttpMethod.DELETE, HttpStatus.SC_OK);
getHttpTask.handle(); getHttpTask.handle(null);
postHttpTask.handle(); postHttpTask.handle(null);
headHttpTask.handle(); headHttpTask.handle(null);
putHttpTask.handle(); putHttpTask.handle(null);
deleteHttpTask.handle(); deleteHttpTask.handle(null);
Assert.assertEquals(EXIT_CODE_SUCCESS, getHttpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, getHttpTask.getExitStatusCode());
Assert.assertEquals(EXIT_CODE_SUCCESS, postHttpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, postHttpTask.getExitStatusCode());
Assert.assertEquals(EXIT_CODE_SUCCESS, headHttpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, headHttpTask.getExitStatusCode());
@ -91,7 +91,7 @@ public class HttpTaskTest {
@Test @Test
public void testHandleCheckCodeDefaultError() throws Exception { public void testHandleCheckCodeDefaultError() throws Exception {
HttpTask getHttpTask = generateHttpTask(HttpMethod.GET, HttpStatus.SC_BAD_REQUEST); HttpTask getHttpTask = generateHttpTask(HttpMethod.GET, HttpStatus.SC_BAD_REQUEST);
getHttpTask.handle(); getHttpTask.handle(null);
Assert.assertEquals(EXIT_CODE_FAILURE, getHttpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_FAILURE, getHttpTask.getExitStatusCode());
} }
@ -102,8 +102,8 @@ public class HttpTaskTest {
condition, HttpStatus.SC_CREATED, ""); condition, HttpStatus.SC_CREATED, "");
HttpTask httpErrorTask = generateHttpTask(HttpMethod.GET, HttpCheckCondition.STATUS_CODE_CUSTOM, HttpTask httpErrorTask = generateHttpTask(HttpMethod.GET, HttpCheckCondition.STATUS_CODE_CUSTOM,
condition, HttpStatus.SC_OK, ""); condition, HttpStatus.SC_OK, "");
httpTask.handle(); httpTask.handle(null);
httpErrorTask.handle(); httpErrorTask.handle(null);
Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode());
Assert.assertEquals(EXIT_CODE_FAILURE, httpErrorTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_FAILURE, httpErrorTask.getExitStatusCode());
} }
@ -114,8 +114,8 @@ public class HttpTaskTest {
"success", HttpStatus.SC_OK, "{\"status\": \"success\"}"); "success", HttpStatus.SC_OK, "{\"status\": \"success\"}");
HttpTask httpErrorTask = generateHttpTask(HttpMethod.GET, HttpCheckCondition.BODY_CONTAINS, HttpTask httpErrorTask = generateHttpTask(HttpMethod.GET, HttpCheckCondition.BODY_CONTAINS,
"success", HttpStatus.SC_OK, "{\"status\": \"failed\"}"); "success", HttpStatus.SC_OK, "{\"status\": \"failed\"}");
httpTask.handle(); httpTask.handle(null);
httpErrorTask.handle(); httpErrorTask.handle(null);
Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode());
Assert.assertEquals(EXIT_CODE_FAILURE, httpErrorTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_FAILURE, httpErrorTask.getExitStatusCode());
} }
@ -126,8 +126,8 @@ public class HttpTaskTest {
"failed", HttpStatus.SC_OK, "{\"status\": \"success\"}"); "failed", HttpStatus.SC_OK, "{\"status\": \"success\"}");
HttpTask httpErrorTask = generateHttpTask(HttpMethod.GET, HttpCheckCondition.BODY_NOT_CONTAINS, HttpTask httpErrorTask = generateHttpTask(HttpMethod.GET, HttpCheckCondition.BODY_NOT_CONTAINS,
"failed", HttpStatus.SC_OK, "{\"status\": \"failed\"}"); "failed", HttpStatus.SC_OK, "{\"status\": \"failed\"}");
httpTask.handle(); httpTask.handle(null);
httpErrorTask.handle(); httpErrorTask.handle(null);
Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode());
Assert.assertEquals(EXIT_CODE_FAILURE, httpErrorTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_FAILURE, httpErrorTask.getExitStatusCode());
} }
@ -148,7 +148,7 @@ public class HttpTaskTest {
HttpTask httpTask = generateHttpTask(MOCK_DISPATCH_PATH_REQ_BODY_TO_RES_BODY, HttpMethod.POST, HttpTask httpTask = generateHttpTask(MOCK_DISPATCH_PATH_REQ_BODY_TO_RES_BODY, HttpMethod.POST,
httpParams, prepareParamsMap, HttpCheckCondition.BODY_CONTAINS, "20220812", httpParams, prepareParamsMap, HttpCheckCondition.BODY_CONTAINS, "20220812",
HttpStatus.SC_OK, ""); HttpStatus.SC_OK, "");
httpTask.handle(); httpTask.handle(null);
Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode());
} }
@ -168,7 +168,7 @@ public class HttpTaskTest {
HttpTask httpTask = generateHttpTask(MOCK_DISPATCH_PATH_REQ_PARAMS_TO_RES_BODY, HttpMethod.POST, HttpTask httpTask = generateHttpTask(MOCK_DISPATCH_PATH_REQ_PARAMS_TO_RES_BODY, HttpMethod.POST,
httpParams, prepareParamsMap, HttpCheckCondition.BODY_CONTAINS, "20220812", httpParams, prepareParamsMap, HttpCheckCondition.BODY_CONTAINS, "20220812",
HttpStatus.SC_OK, ""); HttpStatus.SC_OK, "");
httpTask.handle(); httpTask.handle(null);
Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode()); Assert.assertEquals(EXIT_CODE_SUCCESS, httpTask.getExitStatusCode());
} }

33
dolphinscheduler-task-plugin/dolphinscheduler-task-jupyter/src/main/java/org/apache/dolphinscheduler/plugin/task/jupyter/JupyterTask.java

@ -18,8 +18,11 @@
package org.apache.dolphinscheduler.plugin.task.jupyter; package org.apache.dolphinscheduler.plugin.task.jupyter;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@ -35,10 +38,12 @@ import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
public class JupyterTask extends AbstractTaskExecutor { public class JupyterTask extends AbstractRemoteTask {
/** /**
* jupyter parameters * jupyter parameters
@ -60,6 +65,11 @@ public class JupyterTask extends AbstractTaskExecutor {
logger); logger);
} }
@Override
public Set<String> getApplicationIds() throws TaskException {
return Collections.emptySet();
}
@Override @Override
public void init() { public void init() {
logger.info("jupyter task params {}", taskExecutionContext.getTaskParams()); logger.info("jupyter task params {}", taskExecutionContext.getTaskParams());
@ -76,8 +86,9 @@ public class JupyterTask extends AbstractTaskExecutor {
} }
} }
// todo split handle to submit and track
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
// SHELL task exit code // SHELL task exit code
TaskResponse response = shellCommandExecutor.run(buildCommand()); TaskResponse response = shellCommandExecutor.run(buildCommand());
@ -96,6 +107,16 @@ public class JupyterTask extends AbstractTaskExecutor {
} }
} }
@Override
public void submitApplication() throws TaskException {
}
@Override
public void trackApplicationStatus() throws TaskException {
}
/** /**
* create command * create command
* *
@ -223,9 +244,13 @@ public class JupyterTask extends AbstractTaskExecutor {
} }
@Override @Override
public void cancelApplication(boolean cancelApplication) throws Exception { public void cancelApplication() throws TaskException {
// cancel process // cancel process
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
} }
@Override @Override

7
dolphinscheduler-task-plugin/dolphinscheduler-task-k8s/src/main/java/org/apache/dolphinscheduler/plugin/task/k8s/K8sTask.java

@ -30,7 +30,9 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.K8sTaskParameters;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils; import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.util.Collections;
import java.util.Map; import java.util.Map;
import java.util.Set;
public class K8sTask extends AbstractK8sTask { public class K8sTask extends AbstractK8sTask {
@ -56,6 +58,11 @@ public class K8sTask extends AbstractK8sTask {
} }
} }
@Override
public Set<String> getApplicationIds() throws TaskException {
return Collections.emptySet();
}
@Override @Override
public AbstractParameters getParameters() { public AbstractParameters getParameters() {
return k8sTaskParameters; return k8sTaskParameters;

14
dolphinscheduler-task-plugin/dolphinscheduler-task-mlflow/src/main/java/org/apache/dolphinscheduler/plugin/task/mlflow/MlflowTask.java

@ -18,8 +18,9 @@
package org.apache.dolphinscheduler.plugin.task.mlflow; package org.apache.dolphinscheduler.plugin.task.mlflow;
import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@ -40,7 +41,7 @@ import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_COD
/** /**
* shell task * shell task
*/ */
public class MlflowTask extends AbstractTaskExecutor { public class MlflowTask extends AbstractTask {
/** /**
* shell parameters * shell parameters
@ -81,7 +82,7 @@ public class MlflowTask extends AbstractTaskExecutor {
} }
@Override @Override
public void handle() throws TaskException { public void handle(TaskCallBack taskCallBack) throws TaskException {
try { try {
// construct process // construct process
String command = buildCommand(); String command = buildCommand();
@ -93,7 +94,6 @@ public class MlflowTask extends AbstractTaskExecutor {
exitCode = commandExecuteResult.getExitStatusCode(); exitCode = commandExecuteResult.getExitStatusCode();
} }
setExitStatusCode(exitCode); setExitStatusCode(exitCode);
setAppIds(String.join(TaskConstants.COMMA, getApplicationIds()));
setProcessId(commandExecuteResult.getProcessId()); setProcessId(commandExecuteResult.getProcessId());
mlflowParameters.dealOutParam(shellCommandExecutor.getVarPool()); mlflowParameters.dealOutParam(shellCommandExecutor.getVarPool());
} catch (InterruptedException e) { } catch (InterruptedException e) {
@ -109,9 +109,13 @@ public class MlflowTask extends AbstractTaskExecutor {
} }
@Override @Override
public void cancelApplication(boolean cancelApplication) throws Exception { public void cancel() throws TaskException {
// cancel process // cancel process
try {
shellCommandExecutor.cancelApplication(); shellCommandExecutor.cancelApplication();
} catch (Exception e) {
throw new TaskException("cancel application error", e);
}
} }
public String buildCommand() { public String buildCommand() {

38
dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java

@@ -18,7 +18,10 @@
package org.apache.dolphinscheduler.plugin.task.pigeon;
import org.apache.commons.collections4.CollectionUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -27,6 +30,7 @@ import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.StatusLine;
+import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
@@ -36,18 +40,21 @@ import org.apache.http.util.EntityUtils;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;
+import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.nio.charset.StandardCharsets;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
import java.util.stream.Collectors;
/**
* TIS DataX Task
**/
-public class PigeonTask extends AbstractTaskExecutor {
+public class PigeonTask extends AbstractRemoteTask {
public static final String KEY_POOL_VAR_PIGEON_HOST = "p_host";
private final TaskExecutionContext taskExecutionContext;
@@ -62,6 +69,11 @@ public class PigeonTask extends AbstractTaskExecutor {
this.config = PigeonConfig.getInstance();
}
+@Override
+public Set<String> getApplicationIds() throws TaskException {
+return Collections.emptySet();
+}
@Override
public void init() {
super.init();
@@ -72,8 +84,9 @@ public class PigeonTask extends AbstractTaskExecutor {
}
}
+// todo split handle to submit and track
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
// Trigger PIGEON DataX pipeline
logger.info("start execute PIGEON task");
long startTime = System.currentTimeMillis();
@@ -153,13 +166,22 @@
}
}
+@Override
+public void submitApplication() throws TaskException {
+}
+@Override
+public void trackApplicationStatus() throws TaskException {
+}
private void addFormUrlencoded(HttpPost post) {
post.addHeader("content-type", "application/x-www-form-urlencoded");
}
@Override
-public void cancelApplication(boolean status) throws Exception {
-super.cancelApplication(status);
+public void cancelApplication() throws TaskException {
logger.info("start to cancelApplication");
Objects.requireNonNull(triggerResult, "triggerResult can not be null");
logger.info("start to cancelApplication taskId:{}", triggerResult.getTaskId());
@@ -181,8 +203,12 @@
if (CollectionUtils.isNotEmpty(errormsg)) {
errs.append(",errs:").append(errormsg.stream().collect(Collectors.joining(",")));
}
-throw new Exception("cancel PIGEON job faild taskId:" + triggerResult.getTaskId() + errs.toString());
+throw new TaskException("cancel PIGEON job faild taskId:" + triggerResult.getTaskId() + errs);
}
+} catch (ClientProtocolException e) {
+throw new TaskException("client protocol error", e);
+} catch (Exception e) {
+throw new TaskException("pigeon execute error", e);
}
}

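PigeonTask is converted to the new remote task model but, as its "todo split handle to submit and track" comment notes, it still runs everything inside handle() and leaves submitApplication()/trackApplicationStatus() empty. The hook set itself is the interesting part; the sketch below shows its shape for a hypothetical remote plugin. The class, the job id handling, and the comments are assumptions; the overridden method names and signatures are taken from the PigeonTask and SagemakerTask changes in this PR.

// Hedged sketch of the AbstractRemoteTask hooks visible in this diff.
// "DemoRemoteTask" and its remoteJobId bookkeeping are hypothetical.
import java.util.Collections;
import java.util.Set;

import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;

public class DemoRemoteTask extends AbstractRemoteTask {

    private String remoteJobId;

    public DemoRemoteTask(TaskExecutionContext taskExecutionContext) {
        super(taskExecutionContext);
    }

    @Override
    public Set<String> getApplicationIds() throws TaskException {
        return remoteJobId == null ? Collections.emptySet() : Collections.singleton(remoteJobId);
    }

    @Override
    public void submitApplication() throws TaskException {
        // fire the remote job and remember its id so it can survive a restart
        remoteJobId = "demo-job-1";
        setAppIds(remoteJobId);
    }

    @Override
    public void trackApplicationStatus() throws TaskException {
        // poll the remote system until the job reaches a terminal state,
        // then record the outcome with setExitStatusCode(...)
    }

    @Override
    public void cancelApplication() throws TaskException {
        // ask the remote system to stop the job; wrap failures in TaskException
    }

    @Override
    public AbstractParameters getParameters() {
        return null; // a real plugin returns its parsed parameter object
    }
}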
2
dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/test/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskTest.java

@@ -114,7 +114,7 @@ public class PigeonTaskTest {
file("src/test/resources/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskTest.json"));
running(server, () -> {
-pigeonTask.handle();
+pigeonTask.handle(null);
Assert.assertEquals("PIGEON execute be success", TaskExecutionStatus.SUCCESS, pigeonTask.getExitStatus());
});

12
dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java

@@ -19,7 +19,8 @@ package org.apache.dolphinscheduler.plugin.task.procedure;
import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.enums.DataType;
@@ -47,7 +48,7 @@ import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_COD
/**
* procedure task
*/
-public class ProcedureTask extends AbstractTaskExecutor {
+public class ProcedureTask extends AbstractTask {
/**
* procedure parameters
@@ -84,7 +85,7 @@ public class ProcedureTask extends AbstractTaskExecutor {
}
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
logger.info("procedure type : {}, datasource : {}, method : {} , localParams : {}",
procedureParameters.getType(),
procedureParameters.getDatasource(),
@@ -129,6 +130,11 @@
}
}
+@Override
+public void cancel() throws TaskException {
+}
private String formatSql(Map<Integer, Property> sqlParamsMap, Map<String, Property> paramsMap) {
// combining local and global parameters
setSqlParamsMap(procedureParameters.getMethod(), rgex, sqlParamsMap, paramsMap, taskExecutionContext.getTaskInstanceId());

16
dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java

@@ -17,8 +17,9 @@
package org.apache.dolphinscheduler.plugin.task.python;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -27,7 +28,6 @@ import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParameterUtils;
-import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.commons.io.FileUtils;
@@ -37,7 +37,6 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
-import java.util.HashMap;
import java.util.Map;
import com.google.common.base.Preconditions;
@@ -45,7 +44,7 @@ import com.google.common.base.Preconditions;
/**
* python task
*/
-public class PythonTask extends AbstractTaskExecutor {
+public class PythonTask extends AbstractTask {
/**
* python parameters
@@ -100,7 +99,7 @@
}
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
try {
// generate the content of this python script
String pythonScriptContent = buildPythonScriptContent();
@@ -113,7 +112,6 @@
TaskResponse taskResponse = shellCommandExecutor.run(command);
setExitStatusCode(taskResponse.getExitStatusCode());
-setAppIds(String.join(TaskConstants.COMMA, getApplicationIds()));
setProcessId(taskResponse.getProcessId());
setVarPool(shellCommandExecutor.getVarPool());
} catch (Exception e) {
@@ -124,9 +122,13 @@
}
@Override
-public void cancelApplication(boolean cancelApplication) throws Exception {
+public void cancel() throws TaskException {
// cancel process
+try {
shellCommandExecutor.cancelApplication();
+} catch (Exception e) {
+throw new TaskException("cancel application error", e);
+}
}
@Override

13
dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/PytorchTask.java

@@ -17,8 +17,9 @@
package org.apache.dolphinscheduler.plugin.task.pytorch;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -33,7 +34,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-public class PytorchTask extends AbstractTaskExecutor {
+public class PytorchTask extends AbstractTask {
private final ShellCommandExecutor shellCommandExecutor;
protected PytorchParameters pytorchParameters;
@@ -65,12 +66,11 @@
}
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
try {
String command = buildPythonExecuteCommand();
TaskResponse taskResponse = shellCommandExecutor.run(command);
setExitStatusCode(taskResponse.getExitStatusCode());
-setAppIds(taskResponse.getAppIds());
setProcessId(taskResponse.getProcessId());
setVarPool(shellCommandExecutor.getVarPool());
} catch (InterruptedException e) {
@@ -84,6 +84,11 @@
}
}
+@Override
+public void cancel() throws TaskException {
+}
public String buildPythonExecuteCommand() throws Exception {
List<String> args = new ArrayList<>();

81
dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/PipelineUtils.java

@@ -23,6 +23,10 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import java.util.Collections;
import java.util.List;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -39,75 +43,57 @@ import com.amazonaws.services.sagemaker.model.StopPipelineExecutionResult;
public class PipelineUtils {
-protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass()));
-private final AmazonSageMaker client;
-private String pipelineExecutionArn;
-private String clientRequestToken;
-private String pipelineStatus;
-public PipelineUtils(AmazonSageMaker client) {
-this.client = client;
-}
-public int startPipelineExecution(StartPipelineExecutionRequest request) {
-int exitStatusCode = TaskConstants.EXIT_CODE_FAILURE;
-try {
+protected final Logger logger =
+LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass()));
+private static final String EXECUTING = "Executing";
+private static final String SUCCEEDED = "Succeeded";
+public PipelineId startPipelineExecution(AmazonSageMaker client, StartPipelineExecutionRequest request) {
StartPipelineExecutionResult result = client.startPipelineExecution(request);
-pipelineExecutionArn = result.getPipelineExecutionArn();
-clientRequestToken = request.getClientRequestToken();
-exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS;
-logger.info("Start pipeline: {} success", pipelineExecutionArn);
-} catch (Exception e) {
-logger.error("Start pipeline error: {}", e.getMessage());
-}
-return exitStatusCode;
+String pipelineExecutionArn = result.getPipelineExecutionArn();
+String clientRequestToken = request.getClientRequestToken();
+logger.info("Start success, pipeline: {}, token: {}", pipelineExecutionArn, clientRequestToken);
+return new PipelineId(pipelineExecutionArn, clientRequestToken);
}
-public void stopPipelineExecution() {
+public void stopPipelineExecution(AmazonSageMaker client, PipelineId pipelineId) {
StopPipelineExecutionRequest request = new StopPipelineExecutionRequest();
-request.setPipelineExecutionArn(pipelineExecutionArn);
-request.setClientRequestToken(clientRequestToken);
-try {
+request.setPipelineExecutionArn(pipelineId.getPipelineExecutionArn());
+request.setClientRequestToken(pipelineId.getClientRequestToken());
StopPipelineExecutionResult result = client.stopPipelineExecution(request);
logger.info("Stop pipeline: {} success", result.getPipelineExecutionArn());
-} catch (Exception e) {
-logger.error("Stop pipeline error: {}", e.getMessage());
-}
}
-public int checkPipelineExecutionStatus() {
-describePipelineExecution();
-while (pipelineStatus.equals("Executing")) {
+public int checkPipelineExecutionStatus(AmazonSageMaker client, PipelineId pipelineId) {
+String pipelineStatus = describePipelineExecution(client, pipelineId);
+while (EXECUTING.equals(pipelineStatus)) {
logger.info("check Pipeline Steps running");
-listPipelineExecutionSteps();
+listPipelineExecutionSteps(client, pipelineId);
ThreadUtils.sleep(SagemakerConstants.CHECK_PIPELINE_EXECUTION_STATUS_INTERVAL);
-describePipelineExecution();
+describePipelineExecution(client, pipelineId);
}
int exitStatusCode = TaskConstants.EXIT_CODE_FAILURE;
-if (pipelineStatus.equals("Succeeded")) {
+if (SUCCEEDED.equals(pipelineStatus)) {
exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS;
}
-logger.info("exit : {}", exitStatusCode);
-logger.info("PipelineExecutionStatus : {}", pipelineStatus);
+logger.info("PipelineExecutionStatus : {}, exitStatusCode: {}", pipelineStatus, exitStatusCode);
return exitStatusCode;
}
-private void describePipelineExecution() {
+private String describePipelineExecution(AmazonSageMaker client, PipelineId pipelineId) {
DescribePipelineExecutionRequest request = new DescribePipelineExecutionRequest();
-request.setPipelineExecutionArn(pipelineExecutionArn);
+request.setPipelineExecutionArn(pipelineId.getPipelineExecutionArn());
DescribePipelineExecutionResult result = client.describePipelineExecution(request);
-pipelineStatus = result.getPipelineExecutionStatus();
-logger.info("PipelineExecutionStatus: {}", pipelineStatus);
+logger.info("PipelineExecutionStatus: {}", result.getPipelineExecutionStatus());
+return result.getPipelineExecutionStatus();
}
-private void listPipelineExecutionSteps() {
+private void listPipelineExecutionSteps(AmazonSageMaker client, PipelineId pipelineId) {
ListPipelineExecutionStepsRequest request = new ListPipelineExecutionStepsRequest();
-request.setPipelineExecutionArn(pipelineExecutionArn);
+request.setPipelineExecutionArn(pipelineId.getPipelineExecutionArn());
request.setMaxResults(SagemakerConstants.PIPELINE_MAX_RESULTS);
ListPipelineExecutionStepsResult result = client.listPipelineExecutionSteps(request);
List<PipelineExecutionStep> steps = result.getPipelineExecutionSteps();
@@ -119,7 +105,12 @@ public class PipelineUtils {
}
}
-public String getPipelineExecutionArn() {
-return pipelineExecutionArn;
+@Data
+@AllArgsConstructor
+@NoArgsConstructor
+public static class PipelineId {
+private String pipelineExecutionArn;
+private String clientRequestToken;
}
}

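After this refactor, PipelineUtils no longer keeps the SageMaker client, the execution ARN, or the polled status as instance state; callers pass the client and a PipelineId value object into every call, which is what lets SagemakerTask recover a running execution from persisted appIds. A hedged usage sketch follows; the client construction, pipeline name, and token are placeholders that do not appear in this diff, though the with* setters are the usual AWS SDK v1 request builders.

// Hypothetical caller of the refactored, stateless PipelineUtils.
import com.amazonaws.services.sagemaker.AmazonSageMaker;
import com.amazonaws.services.sagemaker.AmazonSageMakerClientBuilder;
import com.amazonaws.services.sagemaker.model.StartPipelineExecutionRequest;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;

AmazonSageMaker client = AmazonSageMakerClientBuilder.standard().build();
PipelineUtils utils = new PipelineUtils();

StartPipelineExecutionRequest request = new StartPipelineExecutionRequest()
        .withPipelineName("demo-pipeline")          // placeholder pipeline name
        .withClientRequestToken("demo-token");      // placeholder idempotency token

// the returned PipelineId carries everything later calls need
PipelineUtils.PipelineId pipelineId = utils.startPipelineExecution(client, request);
int exitCode = utils.checkPipelineExecutionStatus(client, pipelineId);
if (exitCode != TaskConstants.EXIT_CODE_SUCCESS) {
    utils.stopPipelineExecution(client, pipelineId);
}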
96
dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTask.java

@@ -22,7 +22,7 @@ import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKN
import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL;
import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -31,8 +31,11 @@ import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParameterUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.PropertyUtils;
+import org.apache.dolphinscheduler.spi.utils.StringUtils;
+import java.util.Collections;
import java.util.Map;
+import java.util.Set;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
@@ -46,34 +49,43 @@ import com.fasterxml.jackson.databind.PropertyNamingStrategy;
/**
* SagemakerTask task, Used to start Sagemaker pipeline
*/
-public class SagemakerTask extends AbstractTaskExecutor {
+public class SagemakerTask extends AbstractRemoteTask {
private static final ObjectMapper objectMapper =
-new ObjectMapper().configure(FAIL_ON_UNKNOWN_PROPERTIES, false).configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true).configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true)
-.configure(REQUIRE_SETTERS_FOR_GETTERS, true).setPropertyNamingStrategy(new PropertyNamingStrategy.UpperCamelCaseStrategy());
-/**
-* taskExecutionContext
-*/
-private final TaskExecutionContext taskExecutionContext;
+new ObjectMapper().configure(FAIL_ON_UNKNOWN_PROPERTIES, false)
+.configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
+.configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true)
+.configure(REQUIRE_SETTERS_FOR_GETTERS, true)
+.setPropertyNamingStrategy(new PropertyNamingStrategy.UpperCamelCaseStrategy());
/**
* SageMaker parameters
*/
private SagemakerParameters parameters;
-private PipelineUtils utils;
+private final AmazonSageMaker client;
+private final PipelineUtils utils;
+private PipelineUtils.PipelineId pipelineId;
public SagemakerTask(TaskExecutionContext taskExecutionContext) {
super(taskExecutionContext);
-this.taskExecutionContext = taskExecutionContext;
+client = createClient();
+utils = new PipelineUtils();
+}
+@Override
+public Set<String> getApplicationIds() throws TaskException {
+return Collections.emptySet();
}
@Override
public void init() {
-logger.info("Sagemaker task params {}", taskExecutionContext.getTaskParams());
-parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SagemakerParameters.class);
+logger.info("Sagemaker task params {}", taskRequest.getTaskParams());
+parameters = JSONUtils.parseObject(taskRequest.getTaskParams(), SagemakerParameters.class);
+if (parameters == null) {
+throw new SagemakerTaskException("Sagemaker task params is empty");
+}
if (!parameters.checkParameters()) {
throw new SagemakerTaskException("Sagemaker task params is not valid");
}
@@ -81,41 +93,51 @@ public class SagemakerTask extends AbstractTaskExecutor {
}
@Override
-public void handle() throws TaskException {
+public void submitApplication() throws TaskException {
try {
-int exitStatusCode = handleStartPipeline();
-setExitStatusCode(exitStatusCode);
+StartPipelineExecutionRequest request = createStartPipelineRequest();
+// Start pipeline
+pipelineId = utils.startPipelineExecution(client, request);
+// set AppId
+setAppIds(JSONUtils.toJsonString(pipelineId));
} catch (Exception e) {
setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE);
-throw new TaskException("SageMaker task error", e);
+throw new TaskException("SageMaker task submit error", e);
}
}
@Override
-public void cancelApplication(boolean cancelApplication) {
-// stop pipeline
-utils.stopPipelineExecution();
-}
-public int handleStartPipeline() {
-int exitStatusCode;
-StartPipelineExecutionRequest request = createStartPipelineRequest();
-try {
-AmazonSageMaker client = createClient();
-utils = new PipelineUtils(client);
-setAppIds(utils.getPipelineExecutionArn());
-} catch (Exception e) {
-throw new SagemakerTaskException("can not connect aws ", e);
-}
-// Start pipeline
-exitStatusCode = utils.startPipelineExecution(request);
-if (exitStatusCode == TaskConstants.EXIT_CODE_SUCCESS) {
-// Keep checking the health status
-exitStatusCode = utils.checkPipelineExecutionStatus();
-}
-return exitStatusCode;
+public void cancelApplication() {
+initPipelineId();
+try {
+// stop pipeline
+utils.stopPipelineExecution(client, pipelineId);
+} catch (Exception e) {
+throw new TaskException("cancel application error", e);
+}
+}
+@Override
+public void trackApplicationStatus() throws TaskException {
+initPipelineId();
+// Keep checking the health status
+exitStatusCode = utils.checkPipelineExecutionStatus(client, pipelineId);
+}
+/**
+* init sagemaker applicationId if null
+*/
+private void initPipelineId() {
+if (pipelineId == null) {
+if (StringUtils.isNotEmpty(getAppIds())) {
+pipelineId = JSONUtils.parseObject(getAppIds(), PipelineUtils.PipelineId.class);
+}
+}
+if (pipelineId == null) {
+throw new TaskException("sagemaker applicationID is null");
+}
}
public StartPipelineExecutionRequest createStartPipelineRequest() throws SagemakerTaskException {
@@ -142,11 +164,11 @@ public class SagemakerTask extends AbstractTaskExecutor {
private String parseRequstJson(String requestJson) {
// combining local and global parameters
-Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap();
+Map<String, Property> paramsMap = taskRequest.getPrepareParamsMap();
return ParameterUtils.convertParameterPlaceholders(requestJson, ParamUtils.convert(paramsMap));
}
-private AmazonSageMaker createClient() {
+protected AmazonSageMaker createClient() {
final String awsAccessKeyId = PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID);
final String awsSecretAccessKey = PropertyUtils.getString(TaskConstants.AWS_SECRET_ACCESS_KEY);
final String awsRegion = PropertyUtils.getString(TaskConstants.AWS_REGION);

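SagemakerTask is the one plugin in this PR that actually splits the old handle() into the remote lifecycle: submitApplication() starts the pipeline and persists the PipelineId as the task's appIds JSON, trackApplicationStatus() rebuilds that id (for example after a restart or failover) and polls it, and cancelApplication() stops the remote execution. A hedged sketch of how a caller could drive that lifecycle; the surrounding master/worker wiring is hypothetical, only the SagemakerTask methods themselves are taken from this diff.

// Hypothetical driver for the split remote lifecycle.
SagemakerTask task = new SagemakerTask(taskExecutionContext);
task.init();

task.submitApplication();                     // starts the pipeline, setAppIds(JSON of PipelineId)
String recoverableAppIds = task.getAppIds();  // what a master could persist for failover

task.trackApplicationStatus();                // initPipelineId() restores the id from appIds if needed, then polls

// on a kill request:
task.cancelApplication();                     // stops the remote pipeline execution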
23
dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/test/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTaskTest.java

@@ -37,10 +37,8 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
import com.amazonaws.services.sagemaker.AmazonSageMaker;
@@ -54,24 +52,23 @@ import com.amazonaws.services.sagemaker.model.StopPipelineExecutionResult;
@RunWith(PowerMockRunner.class)
@PrepareForTest({JSONUtils.class, PropertyUtils.class,})
@PowerMockIgnore({"javax.*"})
-@SuppressStaticInitializationFor("org.apache.dolphinscheduler.spi.utils.PropertyUtils")
public class SagemakerTaskTest {
private final String pipelineExecutionArn = "test-pipeline-arn";
+private final String clientRequestToken = "test-pipeline-token";
private SagemakerTask sagemakerTask;
private AmazonSageMaker client;
-private PipelineUtils pipelineUtils;
+private PipelineUtils pipelineUtils = new PipelineUtils();
@Before
public void before() {
-PowerMockito.mockStatic(PropertyUtils.class);
String parameters = buildParameters();
TaskExecutionContext taskExecutionContext = Mockito.mock(TaskExecutionContext.class);
Mockito.when(taskExecutionContext.getTaskParams()).thenReturn(parameters);
+client = mock(AmazonSageMaker.class);
sagemakerTask = new SagemakerTask(taskExecutionContext);
sagemakerTask.init();
-client = mock(AmazonSageMaker.class);
-pipelineUtils = new PipelineUtils(client);
StartPipelineExecutionResult startPipelineExecutionResult = mock(StartPipelineExecutionResult.class);
when(startPipelineExecutionResult.getPipelineExecutionArn()).thenReturn(pipelineExecutionArn);
@@ -82,7 +79,8 @@ public class SagemakerTaskTest {
DescribePipelineExecutionResult describePipelineExecutionResult = mock(DescribePipelineExecutionResult.class);
when(describePipelineExecutionResult.getPipelineExecutionStatus()).thenReturn("Executing", "Succeeded");
-ListPipelineExecutionStepsResult listPipelineExecutionStepsResult = mock(ListPipelineExecutionStepsResult.class);
+ListPipelineExecutionStepsResult listPipelineExecutionStepsResult =
+mock(ListPipelineExecutionStepsResult.class);
PipelineExecutionStep pipelineExecutionStep = mock(PipelineExecutionStep.class);
List<PipelineExecutionStep> pipelineExecutionSteps = new ArrayList<>();
pipelineExecutionSteps.add(pipelineExecutionStep);
@@ -110,10 +108,11 @@
@Test
public void testPipelineExecution() throws Exception {
-pipelineUtils.startPipelineExecution(sagemakerTask.createStartPipelineRequest());
-Assert.assertEquals(pipelineExecutionArn, pipelineUtils.getPipelineExecutionArn());
-Assert.assertEquals(0, pipelineUtils.checkPipelineExecutionStatus());
-pipelineUtils.stopPipelineExecution();
+PipelineUtils.PipelineId pipelineId =
+pipelineUtils.startPipelineExecution(client, sagemakerTask.createStartPipelineRequest());
+Assert.assertEquals(pipelineExecutionArn, pipelineId.getPipelineExecutionArn());
+Assert.assertEquals(0, pipelineUtils.checkPipelineExecutionStatus(client, pipelineId));
+pipelineUtils.stopPipelineExecution(client, pipelineId);
}
private String buildParameters() {

33
dolphinscheduler-task-plugin/dolphinscheduler-task-seatunnel/src/main/java/org/apache/dolphinscheduler/plugin/task/seatunnel/SeatunnelTask.java

@@ -19,8 +19,11 @@ package org.apache.dolphinscheduler.plugin.task.seatunnel;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.BooleanUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -36,8 +39,10 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.plugin.task.seatunnel.Constants.CONFIG_OPTIONS;
@@ -45,7 +50,7 @@ import static org.apache.dolphinscheduler.plugin.task.seatunnel.Constants.CONFIG
/**
* seatunnel task
*/
-public class SeatunnelTask extends AbstractTaskExecutor {
+public class SeatunnelTask extends AbstractRemoteTask {
/**
* seatunnel parameters
@@ -76,6 +81,11 @@
logger);
}
+@Override
+public Set<String> getApplicationIds() throws TaskException {
+return Collections.emptySet();
+}
@Override
public void init() {
logger.info("SeaTunnel task params {}", taskExecutionContext.getTaskParams());
@@ -84,8 +94,9 @@
}
}
+// todo split handle to submit and track
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
try {
// construct process
String command = buildCommand();
@@ -107,9 +118,23 @@
}
@Override
-public void cancelApplication(boolean cancelApplication) throws Exception {
+public void submitApplication() throws TaskException {
+}
+@Override
+public void trackApplicationStatus() throws TaskException {
+}
+@Override
+public void cancelApplication() throws TaskException {
// cancel process
+try {
shellCommandExecutor.cancelApplication();
+} catch (Exception e) {
+throw new TaskException("cancel application error", e);
+}
}
private String buildCommand() throws Exception {

15
dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java

@@ -18,8 +18,10 @@
package org.apache.dolphinscheduler.plugin.task.shell;
import org.apache.commons.lang3.SystemUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -47,7 +49,7 @@ import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.RWXR_XR_
/**
* shell task
*/
-public class ShellTask extends AbstractTaskExecutor {
+public class ShellTask extends AbstractTask {
/**
* shell parameters
@@ -90,13 +92,12 @@ public class ShellTask extends AbstractTaskExecutor {
}
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
try {
// construct process
String command = buildCommand();
TaskResponse commandExecuteResult = shellCommandExecutor.run(command);
setExitStatusCode(commandExecuteResult.getExitStatusCode());
-setAppIds(String.join(TaskConstants.COMMA, getApplicationIds()));
setProcessId(commandExecuteResult.getProcessId());
shellParameters.dealOutParam(shellCommandExecutor.getVarPool());
} catch (InterruptedException e) {
@@ -112,9 +113,13 @@
}
@Override
-public void cancelApplication(boolean cancelApplication) throws Exception {
+public void cancel() throws TaskException {
// cancel process
+try {
shellCommandExecutor.cancelApplication();
+} catch (Exception e) {
+throw new TaskException("cancel application error", e);
+}
}
/**

12
dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java

@@ -23,8 +23,9 @@ import org.apache.commons.collections4.CollectionUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
import org.apache.dolphinscheduler.plugin.task.api.SQLTaskExecutionContext;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -60,7 +61,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
-public class SqlTask extends AbstractTaskExecutor {
+public class SqlTask extends AbstractTask {
/**
* taskExecutionContext
@@ -114,7 +115,7 @@ public class SqlTask extends AbstractTaskExecutor {
}
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
logger.info("Full sql parameters: {}", sqlParameters);
logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {},varPool : {} ,query max result limit {}",
sqlParameters.getType(),
@@ -164,6 +165,11 @@
}
}
+@Override
+public void cancel() throws TaskException {
+}
/**
* execute function and sql
*

33
dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTask.java

@@ -19,7 +19,10 @@ package org.apache.dolphinscheduler.plugin.task.zeppelin;
import com.fasterxml.jackson.databind.ObjectMapper;
import kong.unirest.Unirest;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
+import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@@ -32,11 +35,13 @@ import org.apache.zeppelin.client.ParagraphResult;
import org.apache.zeppelin.client.Status;
import org.apache.zeppelin.client.ZeppelinClient;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Set;
-public class ZeppelinTask extends AbstractTaskExecutor {
+public class ZeppelinTask extends AbstractRemoteTask {
/**
* taskExecutionContext
@@ -74,8 +79,9 @@ public class ZeppelinTask extends AbstractTaskExecutor {
this.zClient = getZeppelinClient();
}
+// todo split handle to submit and track
@Override
-public void handle() throws TaskException {
+public void handle(TaskCallBack taskCallBack) throws TaskException {
try {
final String paragraphId = this.zeppelinParameters.getParagraphId();
final String productionNoteDirectory = this.zeppelinParameters.getProductionNoteDirectory();
@@ -142,6 +148,15 @@
logger.error("zeppelin task submit failed with error", e);
throw new TaskException("Execute ZeppelinTask exception");
}
+}
+@Override
+public void submitApplication() throws TaskException {
+}
+@Override
+public void trackApplicationStatus() throws TaskException {
}
@@ -188,9 +203,8 @@
}
@Override
-public void cancelApplication(boolean status) throws Exception {
-super.cancelApplication(status);
+public void cancelApplication() throws TaskException {
final String restEndpoint = this.zeppelinParameters.getRestEndpoint();
final String noteId = this.zeppelinParameters.getNoteId();
final String paragraphId = this.zeppelinParameters.getParagraphId();
if (paragraphId == null) {
@@ -207,7 +221,11 @@
this.taskExecutionContext.getTaskInstanceId(),
noteId,
paragraphId);
+try {
this.zClient.cancelParagraph(noteId, paragraphId);
+} catch (Exception e) {
+throw new TaskException("cancel paragraph error", e);
+}
logger.info("zeppelin task terminated, taskId: {}, noteId: {}, paragraphId: {}",
this.taskExecutionContext.getTaskInstanceId(),
noteId,
@@ -216,4 +234,9 @@
}
+@Override
+public Set<String> getApplicationIds() throws TaskException {
+return Collections.emptySet();
+}
}

17
dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/test/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTaskTest.java

@@ -28,6 +28,8 @@ import static org.powermock.api.mockito.PowerMockito.spy;
import static org.powermock.api.mockito.PowerMockito.when;
import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.DateUtils;
@@ -73,6 +75,9 @@ public class ZeppelinTaskTest {
private ZeppelinTask zeppelinTask;
private ParagraphResult paragraphResult;
private NoteResult noteResult;
+private TaskCallBack taskCallBack = (taskInstanceId, appIds) -> {
+};
@Before
public void before() throws Exception {
@@ -95,7 +100,7 @@
@Test
public void testHandleWithParagraphExecutionSuccess() throws Exception {
when(this.paragraphResult.getStatus()).thenReturn(Status.FINISHED);
-this.zeppelinTask.handle();
+this.zeppelinTask.handle(taskCallBack);
Mockito.verify(this.zClient).executeParagraph(MOCK_NOTE_ID,
MOCK_PARAGRAPH_ID,
(Map<String, String>) mapper.readValue(MOCK_PARAMETERS, Map.class));
@@ -107,7 +112,7 @@
@Test
public void testHandleWithParagraphExecutionAborted() throws Exception {
when(this.paragraphResult.getStatus()).thenReturn(Status.ABORT);
-this.zeppelinTask.handle();
+this.zeppelinTask.handle(taskCallBack);
Mockito.verify(this.zClient).executeParagraph(MOCK_NOTE_ID,
MOCK_PARAGRAPH_ID,
(Map<String, String>) mapper.readValue(MOCK_PARAMETERS, Map.class));
@@ -119,7 +124,7 @@
@Test
public void testHandleWithParagraphExecutionError() throws Exception {
when(this.paragraphResult.getStatus()).thenReturn(Status.ERROR);
-this.zeppelinTask.handle();
+this.zeppelinTask.handle(taskCallBack);
Mockito.verify(this.zClient).executeParagraph(MOCK_NOTE_ID,
MOCK_PARAGRAPH_ID,
(Map<String, String>) mapper.readValue(MOCK_PARAMETERS, Map.class));
@@ -133,7 +138,7 @@
when(this.zClient.executeParagraph(any(), any(), any(Map.class))).
thenThrow(new TaskException("Something wrong happens from zeppelin side"));
// when(this.paragraphResult.getStatus()).thenReturn(Status.ERROR);
-this.zeppelinTask.handle();
+this.zeppelinTask.handle(taskCallBack);
Mockito.verify(this.zClient).executeParagraph(MOCK_NOTE_ID,
MOCK_PARAGRAPH_ID,
(Map<String, String>) mapper.readValue(MOCK_PARAMETERS, Map.class));
@@ -159,7 +164,7 @@
when(paragraphResult.getResultInText()).thenReturn("mock-zeppelin-paragraph-execution-result");
this.zeppelinTask.init();
when(this.paragraphResult.getStatus()).thenReturn(Status.FINISHED);
-this.zeppelinTask.handle();
+this.zeppelinTask.handle(taskCallBack);
Mockito.verify(this.zClient).executeNote(MOCK_NOTE_ID,
(Map<String, String>) mapper.readValue(MOCK_PARAMETERS, Map.class));
Mockito.verify(this.noteResult).getParagraphResultList();
@@ -186,7 +191,7 @@
this.zeppelinTask.init();
when(this.paragraphResult.getStatus()).thenReturn(Status.FINISHED);
when(DateUtils.getTimestampString()).thenReturn("123456789");
-this.zeppelinTask.handle();
+this.zeppelinTask.handle(taskCallBack);
Mockito.verify(this.zClient).cloneNote(
MOCK_NOTE_ID,
String.format("%s%s_%s", MOCK_PRODUCTION_DIRECTORY, MOCK_NOTE_ID, "123456789"));

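The no-op lambda added to this test suggests TaskCallBack is a functional interface whose single method receives the task instance id and the task's application ids. A hedged sketch of a callback that simply logs what it is given; the parameter types are inferred from the lambda above, not confirmed by this diff.

// Hedged sketch: parameter names follow the (taskInstanceId, appIds) lambda
// used in ZeppelinTaskTest; their exact types are an assumption.
TaskCallBack loggingCallBack = (taskInstanceId, appIds) ->
        org.slf4j.LoggerFactory.getLogger("demo-callback")
                .info("task {} reported application ids {}", taskInstanceId, appIds);

zeppelinTask.handle(loggingCallBack);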
112
dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/UpgradeDao.java

@@ -78,6 +78,7 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Strings;
 public abstract class UpgradeDao {
 public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class);
 private static final String T_VERSION_NAME = "t_escheduler_version";
 private static final String T_NEW_VERSION_NAME = "t_ds_version";
@@ -198,7 +199,8 @@ public abstract class UpgradeDao {
 Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
 try {
 Map<Integer, String> oldWorkerGroupMap = workerGroupDao.queryAllOldWorkerGroup(dataSource.getConnection());
-Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
+Map<Integer, String> processDefinitionJsonMap =
+        processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
 for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
 ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
@@ -225,7 +227,8 @@ public abstract class UpgradeDao {
 replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
 }
 if (replaceProcessDefinitionMap.size() > 0) {
-processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
+processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(),
+        replaceProcessDefinitionMap);
 }
 } catch (Exception e) {
 logger.error("update process definition json workergroup error", e);
@@ -238,7 +241,8 @@ public abstract class UpgradeDao {
 Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
 try {
 Map<String, Integer> resourcesMap = resourceDao.listAllResources(dataSource.getConnection());
-Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
+Map<Integer, String> processDefinitionJsonMap =
+        processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
 for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
 ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
@@ -249,10 +253,13 @@
 ObjectNode param = (ObjectNode) task.get("params");
 if (param != null) {
-List<ResourceInfo> resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
-ResourceInfo mainJar = JSONUtils.parseObject(param.get("mainJar").toString(), ResourceInfo.class);
+List<ResourceInfo> resourceList =
+        JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
+ResourceInfo mainJar =
+        JSONUtils.parseObject(param.get("mainJar").toString(), ResourceInfo.class);
 if (mainJar != null && mainJar.getId() == null) {
-String fullName = mainJar.getRes().startsWith("/") ? mainJar.getRes() : String.format("/%s", mainJar.getRes());
+String fullName = mainJar.getRes().startsWith("/") ? mainJar.getRes()
+        : String.format("/%s", mainJar.getRes());
 if (resourcesMap.containsKey(fullName)) {
 mainJar.setId(resourcesMap.get(fullName));
 param.put("mainJar", JSONUtils.parseObject(JSONUtils.toJsonString(mainJar)));
@@ -261,7 +268,8 @@
 if (CollectionUtils.isNotEmpty(resourceList)) {
 List<ResourceInfo> newResourceList = resourceList.stream().map(resInfo -> {
-String fullName = resInfo.getRes().startsWith("/") ? resInfo.getRes() : String.format("/%s", resInfo.getRes());
+String fullName = resInfo.getRes().startsWith("/") ? resInfo.getRes()
+        : String.format("/%s", resInfo.getRes());
 if (resInfo.getId() == null && resourcesMap.containsKey(fullName)) {
 resInfo.setId(resourcesMap.get(fullName));
 }
@@ -281,7 +289,8 @@
 replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
 }
 if (replaceProcessDefinitionMap.size() > 0) {
-processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
+processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(),
+        replaceProcessDefinitionMap);
 }
 } catch (Exception e) {
 logger.error("update process definition json resource list error", e);
@@ -291,7 +300,8 @@
 private void upgradeDolphinSchedulerDML(String schemaDir) {
 String schemaVersion = schemaDir.split("_")[0];
-Resource sqlFilePath = new ClassPathResource(String.format("sql/upgrade/%s/%s/dolphinscheduler_dml.sql", schemaDir, getDbType().name().toLowerCase()));
+Resource sqlFilePath = new ClassPathResource(String.format("sql/upgrade/%s/%s/dolphinscheduler_dml.sql",
+        schemaDir, getDbType().name().toLowerCase()));
 logger.info("sqlSQLFilePath: {}", sqlFilePath);
 Connection conn = null;
 PreparedStatement pstmt = null;
@@ -355,7 +365,8 @@
 * @param schemaDir schemaDir
 */
 private void upgradeDolphinSchedulerDDL(String schemaDir, String scriptFile) {
-Resource sqlFilePath = new ClassPathResource(String.format("sql/upgrade/%s/%s/%s", schemaDir, getDbType().name().toLowerCase(), scriptFile));
+Resource sqlFilePath = new ClassPathResource(
+        String.format("sql/upgrade/%s/%s/%s", schemaDir, getDbType().name().toLowerCase(), scriptFile));
 Connection conn = null;
 PreparedStatement pstmt = null;
 try {
@@ -423,21 +434,26 @@
 projectDao.updateProjectCode(dataSource.getConnection(), projectIdCodeMap);
 // execute process definition code
-List<ProcessDefinition> processDefinitions = processDefinitionDao.queryProcessDefinition(dataSource.getConnection());
-processDefinitionDao.updateProcessDefinitionCode(dataSource.getConnection(), processDefinitions, projectIdCodeMap);
+List<ProcessDefinition> processDefinitions =
+        processDefinitionDao.queryProcessDefinition(dataSource.getConnection());
+processDefinitionDao.updateProcessDefinitionCode(dataSource.getConnection(), processDefinitions,
+        projectIdCodeMap);
 // execute schedule
 Map<Integer, Long> allSchedule = scheduleDao.queryAllSchedule(dataSource.getConnection());
-Map<Integer, Long> processIdCodeMap = processDefinitions.stream().collect(Collectors.toMap(ProcessDefinition::getId, ProcessDefinition::getCode));
+Map<Integer, Long> processIdCodeMap = processDefinitions.stream()
+        .collect(Collectors.toMap(ProcessDefinition::getId, ProcessDefinition::getCode));
 scheduleDao.updateScheduleCode(dataSource.getConnection(), allSchedule, processIdCodeMap);
 // json split
-Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
+Map<Integer, String> processDefinitionJsonMap =
+        processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
 List<ProcessDefinitionLog> processDefinitionLogs = new ArrayList<>();
 List<ProcessTaskRelationLog> processTaskRelationLogs = new ArrayList<>();
 List<TaskDefinitionLog> taskDefinitionLogs = new ArrayList<>();
 Map<Integer, Map<Long, Map<String, Long>>> processTaskMap = new HashMap<>();
-splitProcessDefinitionJson(processDefinitions, processDefinitionJsonMap, processDefinitionLogs, processTaskRelationLogs, taskDefinitionLogs, processTaskMap);
+splitProcessDefinitionJson(processDefinitions, processDefinitionJsonMap, processDefinitionLogs,
+        processTaskRelationLogs, taskDefinitionLogs, processTaskMap);
 convertDependence(taskDefinitionLogs, projectIdCodeMap, processTaskMap);
 // execute json split
@@ -465,7 +481,8 @@
 ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
 ProcessDefinition processDefinition = processDefinitionMap.get(entry.getKey());
 if (processDefinition != null) {
-processDefinition.setTenantId(jsonObject.get("tenantId") == null ? -1 : jsonObject.get("tenantId").asInt());
+processDefinition
+        .setTenantId(jsonObject.get("tenantId") == null ? -1 : jsonObject.get("tenantId").asInt());
 processDefinition.setTimeout(jsonObject.get("timeout").asInt());
 processDefinition.setGlobalParams(jsonObject.get("globalParams").toString());
 } else {
@@ -485,8 +502,10 @@
 if (param != null) {
 JsonNode resourceJsonNode = param.get("resourceList");
 if (resourceJsonNode != null && !resourceJsonNode.isEmpty()) {
-List<ResourceInfo> resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
-List<Integer> resourceIds = resourceList.stream().map(ResourceInfo::getId).collect(Collectors.toList());
+List<ResourceInfo> resourceList =
+        JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
+List<Integer> resourceIds =
+        resourceList.stream().map(ResourceInfo::getId).collect(Collectors.toList());
 taskDefinitionLog.setResourceIds(Joiner.on(Constants.COMMA).join(resourceIds));
 } else {
 taskDefinitionLog.setResourceIds("");
@@ -494,7 +513,8 @@
 if (TASK_TYPE_SUB_PROCESS.equals(taskType)) {
 JsonNode jsonNodeDefinitionId = param.get("processDefinitionId");
 if (jsonNodeDefinitionId != null) {
-param.put("processDefinitionCode", processDefinitionMap.get(jsonNodeDefinitionId.asInt()).getCode());
+param.put("processDefinitionCode",
+        processDefinitionMap.get(jsonNodeDefinitionId.asInt()).getCode());
 param.remove("processDefinitionId");
 }
 }
@@ -502,23 +522,29 @@
 param.put("dependence", task.get("dependence"));
 taskDefinitionLog.setTaskParams(JSONUtils.toJsonString(param));
 }
-TaskTimeoutParameter timeout = JSONUtils.parseObject(JSONUtils.toJsonString(task.get("timeout")), TaskTimeoutParameter.class);
+TaskTimeoutParameter timeout =
+        JSONUtils.parseObject(JSONUtils.toJsonString(task.get("timeout")), TaskTimeoutParameter.class);
 if (timeout != null) {
 taskDefinitionLog.setTimeout(timeout.getInterval());
 taskDefinitionLog.setTimeoutFlag(timeout.getEnable() ? TimeoutFlag.OPEN : TimeoutFlag.CLOSE);
 taskDefinitionLog.setTimeoutNotifyStrategy(timeout.getStrategy());
 }
-String desc = task.get("description") != null ? task.get("description").asText() :
-        task.get("desc") != null ? task.get("desc").asText() : "";
+String desc = task.get("description") != null ? task.get("description").asText()
+        : task.get("desc") != null ? task.get("desc").asText() : "";
 taskDefinitionLog.setDescription(desc);
-taskDefinitionLog.setFlag(Constants.FLOWNODE_RUN_FLAG_NORMAL.equals(task.get("runFlag").asText()) ? Flag.YES : Flag.NO);
+taskDefinitionLog.setFlag(
+        Constants.FLOWNODE_RUN_FLAG_NORMAL.equals(task.get("runFlag").asText()) ? Flag.YES : Flag.NO);
 taskDefinitionLog.setTaskType(taskType);
-taskDefinitionLog.setFailRetryInterval(TASK_TYPE_SUB_PROCESS.equals(taskType) ? 1 : task.get("retryInterval").asInt());
-taskDefinitionLog.setFailRetryTimes(TASK_TYPE_SUB_PROCESS.equals(taskType) ? 0 : task.get("maxRetryTimes").asInt());
-taskDefinitionLog.setTaskPriority(JSONUtils.parseObject(JSONUtils.toJsonString(task.get("taskInstancePriority")), Priority.class));
+taskDefinitionLog.setFailRetryInterval(
+        TASK_TYPE_SUB_PROCESS.equals(taskType) ? 1 : task.get("retryInterval").asInt());
+taskDefinitionLog.setFailRetryTimes(
+        TASK_TYPE_SUB_PROCESS.equals(taskType) ? 0 : task.get("maxRetryTimes").asInt());
+taskDefinitionLog.setTaskPriority(JSONUtils
+        .parseObject(JSONUtils.toJsonString(task.get("taskInstancePriority")), Priority.class));
 String name = task.get("name").asText();
 taskDefinitionLog.setName(name);
-taskDefinitionLog.setWorkerGroup(task.get("workerGroup") == null ? "default" : task.get("workerGroup").asText());
+taskDefinitionLog
+        .setWorkerGroup(task.get("workerGroup") == null ? "default" : task.get("workerGroup").asText());
 long taskCode = CodeGenerateUtils.getInstance().genCode();
 taskDefinitionLog.setCode(taskCode);
 taskDefinitionLog.setVersion(Constants.VERSION_FIRST);
@@ -550,14 +576,16 @@
 }
 }
-public void convertConditions(List<TaskDefinitionLog> taskDefinitionLogList, Map<String, Long> taskNameCodeMap) throws Exception {
+public void convertConditions(List<TaskDefinitionLog> taskDefinitionLogList,
+                              Map<String, Long> taskNameCodeMap) throws Exception {
 for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogList) {
 if (TASK_TYPE_CONDITIONS.equals(taskDefinitionLog.getTaskType())) {
 ObjectMapper objectMapper = new ObjectMapper();
 ObjectNode taskParams = JSONUtils.parseObject(taskDefinitionLog.getTaskParams());
 // reset conditionResult
 ObjectNode conditionResult = (ObjectNode) taskParams.get("conditionResult");
-List<String> successNode = JSONUtils.toList(conditionResult.get("successNode").toString(), String.class);
+List<String> successNode =
+        JSONUtils.toList(conditionResult.get("successNode").toString(), String.class);
 List<Long> nodeCode = new ArrayList<>();
 successNode.forEach(node -> nodeCode.add(taskNameCodeMap.get(node)));
 conditionResult.set("successNode", objectMapper.readTree(objectMapper.writeValueAsString(nodeCode)));
@@ -567,10 +595,12 @@
 conditionResult.set("failedNode", objectMapper.readTree(objectMapper.writeValueAsString(nodeCode)));
 // reset dependItemList
 ObjectNode dependence = (ObjectNode) taskParams.get("dependence");
-ArrayNode dependTaskList = JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList")));
+ArrayNode dependTaskList =
+        JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList")));
 for (int i = 0; i < dependTaskList.size(); i++) {
 ObjectNode dependTask = (ObjectNode) dependTaskList.path(i);
-ArrayNode dependItemList = JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList")));
+ArrayNode dependItemList =
+        JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList")));
 for (int j = 0; j < dependItemList.size(); j++) {
 ObjectNode dependItem = (ObjectNode) dependItemList.path(j);
 JsonNode depTasks = dependItem.get("depTasks");
@@ -591,7 +621,8 @@
 if (Strings.isNullOrEmpty(locations)) {
 return locations;
 }
-Map<String, ObjectNode> locationsMap = JSONUtils.parseObject(locations, new TypeReference<Map<String, ObjectNode>>() {
+Map<String, ObjectNode> locationsMap =
+        JSONUtils.parseObject(locations, new TypeReference<Map<String, ObjectNode>>() {
 });
 if (locationsMap == null) {
 return locations;
@@ -615,26 +646,33 @@
 if (TASK_TYPE_DEPENDENT.equals(taskDefinitionLog.getTaskType())) {
 ObjectNode taskParams = JSONUtils.parseObject(taskDefinitionLog.getTaskParams());
 ObjectNode dependence = (ObjectNode) taskParams.get("dependence");
-ArrayNode dependTaskList = JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList")));
+ArrayNode dependTaskList =
+        JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList")));
 for (int i = 0; i < dependTaskList.size(); i++) {
 ObjectNode dependTask = (ObjectNode) dependTaskList.path(i);
-ArrayNode dependItemList = JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList")));
+ArrayNode dependItemList =
+        JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList")));
 for (int j = 0; j < dependItemList.size(); j++) {
 ObjectNode dependItem = (ObjectNode) dependItemList.path(j);
 dependItem.put("projectCode", projectIdCodeMap.get(dependItem.get("projectId").asInt()));
 int definitionId = dependItem.get("definitionId").asInt();
 Map<Long, Map<String, Long>> processCodeTaskNameCodeMap = processTaskMap.get(definitionId);
 if (processCodeTaskNameCodeMap == null) {
-logger.warn("We can't find processDefinition [{}], please check it is not exist, remove this dependence", definitionId);
+logger.warn(
+        "We can't find processDefinition [{}], please check it is not exist, remove this dependence",
+        definitionId);
 dependItemList.remove(j);
 continue;
 }
-Optional<Map.Entry<Long, Map<String, Long>>> mapEntry = processCodeTaskNameCodeMap.entrySet().stream().findFirst();
+Optional<Map.Entry<Long, Map<String, Long>>> mapEntry =
+        processCodeTaskNameCodeMap.entrySet().stream().findFirst();
 if (mapEntry.isPresent()) {
 Map.Entry<Long, Map<String, Long>> processCodeTaskNameCodeEntry = mapEntry.get();
 dependItem.put("definitionCode", processCodeTaskNameCodeEntry.getKey());
 String depTasks = dependItem.get("depTasks").asText();
-long taskCode = "ALL".equals(depTasks) || processCodeTaskNameCodeEntry.getValue() == null ? 0L : processCodeTaskNameCodeEntry.getValue().get(depTasks);
+long taskCode =
+        "ALL".equals(depTasks) || processCodeTaskNameCodeEntry.getValue() == null ? 0L
+                : processCodeTaskNameCodeEntry.getValue().get(depTasks);
 dependItem.put("depTaskCode", taskCode);
 }
 dependItem.remove("projectId");

9
dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/message/TaskExecuteRunningMessageSender.java

@@ -25,11 +25,11 @@ import org.apache.dolphinscheduler.remote.utils.Host;
 import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
 import org.apache.dolphinscheduler.server.worker.rpc.WorkerRpcClient;
+import lombok.NonNull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
-import lombok.NonNull;
 @Component
 public class TaskExecuteRunningMessageSender implements MessageSender<TaskExecuteRunningCommand> {
@@ -46,8 +46,8 @@ public class TaskExecuteRunningMessageSender implements MessageSender<TaskExecut
 public TaskExecuteRunningCommand buildMessage(@NonNull TaskExecutionContext taskExecutionContext,
 @NonNull String messageReceiverAddress) {
-TaskExecuteRunningCommand taskExecuteRunningMessage
-        = new TaskExecuteRunningCommand(workerConfig.getWorkerAddress(),
+TaskExecuteRunningCommand taskExecuteRunningMessage =
+        new TaskExecuteRunningCommand(workerConfig.getWorkerAddress(),
 messageReceiverAddress,
 System.currentTimeMillis());
 taskExecuteRunningMessage.setTaskInstanceId(taskExecutionContext.getTaskInstanceId());
@@ -57,6 +57,7 @@ public class TaskExecuteRunningMessageSender implements MessageSender<TaskExecut
 taskExecuteRunningMessage.setHost(taskExecutionContext.getHost());
 taskExecuteRunningMessage.setStartTime(taskExecutionContext.getStartTime());
 taskExecuteRunningMessage.setExecutePath(taskExecutionContext.getExecutePath());
+taskExecuteRunningMessage.setAppIds(taskExecutionContext.getAppIds());
 return taskExecuteRunningMessage;
 }

2
dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java

@@ -178,7 +178,7 @@
 return;
 }
 try {
-task.cancelApplication(true);
+task.cancel();
 } catch (Exception e) {
 logger.error("kill task error", e);
 }
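
Note: the kill path now calls the plugin's cancel() instead of cancelApplication(true). What a plugin does inside cancel() is plugin-specific; the class below is only a sketch of the intent for a remote-style task, where the remoteJobId field and stopRemoteJob(...) helper are hypothetical names, not code from this patch.

// Hedged sketch, not part of the patch: the shape of a plugin-side cancel() under the new model.
import org.apache.dolphinscheduler.plugin.task.api.TaskException;

public class RemoteCancelSketch {

    // id of the job this task submitted to an external engine (hypothetical field)
    private String remoteJobId;

    public void cancel() throws TaskException {
        if (remoteJobId == null) {
            return; // nothing was submitted, so there is nothing to stop
        }
        try {
            stopRemoteJob(remoteJobId); // hypothetical client call into the remote engine
        } catch (Exception e) {
            throw new TaskException("cancel remote job " + remoteJobId + " failed");
        }
    }

    private void stopRemoteJob(String jobId) {
        // placeholder for the engine-specific stop/kill call
    }
}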

5
dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/DefaultWorkerDelayTaskExecuteRunnable.java

@@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.worker.runner;
 import lombok.NonNull;
 import org.apache.dolphinscheduler.common.storage.StorageOperate;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
 import org.apache.dolphinscheduler.plugin.task.api.TaskException;
 import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
@@ -41,11 +42,11 @@ public class DefaultWorkerDelayTaskExecuteRunnable extends WorkerDelayTaskExecut
 }
 @Override
-public void executeTask() throws TaskException {
+public void executeTask(TaskCallBack taskCallBack) throws TaskException {
 if (task == null) {
 throw new TaskException("The task plugin instance is not initialized");
 }
-task.handle();
+task.handle(taskCallBack);
 }
 @Override
@Override @Override

62
dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskCallbackImpl.java

@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.runner;
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.plugin.task.api.model.ApplicationInfo;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.server.worker.rpc.WorkerMessageSender;
import lombok.Builder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Builder
public class TaskCallbackImpl implements TaskCallBack {
protected final Logger logger =
LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, TaskCallbackImpl.class));
private final WorkerMessageSender workerMessageSender;
private final String masterAddress;
public TaskCallbackImpl(WorkerMessageSender workerMessageSender, String masterAddress) {
this.workerMessageSender = workerMessageSender;
this.masterAddress = masterAddress;
}
@Override
public void updateRemoteApplicationInfo(int taskInstanceId, ApplicationInfo applicationInfo) {
TaskExecutionContext taskExecutionContext =
TaskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId);
if (taskExecutionContext == null) {
logger.error("task execution context is empty, taskInstanceId: {}, applicationInfo:{}", taskInstanceId,
applicationInfo);
return;
}
logger.info("send remote application info {}", applicationInfo);
taskExecutionContext.setAppIds(applicationInfo.getAppIds());
workerMessageSender.sendMessageWithRetry(taskExecutionContext, masterAddress, CommandType.TASK_EXECUTE_RUNNING);
}
}
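
Note: TaskCallbackImpl is the worker-side bridge for the new remote task model: it stores the reported application ids on the cached TaskExecutionContext and immediately pushes a TASK_EXECUTE_RUNNING message (which, per the sender change above, now carries appIds) to the master. From a plugin's point of view the callback is simply the TaskCallBack handed into handle(...). The sketch below shows how a remote-flavoured task might use it; the class itself, the submitToRemoteEngine() helper, and the way ApplicationInfo is populated are illustrative assumptions, not code from this patch.

// Hedged sketch, not part of the patch: a task plugin reporting its remote application id.
import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.ApplicationInfo;

public class RemoteTaskSketch {

    private final TaskExecutionContext taskExecutionContext;

    public RemoteTaskSketch(TaskExecutionContext taskExecutionContext) {
        this.taskExecutionContext = taskExecutionContext;
    }

    public void handle(TaskCallBack taskCallBack) throws TaskException {
        // submit the work to an external engine and keep its id (hypothetical helper)
        String remoteAppId = submitToRemoteEngine();

        // assumption: ApplicationInfo exposes its appIds field via a no-arg constructor and setter
        ApplicationInfo applicationInfo = new ApplicationInfo();
        applicationInfo.setAppIds(remoteAppId);

        // hand the id to the worker; TaskCallbackImpl forwards it to the master as shown above
        taskCallBack.updateRemoteApplicationInfo(taskExecutionContext.getTaskInstanceId(), applicationInfo);
    }

    private String submitToRemoteEngine() {
        return "application_1661500000000_0001"; // placeholder id
    }
}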

8
dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerTaskExecuteRunnable.java

@@ -26,6 +26,7 @@ import org.apache.dolphinscheduler.common.utils.CommonUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.LoggerUtils;
 import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
+import org.apache.dolphinscheduler.plugin.task.api.TaskCallBack;
 import org.apache.dolphinscheduler.plugin.task.api.TaskChannel;
 import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
 import org.apache.dolphinscheduler.plugin.task.api.TaskException;
@@ -90,7 +91,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable {
 logger.info("Set task logger name: {}", taskLogName);
 }
-protected abstract void executeTask();
+protected abstract void executeTask(TaskCallBack taskCallBack);
 protected void afterExecute() throws TaskException {
 if (task == null) {
@@ -118,7 +119,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable {
 // cancel the task
 if (task != null) {
 try {
-task.cancelApplication(true);
+task.cancel();
 ProcessUtils.killYarnJob(taskExecutionContext);
 } catch (Exception e) {
 logger.error("Task execute failed and cancel the application failed, this will not affect the taskInstance status, but you need to check manual", e);
@@ -148,7 +149,8 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable {
 beforeExecute();
-executeTask();
+TaskCallBack taskCallBack = TaskCallbackImpl.builder().workerMessageSender(workerMessageSender).masterAddress(masterAddress).build();
+executeTask(taskCallBack);
 afterExecute();

24
dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/utils/TaskExecutionCheckerUtils.java

@@ -17,10 +17,6 @@
 package org.apache.dolphinscheduler.server.worker.utils;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.collections.MapUtils;
-import org.apache.commons.lang3.SystemUtils;
-import org.apache.commons.lang3.tuple.Pair;
 import org.apache.dolphinscheduler.common.exception.StorageOperateNoConfiguredException;
 import org.apache.dolphinscheduler.common.storage.StorageOperate;
 import org.apache.dolphinscheduler.common.utils.CommonUtils;
@@ -31,7 +27,11 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskException;
 import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
 import org.apache.dolphinscheduler.server.worker.metrics.WorkerServerMetrics;
-import org.slf4j.Logger;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.SystemUtils;
+import org.apache.commons.lang3.tuple.Pair;
 import java.io.File;
 import java.nio.file.Files;
@@ -40,6 +40,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import org.slf4j.Logger;
 public class TaskExecutionCheckerUtils {
 public static void checkTenantExist(WorkerConfig workerConfig, TaskExecutionContext taskExecutionContext) {
@@ -59,12 +61,14 @@
 osUserExistFlag = OSUtils.getUserList().contains(taskExecutionContext.getTenantCode());
 }
 if (!osUserExistFlag) {
-throw new TaskException(String.format("TenantCode: %s doesn't exist", taskExecutionContext.getTenantCode()));
+throw new TaskException(
+        String.format("TenantCode: %s doesn't exist", taskExecutionContext.getTenantCode()));
 }
 } catch (TaskException ex) {
 throw ex;
 } catch (Exception ex) {
-throw new TaskException(String.format("TenantCode: %s doesn't exist", taskExecutionContext.getTenantCode()));
+throw new TaskException(
+        String.format("TenantCode: %s doesn't exist", taskExecutionContext.getTenantCode()));
 }
 }
@@ -84,7 +88,8 @@
 }
 }
-public static void downloadResourcesIfNeeded(StorageOperate storageOperate, TaskExecutionContext taskExecutionContext, Logger logger) {
+public static void downloadResourcesIfNeeded(StorageOperate storageOperate,
+                                             TaskExecutionContext taskExecutionContext, Logger logger) {
 String execLocalPath = taskExecutionContext.getExecutePath();
 Map<String, String> projectRes = taskExecutionContext.getResources();
 if (MapUtils.isEmpty(projectRes)) {
@@ -113,7 +118,8 @@
 String resPath = storageOperate.getResourceFileName(tenantCode, fullName);
 logger.info("get resource file from path:{}", resPath);
 long resourceDownloadStartTime = System.currentTimeMillis();
-storageOperate.download(tenantCode, resPath, execLocalPath + File.separator + fullName, false, true);
+storageOperate.download(tenantCode, resPath, execLocalPath + File.separator + fullName, false,
+        true);
 WorkerServerMetrics
         .recordWorkerResourceDownloadTime(System.currentTimeMillis() - resourceDownloadStartTime);
 WorkerServerMetrics.recordWorkerResourceDownloadSize(
