Merge remote-tracking branch 'upstream/2.0.1-prepare' into 2.0.1-prepare

2.0.7-release
CalvinKirs, 3 years ago
parent commit 2bff58e5e1
68 changed files (changed lines per file in parentheses):

  1. dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java (6)
  2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java (7)
  3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java (2)
  4. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (111)
  5. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationController.java (297)
  6. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java (43)
  7. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java (29)
  8. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (13)
  9. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceService.java (6)
  10. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java (59)
  11. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationService.java (138)
  12. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java (25)
  13. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskDefinitionService.java (14)
  14. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java (18)
  15. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (386)
  16. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java (653)
  17. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java (2)
  18. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java (191)
  19. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java (127)
  20. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java (11)
  21. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationControllerTest.java (84)
  22. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (25)
  23. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationServiceTest.java (577)
  24. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java (53)
  25. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FileUtilsTest.java (2)
  26. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (6)
  27. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java (2)
  28. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.java (10)
  29. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.java (17)
  30. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.java (85)
  31. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java (4)
  32. dolphinscheduler-dao/src/main/resources/datasource.properties (69)
  33. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.xml (7)
  34. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.xml (24)
  35. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.xml (101)
  36. dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl.sql (2)
  37. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java (6)
  38. dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java (6)
  39. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java (9)
  40. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java (2)
  41. dolphinscheduler-server/src/main/resources/config/install_config.conf (17)
  42. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactoryTest.java (2)
  43. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java (3)
  44. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java (72)
  45. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.scss (29)
  46. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue (325)
  47. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/layoutConfigModal.vue (115)
  48. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss (4)
  49. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.vue (9)
  50. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.scss (14)
  51. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.vue (101)
  52. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-helper.js (163)
  53. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style-mixin.js (145)
  54. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style.scss (21)
  55. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue (10)
  56. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue (24)
  57. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue (1)
  58. dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/kinship/_source/graphGridOption.js (14)
  59. dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js (17)
  60. dolphinscheduler-ui/src/js/conf/home/store/user/actions.js (2)
  61. dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js (2)
  62. install.sh (10)
  63. script/dolphinscheduler-daemon.sh (3)
  64. script/remove-zk-node.sh (2)
  65. script/scp-hosts.sh (2)
  66. script/start-all.sh (3)
  67. script/status-all.sh (2)
  68. script/stop-all.sh (2)

6
dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java

@@ -38,7 +38,9 @@ import javax.annotation.PreDestroy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan;
@EnableAutoConfiguration
@@ -66,7 +68,9 @@ public class AlertServer implements Closeable {
}
public static void main(String[] args) {
SpringApplication.run(AlertServer.class, args);
new SpringApplicationBuilder(AlertServer.class)
.web(WebApplicationType.NONE)
.run(args);
}
@PostConstruct
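
For context, a minimal sketch of the non-web startup pattern adopted above: SpringApplicationBuilder with WebApplicationType.NONE boots the Spring context without starting an embedded web server (standard Spring Boot API; the class and annotation below are illustrative stand-ins, not copied from AlertServer).

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;

// Illustrative stand-in for a non-web server such as AlertServer.
@SpringBootApplication
public class NonWebServerExample {
    public static void main(String[] args) {
        // Build the application context but skip the embedded web server,
        // mirroring the AlertServer main() change above.
        new SpringApplicationBuilder(NonWebServerExample.class)
                .web(WebApplicationType.NONE)
                .run(args);
    }
}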

7
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java

@@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
@@ -213,12 +214,14 @@ public class AlertPluginInstanceController extends BaseController {
* paging query alert plugin instance group list
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return alert plugin instance list page
*/
@ApiOperation(value = "queryAlertPluginInstanceListPaging", notes = "QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20")
})
@@ -227,13 +230,15 @@ public class AlertPluginInstanceController extends BaseController {
@ApiException(LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
Result result = checkPageParams(pageNo, pageSize);
if (!result.checkResult()) {
return result;
}
return alertPluginInstanceService.queryPluginPage(pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
return alertPluginInstanceService.listPaging(loginUser, searchVal, pageNo, pageSize);
}
}

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java

@@ -99,7 +99,7 @@ public class ExecutorController extends BaseController {
@ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"),
@ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"),
@ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long", example = "default"),
@ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long", example = "-1"),
@ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "expectedParallelismNumber", value = "EXPECTED_PARALLELISM_NUMBER", dataType = "Int", example = "8")
})

111
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java

@@ -696,4 +696,115 @@ public class ProcessDefinitionController extends BaseController {
Map<String, Object> result = processDefinitionService.importProcessDefinition(loginUser, projectCode, file);
return returnDataList(result);
}
/**
* create empty process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return process definition code
*/
@ApiOperation(value = "createEmptyProcessDefinition", notes = "CREATE_EMPTY_PROCESS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, dataType = "Long", example = "123456789"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String")
})
@PostMapping(value = "/empty")
@ResponseStatus(HttpStatus.OK)
@ApiException(CREATE_PROCESS_DEFINITION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result createEmptyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "globalParams", required = false, defaultValue = "[]") String globalParams,
@RequestParam(value = "timeout", required = false, defaultValue = "0") int timeout,
@RequestParam(value = "tenantCode", required = true) String tenantCode,
@RequestParam(value = "scheduleJson", required = false) String scheduleJson) {
return returnDataList(processDefinitionService.createEmptyProcessDefinition(loginUser, projectCode, name, description, globalParams,
timeout, tenantCode, scheduleJson));
}
/**
* update process definition basic info
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @param releaseState releaseState
* @return update result code
*/
@ApiOperation(value = "updateBasicInfo", notes = "UPDATE_PROCESS_DEFINITION_BASIC_INFO_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "code", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "123456789"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"),
@ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = false, dataType = "ReleaseState")
})
@PutMapping(value = "/{code}/basic-info")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROCESS_DEFINITION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result updateProcessDefinitionBasicInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "name", required = true) String name,
@PathVariable(value = "code", required = true) long code,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "globalParams", required = false, defaultValue = "[]") String globalParams,
@RequestParam(value = "timeout", required = false, defaultValue = "0") int timeout,
@RequestParam(value = "tenantCode", required = true) String tenantCode,
@RequestParam(value = "scheduleJson", required = false) String scheduleJson,
@RequestParam(value = "releaseState", required = false, defaultValue = "OFFLINE") ReleaseState releaseState) {
Map<String, Object> result = processDefinitionService.updateProcessDefinitionBasicInfo(loginUser, projectCode, name, code, description, globalParams,
timeout, tenantCode, scheduleJson);
// If the update fails, the result will be returned directly
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataList(result);
}
// Judge whether to go online after editing,0 means offline, 1 means online
if (releaseState == ReleaseState.ONLINE) {
result = processDefinitionService.releaseWorkflowAndSchedule(loginUser, projectCode, code, releaseState);
}
return returnDataList(result);
}
/**
* release process definition and schedule
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState releaseState
* @return update result code
*/
@ApiOperation(value = "releaseWorkflowAndSchedule", notes = "RELEASE_WORKFLOW_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROCESS_DEFINITION_NAME", required = true, type = "Long"),
@ApiImplicitParam(name = "code", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "123456789"),
@ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = true, dataType = "ReleaseState")
})
@PostMapping(value = "/{code}/release-workflow")
@ResponseStatus(HttpStatus.OK)
@ApiException(RELEASE_PROCESS_DEFINITION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result releaseWorkflowAndSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable(value = "code", required = true) long code,
@RequestParam(value = "releaseState", required = true, defaultValue = "OFFLINE") ReleaseState releaseState) {
return returnDataList(processDefinitionService.releaseWorkflowAndSchedule(loginUser, projectCode, code, releaseState));
}
}
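
A hedged sketch of calling the new empty-definition endpoint above from a client. The host, port, the /dolphinscheduler context path, the projects/{projectCode}/process-definition base mapping of this controller, and the token header are deployment assumptions and are not part of this diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateEmptyProcessDefinitionExample {
    public static void main(String[] args) throws Exception {
        long projectCode = 123456789L;                            // hypothetical project code
        String base = "http://localhost:12345/dolphinscheduler";  // assumed API address and context path
        // Form parameters matching the controller signature above.
        String form = "name=empty-demo&tenantCode=default&description=created+via+API";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(base + "/projects/" + projectCode + "/process-definition/empty"))
                .header("token", "<access-token>")                // assumed token-based auth header
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString(form))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());                      // prints the JSON Result wrapper returned by the API
    }
}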

297
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationController.java

@@ -0,0 +1,297 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_TASK_RELATION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DATA_IS_NOT_VALID;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_EDGE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_TASK_PROCESS_RELATION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.MOVE_PROCESS_TASK_RELATION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_PROCESS_RELATION_ERROR;
import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.HashMap;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/**
* process task relation controller
*/
@Api(tags = "PROCESS_TASK_RELATION_TAG")
@RestController
@RequestMapping("projects/{projectCode}/process-task-relation")
public class ProcessTaskRelationController extends BaseController {
@Autowired
private ProcessTaskRelationService processTaskRelationService;
/**
* create process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode processDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return create result code
*/
@ApiOperation(value = "save", notes = "CREATE_PROCESS_TASK_RELATION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "preTaskCode", value = "PRE_TASK_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "postTaskCode", value = "POST_TASK_CODE", required = true, type = "Long")
})
@PostMapping()
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_PROCESS_TASK_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result createProcessTaskRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode,
@RequestParam(name = "preTaskCode", required = true) long preTaskCode,
@RequestParam(name = "postTaskCode", required = true) long postTaskCode) {
Map<String, Object> result = new HashMap<>();
if (postTaskCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "postTaskCode");
} else if (processDefinitionCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "processDefinitionCode");
} else {
result = processTaskRelationService.createProcessTaskRelation(loginUser, projectCode, processDefinitionCode, preTaskCode, postTaskCode);
}
return returnDataList(result);
}
/**
* move task to other processDefinition
*
* @param loginUser login user info
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param targetProcessDefinitionCode target process definition code
* @param taskCode the current task code (the post task code)
* @return move result code
*/
@ApiOperation(value = "moveRelation", notes = "MOVE_TASK_TO_OTHER_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "targetProcessDefinitionCode", value = "TARGET_PROCESS_DEFINITION_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long")
})
@PostMapping(value = "/move")
@ResponseStatus(HttpStatus.OK)
@ApiException(MOVE_PROCESS_TASK_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result moveTaskProcessRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode,
@RequestParam(name = "targetProcessDefinitionCode", required = true) long targetProcessDefinitionCode,
@RequestParam(name = "taskCode", required = true) long taskCode) {
Map<String, Object> result = new HashMap<>();
if (processDefinitionCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "processDefinitionCode");
} else if (targetProcessDefinitionCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "targetProcessDefinitionCode");
} else if (taskCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "taskCode");
} else {
result = processTaskRelationService.moveTaskProcessRelation(loginUser, projectCode, processDefinitionCode,
targetProcessDefinitionCode, taskCode);
}
return returnDataList(result);
}
/**
* delete process task relation (delete task from workflow)
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param taskCode the post task code
* @return delete result code
*/
@ApiOperation(value = "deleteRelation", notes = "DELETE_PROCESS_TASK_RELATION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long")
})
@DeleteMapping(value = "/{taskCode}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_TASK_PROCESS_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result deleteTaskProcessRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode,
@PathVariable("taskCode") long taskCode) {
return returnDataList(processTaskRelationService.deleteTaskProcessRelation(loginUser, projectCode, processDefinitionCode, taskCode));
}
/**
* delete task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param preTaskCodes the pre task codes, sep ','
* @param taskCode the post task code
* @return delete result code
*/
@ApiOperation(value = "deleteUpstreamRelation", notes = "DELETE_UPSTREAM_RELATION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "preTaskCodes", value = "PRE_TASK_CODES", required = true, type = "String", example = "3,4"),
@ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long")
})
@DeleteMapping(value = "/{taskCode}/upstream")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_TASK_PROCESS_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result deleteUpstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "preTaskCodes", required = true) String preTaskCodes,
@PathVariable("taskCode") long taskCode) {
return returnDataList(processTaskRelationService.deleteUpstreamRelation(loginUser, projectCode, preTaskCodes, taskCode));
}
/**
* delete task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param postTaskCodes the post task codes, sep ','
* @param taskCode the pre task code
* @return delete result code
*/
@ApiOperation(value = "deleteDownstreamRelation", notes = "DELETE_DOWNSTREAM_RELATION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "postTaskCodes", value = "POST_TASK_CODES", required = true, type = "String", example = "3,4"),
@ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long")
})
@DeleteMapping(value = "/{taskCode}/downstream")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_TASK_PROCESS_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result deleteDownstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "postTaskCodes", required = true) String postTaskCodes,
@PathVariable("taskCode") long taskCode) {
return returnDataList(processTaskRelationService.deleteDownstreamRelation(loginUser, projectCode, postTaskCodes, taskCode));
}
/**
* query task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode current task code (post task code)
* @return process task relation list
*/
@ApiOperation(value = "queryUpstreamRelation", notes = "QUERY_UPSTREAM_RELATION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long")
})
@GetMapping(value = "/{taskCode}/upstream")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_PROCESS_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result queryUpstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable("taskCode") long taskCode) {
return returnDataList(processTaskRelationService.queryUpstreamRelation(loginUser, projectCode, taskCode));
}
/**
* query task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode pre task code
* @return process task relation list
*/
@ApiOperation(value = "queryDownstreamRelation", notes = "QUERY_DOWNSTREAM_RELATION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long")
})
@GetMapping(value = "/{taskCode}/downstream")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_PROCESS_RELATION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result queryDownstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable("taskCode") long taskCode) {
return returnDataList(processTaskRelationService.queryDownstreamRelation(loginUser, projectCode, taskCode));
}
/**
* delete edge
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param preTaskCode pre task code
* @param postTaskCode post task code
* @return delete result code
*/
@ApiOperation(value = "deleteEdge", notes = "DELETE_EDGE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "preTaskCode", value = "PRE_TASK_CODE", required = true, type = "Long"),
@ApiImplicitParam(name = "postTaskCode", value = "POST_TASK_CODE", required = true, type = "Long")
})
@DeleteMapping(value = "/{processDefinitionCode}/{preTaskCode}/{postTaskCode}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_EDGE_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result deleteEdge(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true)
@PathVariable long projectCode,
@PathVariable long processDefinitionCode,
@PathVariable long preTaskCode,
@PathVariable long postTaskCode) {
return returnDataList(processTaskRelationService.deleteEdge(loginUser, projectCode, processDefinitionCode, preTaskCode, postTaskCode));
}
}
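
Because deleteUpstreamRelation above binds preTaskCodes with @RequestParam on a DELETE mapping, the comma-separated codes travel in the query string rather than in a request body. A hedged sketch of such a call; the host, port, context path, and token header are deployment assumptions, not part of this diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeleteUpstreamRelationExample {
    public static void main(String[] args) throws Exception {
        long projectCode = 123456789L;   // hypothetical project code
        long taskCode = 987654321L;      // hypothetical post-task code
        String preTaskCodes = "3,4";     // comma-separated pre-task codes, as documented above

        URI uri = URI.create("http://localhost:12345/dolphinscheduler"   // assumed API address
                + "/projects/" + projectCode
                + "/process-task-relation/" + taskCode + "/upstream"
                + "?preTaskCodes=" + preTaskCodes);

        HttpRequest request = HttpRequest.newBuilder(uri)
                .header("token", "<access-token>")   // assumed token-based auth header
                .DELETE()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}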

43
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java

@@ -308,4 +308,47 @@ public class SchedulerController extends BaseController {
Map<String, Object> result = schedulerService.previewSchedule(loginUser, schedule);
return returnDataList(result);
}
/**
* update process definition schedule
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @return update result code
*/
@ApiOperation(value = "updateScheduleByProcessDefinitionCode", notes = "UPDATE_SCHEDULE_BY_PROCESS_DEFINITION_CODE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "12345678"),
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"),
@ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"),
@ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long"),
})
@PutMapping("/{code}")
@ApiException(UPDATE_SCHEDULE_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result updateScheduleByProcessDefinitionCode(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable(value = "code") long processDefinitionCode,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) int warningGroupId,
@RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
Map<String, Object> result = schedulerService.updateScheduleByProcessDefinitionCode(loginUser, projectCode, processDefinitionCode, schedule,
warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, environmentCode);
return returnDataList(result);
}
}
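
The schedule parameter above is a JSON string whose documented shape appears in the @ApiImplicitParam example. A minimal sketch that reads that shape with Jackson into a generic tree (the project's own ScheduleParam DTO is not reproduced here).

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ScheduleJsonExample {
    public static void main(String[] args) throws Exception {
        // Shape taken from the Swagger example above (single quotes replaced with valid JSON quotes).
        String schedule = "{\"startTime\":\"2019-06-10 00:00:00\","
                + "\"endTime\":\"2019-06-13 00:00:00\","
                + "\"crontab\":\"0 0 3/6 * * ? *\"}";

        JsonNode node = new ObjectMapper().readTree(schedule);
        System.out.println("start:   " + node.get("startTime").asText());
        System.out.println("end:     " + node.get("endTime").asText());
        System.out.println("crontab: " + node.get("crontab").asText());
    }
}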

29
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java

@@ -24,6 +24,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.LOGIN_USER_QUERY_PROJ
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DETAIL_OF_TASK_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_DEFINITION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_DEFINITION_VERSIONS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_TASK_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_TASK_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TASK_DEFINITION_ERROR;
@@ -32,6 +33,7 @@ import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TaskDefinitionService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
@@ -310,4 +312,31 @@ public class TaskDefinitionController extends BaseController {
Map<String, Object> result = taskDefinitionService.genTaskCodeList(genNum);
return returnDataList(result);
}
/**
* release task definition
*
* @param loginUser login user
* @param projectCode project code
* @param code task definition code
* @param releaseState releaseState
* @return update result code
*/
@ApiOperation(value = "releaseTaskDefinition", notes = "RELEASE_TASK_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectCode", value = "PROCESS_DEFINITION_NAME", required = true, type = "Long"),
@ApiImplicitParam(name = "code", value = "TASK_DEFINITION_CODE", required = true, dataType = "Long", example = "123456789"),
@ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = true, dataType = "ReleaseState")
})
@PostMapping(value = "/{code}/release")
@ResponseStatus(HttpStatus.OK)
@ApiException(RELEASE_TASK_DEFINITION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result releaseTaskDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable(value = "code", required = true) long code,
@RequestParam(value = "releaseState", required = true, defaultValue = "OFFLINE") ReleaseState releaseState) {
Map<String, Object> result = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, code, releaseState);
return returnDataList(result);
}
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@@ -269,7 +269,7 @@ public enum Status {
CREATE_PROCESS_TASK_RELATION_ERROR(50032, "create process task relation error", "创建工作流任务关系错误"),
PROCESS_TASK_RELATION_NOT_EXIST(50033, "process task relation {0} does not exist", "工作流任务关系[{0}]不存在"),
PROCESS_TASK_RELATION_EXIST(50034, "process task relation is already exist, processCode:[{0}]", "工作流任务关系已存在, processCode:[{0}]"),
PROCESS_DAG_IS_EMPTY(50035, "process dag can not be empty", "工作流dag不能为空"),
PROCESS_DAG_IS_EMPTY(50035, "process dag is empty", "工作流dag是空"),
CHECK_PROCESS_TASK_RELATION_ERROR(50036, "check process task relation error", "工作流任务关系参数错误"),
CREATE_TASK_DEFINITION_ERROR(50037, "create task definition error", "创建任务错误"),
UPDATE_TASK_DEFINITION_ERROR(50038, "update task definition error", "更新任务定义错误"),
@@ -280,13 +280,22 @@ public enum Status {
QUERY_DETAIL_OF_TASK_DEFINITION_ERROR(50043, "query detail of task definition error", "查询任务详细信息错误"),
QUERY_TASK_DEFINITION_LIST_PAGING_ERROR(50044, "query task definition list paging error", "分页查询任务定义列表错误"),
TASK_DEFINITION_NAME_EXISTED(50045, "task definition name [{0}] already exists", "任务定义名称[{0}]已经存在"),
RELEASE_TASK_DEFINITION_ERROR(50046, "release task definition error", "上线任务错误"),
MOVE_PROCESS_TASK_RELATION_ERROR(50047, "move process task relation error", "移动任务到其他工作流错误"),
DELETE_TASK_PROCESS_RELATION_ERROR(50048, "delete process task relation error", "删除工作流任务关系错误"),
QUERY_TASK_PROCESS_RELATION_ERROR(50049, "query process task relation error", "查询工作流任务关系错误"),
TASK_DEFINE_STATE_ONLINE(50050, "task definition {0} is already on line", "任务定义[{0}]已上线"),
TASK_HAS_DOWNSTREAM(50051, "Task [{0}] exists downstream dependence", "任务[{0}]存在下游依赖"),
TASK_HAS_UPSTREAM(50052, "Task [{0}] exists upstream dependence", "任务[{0}]存在上游依赖"),
MAIN_TABLE_USING_VERSION(50053, "the version that the master table is using", "主表正在使用该版本"),
PROJECT_PROCESS_NOT_MATCH(50054, "the project and the process is not match", "项目和工作流不匹配"),
DELETE_EDGE_ERROR(50055, "delete edge error", "删除工作流任务连接线错误"),
HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"),
/**
* for monitor
*/
QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"),
QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"),
CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"),
GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"),
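
The new Status entries above carry {0} placeholders in both the English and Chinese messages. A minimal illustration of how such placeholders are typically filled with java.text.MessageFormat; the project's own putMsg/Result plumbing is not shown in this diff.

import java.text.MessageFormat;

public class StatusMessageExample {
    public static void main(String[] args) {
        // Message pattern copied from TASK_HAS_DOWNSTREAM above.
        String pattern = "Task [{0}] exists downstream dependence";
        // Fill the {0} placeholder with a concrete task name.
        System.out.println(MessageFormat.format(pattern, "shell-demo"));
        // -> Task [shell-demo] exists downstream dependence
    }
}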

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceService.java

@@ -82,9 +82,11 @@ public interface AlertPluginInstanceService {
/**
* queryPluginPage
* @param pageIndex page index
* @param loginUser login user
* @param searchVal search value
* @param pageNo page index
* @param pageSize page size
* @return plugins
*/
Result queryPluginPage(int pageIndex, int pageSize);
Result listPaging(User loginUser, String searchVal, int pageNo, int pageSize);
}

59
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

@@ -332,5 +332,64 @@ public interface ProcessDefinitionService {
long code,
int version);
/**
* create empty process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return process definition code
*/
Map<String, Object> createEmptyProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson);
/**
* update process definition basic info
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return update result code
*/
Map<String, Object> updateProcessDefinitionBasicInfo(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson);
/**
* release process definition and schedule
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState releaseState
* @return update result code
*/
Map<String, Object> releaseWorkflowAndSchedule(User loginUser,
long projectCode,
long code,
ReleaseState releaseState);
}

138
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationService.java

@@ -0,0 +1,138 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
/**
* process task relation service
*/
public interface ProcessTaskRelationService {
/**
* create process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode processDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return create result code
*/
Map<String, Object> createProcessTaskRelation(User loginUser,
long projectCode,
long processDefinitionCode,
long preTaskCode,
long postTaskCode);
/**
* move task to other processDefinition
*
* @param loginUser login user info
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param targetProcessDefinitionCode target process definition code
* @param taskCode the current task code (the post task code)
* @return move result code
*/
Map<String, Object> moveTaskProcessRelation(User loginUser,
long projectCode,
long processDefinitionCode,
long targetProcessDefinitionCode,
long taskCode);
/**
* delete process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param taskCode the post task code
* @return delete result code
*/
Map<String, Object> deleteTaskProcessRelation(User loginUser,
long projectCode,
long processDefinitionCode,
long taskCode);
/**
* delete task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param preTaskCodes the pre task codes, sep ','
* @param taskCode the post task code
* @return delete result code
*/
Map<String, Object> deleteUpstreamRelation(User loginUser,
long projectCode,
String preTaskCodes,
long taskCode);
/**
* delete task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param postTaskCodes the post task codes, sep ','
* @param taskCode the pre task code
* @return delete result code
*/
Map<String, Object> deleteDownstreamRelation(User loginUser,
long projectCode,
String postTaskCodes,
long taskCode);
/**
* query task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode current task code (post task code)
* @return process task relation list
*/
Map<String, Object> queryUpstreamRelation(User loginUser,
long projectCode,
long taskCode);
/**
* query task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode pre task code
* @return process task relation list
*/
Map<String, Object> queryDownstreamRelation(User loginUser,
long projectCode,
long taskCode);
/**
* delete edge
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param preTaskCode pre task code
* @param postTaskCode post task code
* @return delete result code
*/
Map<String, Object> deleteEdge(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode);
}

25
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java

@@ -148,4 +148,29 @@ public interface SchedulerService {
* @return the next five fire time
*/
Map<String, Object> previewSchedule(User loginUser, String schedule);
/**
* update process definition schedule
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param scheduleExpression scheduleExpression
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @return update result code
*/
Map<String, Object> updateScheduleByProcessDefinitionCode(User loginUser,
long projectCode,
long processDefinitionCode,
String scheduleExpression,
WarningType warningType,
int warningGroupId,
FailureStrategy failureStrategy,
Priority processInstancePriority,
String workerGroup,
long environmentCode);
}

14
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskDefinitionService.java

@@ -18,6 +18,7 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
@@ -156,4 +157,17 @@ public interface TaskDefinitionService {
*/
Map<String, Object> genTaskCodeList(Integer genNum);
/**
* release task definition
*
* @param loginUser login user
* @param projectCode project code
* @param code task definition code
* @param releaseState releaseState
* @return update result code
*/
Map<String, Object> releaseTaskDefinition(User loginUser,
long projectCode,
long code,
ReleaseState releaseState);
}

18
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java

@@ -188,14 +188,20 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A
}
@Override
public Result queryPluginPage(int pageIndex, int pageSize) {
IPage<AlertPluginInstance> pluginInstanceIPage = new Page<>(pageIndex, pageSize);
pluginInstanceIPage = alertPluginInstanceMapper.selectPage(pluginInstanceIPage, null);
public Result listPaging(User loginUser, String searchVal, int pageNo, int pageSize) {
PageInfo<AlertPluginInstanceVO> pageInfo = new PageInfo<>(pageIndex, pageSize);
pageInfo.setTotal((int) pluginInstanceIPage.getTotal());
pageInfo.setTotalList(buildPluginInstanceVOList(pluginInstanceIPage.getRecords()));
Result result = new Result();
if (!isAdmin(loginUser)) {
putMsg(result,Status.USER_NO_OPERATION_PERM);
return result;
}
Page<AlertPluginInstance> page = new Page<>(pageNo, pageSize);
IPage<AlertPluginInstance> alertPluginInstanceIPage = alertPluginInstanceMapper.queryByInstanceNamePage(page, searchVal);
PageInfo<AlertPluginInstance> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) alertPluginInstanceIPage.getTotal());
pageInfo.setTotalList(alertPluginInstanceIPage.getRecords());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
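
The rewritten listPaging above follows the common MyBatis-Plus pagination idiom: build a Page, run a mapper query against it, then copy the total and records into the API's PageInfo. Below is a generic sketch of that idiom using only stock MyBatis-Plus types; the entity, mapper, and wrapper-based filter are illustrative stand-ins, not the project's queryByInstanceNamePage SQL.

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

import java.util.List;

public class PagingSketch {

    // Hypothetical entity and mapper standing in for AlertPluginInstance and its mapper.
    public static class Instance {
        private String instanceName;
        public String getInstanceName() { return instanceName; }
    }

    public interface InstanceMapper extends BaseMapper<Instance> {
    }

    // Page through instances whose name contains searchVal, mirroring the service change above.
    public static List<Instance> listPaging(InstanceMapper mapper, String searchVal, int pageNo, int pageSize) {
        Page<Instance> page = new Page<>(pageNo, pageSize);
        QueryWrapper<Instance> wrapper = new QueryWrapper<Instance>()
                .like(searchVal != null && !searchVal.isEmpty(), "instance_name", searchVal);
        IPage<Instance> result = mapper.selectPage(page, wrapper);
        // total rows are available via result.getTotal(), the current slice via result.getRecords()
        return result.getRecords();
    }
}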

386
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@@ -20,6 +20,7 @@ package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE;
import org.apache.dolphinscheduler.api.dto.DagDataSchedule;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
@@ -33,8 +34,11 @@ import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
@@ -68,9 +72,9 @@ import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.BufferedOutputStream;
import java.io.IOException;
@@ -228,6 +232,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description,
globalParams, locations, timeout, loginUser.getId(), tenantId);
return createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs);
}
@@ -292,8 +297,8 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return result;
}
List<ProcessTaskRelation> processTaskRelations = taskRelationList.stream()
.map(processTaskRelationLog -> JSONUtils.parseObject(JSONUtils.toJsonString(processTaskRelationLog), ProcessTaskRelation.class))
.collect(Collectors.toList());
.map(processTaskRelationLog -> JSONUtils.parseObject(JSONUtils.toJsonString(processTaskRelationLog), ProcessTaskRelation.class))
.collect(Collectors.toList());
List<TaskNode> taskNodeList = processService.transformTask(processTaskRelations, taskDefinitionLogs);
if (taskNodeList.size() != taskRelationList.size()) {
Set<Long> postTaskCodes = taskRelationList.stream().map(ProcessTaskRelationLog::getPostTaskCode).collect(Collectors.toSet());
@@ -404,7 +409,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
Page<ProcessDefinition> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(
page, searchVal, userId, project.getCode(), isAdmin(loginUser));
page, searchVal, userId, project.getCode(), isAdmin(loginUser));
List<ProcessDefinition> records = processDefinitionIPage.getRecords();
for (ProcessDefinition pd : records) {
@@ -581,7 +586,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs);
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
@@ -659,31 +664,30 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
// get the timing according to the process definition
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionCode(code);
if (!schedules.isEmpty() && schedules.size() > 1) {
logger.warn("scheduler num is {},Greater than 1", schedules.size());
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
return result;
} else if (schedules.size() == 1) {
Schedule schedule = schedules.get(0);
if (schedule.getReleaseState() == ReleaseState.OFFLINE) {
int delete = scheduleMapper.deleteById(schedule.getId());
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj != null) {
if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) {
int delete = scheduleMapper.deleteById(scheduleObj.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
throw new ServiceException(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
}
} else if (schedule.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
}
if (scheduleObj.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, scheduleObj.getId());
return result;
}
}
int delete = processDefinitionMapper.deleteById(processDefinition.getId());
int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode());
if (delete == 0 || deleteRelation == 0) {
if (delete == 0) {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
}
int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode());
if (deleteRelation == 0) {
logger.warn("The process definition has not relation, it will be delete successfully");
}
putMsg(result, Status.SUCCESS);
return result;
}
@@ -714,23 +718,28 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
switch (releaseState) {
case ONLINE:
List<ProcessTaskRelation> relationList = processService.findRelationByCode(projectCode, code);
if (CollectionUtils.isEmpty(relationList)) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(
new long[]{processDefinition.getCode()}
);
if (updateProcess > 0 && scheduleList.size() == 1) {
Schedule schedule = scheduleList.get(0);
logger.info("set schedule offline, project id: {}, schedule id: {}, process definition code: {}", project.getId(), schedule.getId(), code);
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(code);
if (updateProcess > 0 && schedule != null) {
logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, schedule.getId(), code);
// set status
schedule.setReleaseState(ReleaseState.OFFLINE);
schedule.setReleaseState(releaseState);
int updateSchedule = scheduleMapper.updateById(schedule);
if (updateSchedule == 0) {
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
@@ -810,12 +819,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
* @return DagDataSchedule
*/
public DagDataSchedule exportProcessDagData(ProcessDefinition processDefinition) {
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinition.getCode());
DagDataSchedule dagDataSchedule = new DagDataSchedule(processService.genDagData(processDefinition));
if (scheduleObj != null) {
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
dagDataSchedule.setSchedule(scheduleObj);
}
return dagDataSchedule;
}
@@ -1353,7 +1361,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
diffCode.forEach(code -> failedProcessList.add(code + "[null]"));
for (ProcessDefinition processDefinition : processDefinitionList) {
List<ProcessTaskRelation> processTaskRelations =
processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode());
List<ProcessTaskRelationLog> taskRelationList = processTaskRelations.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
processDefinition.setProjectCode(targetProjectCode);
if (isCopy) {
@@ -1504,6 +1512,10 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
} else {
if (processDefinition.getVersion() == version) {
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
return result;
}
int deleteLog = processDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version);
int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(processDefinition.getCode(), processDefinition.getVersion());
if (deleteLog == 0 || deleteRelationLog == 0) {
@@ -1514,4 +1526,314 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
return result;
}
/**
* create empty process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return process definition code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> createEmptyProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
long processDefinitionCode;
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
return result;
}
ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description,
globalParams, "", timeout, loginUser.getId(), tenantId);
result = createEmptyDagDefine(loginUser, processDefinition);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isBlank(scheduleJson)) {
return result;
}
// save dag schedule
Map<String, Object> scheduleResult = createDagSchedule(loginUser, processDefinition, scheduleJson);
if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) {
Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS);
putMsg(result, scheduleResultStatus);
throw new ServiceException(scheduleResultStatus);
}
return result;
}
private Map<String, Object> createEmptyDagDefine(User loginUser, ProcessDefinition processDefinition) {
Map<String, Object> result = new HashMap<>();
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, true);
if (insertVersion == 0) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
return result;
}
private Map<String, Object> createDagSchedule(User loginUser, ProcessDefinition processDefinition, String scheduleJson) {
Map<String, Object> result = new HashMap<>();
Schedule scheduleObj = JSONUtils.parseObject(scheduleJson, Schedule.class);
if (scheduleObj == null) {
putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
Date now = new Date();
scheduleObj.setProcessDefinitionCode(processDefinition.getCode());
if (DateUtils.differSec(scheduleObj.getStartTime(), scheduleObj.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
if (!org.quartz.CronExpression.isValidExpression(scheduleObj.getCrontab())) {
logger.error("{} verify failure", scheduleObj.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleObj.getCrontab());
return result;
}
scheduleObj.setWarningType(scheduleObj.getWarningType() == null ? WarningType.NONE : scheduleObj.getWarningType());
scheduleObj.setWarningGroupId(scheduleObj.getWarningGroupId() == 0 ? 1 : scheduleObj.getWarningGroupId());
scheduleObj.setFailureStrategy(scheduleObj.getFailureStrategy() == null ? FailureStrategy.CONTINUE : scheduleObj.getFailureStrategy());
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(scheduleObj.getProcessInstancePriority() == null ? Priority.MEDIUM : scheduleObj.getProcessInstancePriority());
scheduleObj.setWorkerGroup(scheduleObj.getWorkerGroup() == null ? "default" : scheduleObj.getWorkerGroup());
scheduleObj.setEnvironmentCode(scheduleObj.getEnvironmentCode() == null ? -1 : scheduleObj.getEnvironmentCode());
scheduleMapper.insert(scheduleObj);
putMsg(result, Status.SUCCESS);
result.put("scheduleId", scheduleObj.getId());
return result;
}
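// Illustrative sketch (not part of the original patch): scheduleJson is a JSON-serialized Schedule,
// so a minimal payload accepted by createDagSchedule might look like the following; all field values
// here are hypothetical, and fields left unset fall back to the defaults applied above.
// {
//   "startTime": "2021-12-01 00:00:00",
//   "endTime": "2022-12-01 00:00:00",
//   "crontab": "0 0 1 * * ? *",
//   "timezoneId": "Asia/Shanghai",
//   "failureStrategy": "CONTINUE",
//   "warningType": "NONE",
//   "processInstancePriority": "MEDIUM",
//   "workerGroup": "default",
//   "environmentCode": -1
// }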
/**
* update process definition basic info
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return update result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> updateProcessDefinitionBasicInfo(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// an online process definition is not allowed to be edited
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
}
if (!name.equals(processDefinition.getName())) {
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
}
ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class);
processDefinition.set(projectCode, name, description, globalParams, "", timeout, tenantId);
List<ProcessTaskRelationLog> taskRelationList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion());
result = updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, Lists.newArrayList());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isBlank(scheduleJson)) {
return result;
}
// update dag schedule
Map<String, Object> scheduleResult = updateDagSchedule(loginUser, projectCode, code, scheduleJson);
if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) {
Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS);
putMsg(result, scheduleResultStatus);
throw new ServiceException(scheduleResultStatus);
}
return result;
}
private Map<String, Object> updateDagSchedule(User loginUser,
long projectCode,
long processDefinitionCode,
String scheduleJson) {
Map<String, Object> result = new HashMap<>();
Schedule schedule = JSONUtils.parseObject(scheduleJson, Schedule.class);
if (schedule == null) {
putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
// set default value
FailureStrategy failureStrategy = schedule.getFailureStrategy() == null ? FailureStrategy.CONTINUE : schedule.getFailureStrategy();
WarningType warningType = schedule.getWarningType() == null ? WarningType.NONE : schedule.getWarningType();
Priority processInstancePriority = schedule.getProcessInstancePriority() == null ? Priority.MEDIUM : schedule.getProcessInstancePriority();
int warningGroupId = schedule.getWarningGroupId() == 0 ? 1 : schedule.getWarningGroupId();
String workerGroup = schedule.getWorkerGroup() == null ? "default" : schedule.getWorkerGroup();
long environmentCode = schedule.getEnvironmentCode() == null ? -1 : schedule.getEnvironmentCode();
ScheduleParam param = new ScheduleParam();
param.setStartTime(schedule.getStartTime());
param.setEndTime(schedule.getEndTime());
param.setCrontab(schedule.getCrontab());
param.setTimezoneId(schedule.getTimezoneId());
return schedulerService.updateScheduleByProcessDefinitionCode(
loginUser,
projectCode,
processDefinitionCode,
JSONUtils.toJsonString(param),
warningType,
warningGroupId,
failureStrategy,
processInstancePriority,
workerGroup,
environmentCode);
}
/**
* release process definition and schedule
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState releaseState
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> releaseWorkflowAndSchedule(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check state
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, "processDefinitionCode:" + code);
return result;
}
switch (releaseState) {
case ONLINE:
List<ProcessTaskRelation> relationList = processService.findRelationByCode(projectCode, code);
if (CollectionUtils.isEmpty(relationList)) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
scheduleObj.setReleaseState(ReleaseState.ONLINE);
scheduleMapper.updateById(scheduleObj);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
if (updateProcess > 0) {
logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, scheduleObj.getId(), code);
// set status
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
int updateSchedule = scheduleMapper.updateById(scheduleObj);
if (updateSchedule == 0) {
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR);
}
schedulerService.deleteSchedule(project.getId(), scheduleObj.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
}
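A minimal caller-side sketch of the new releaseWorkflowAndSchedule contract added above. It is not part of the patch; the wrapper class and field wiring are assumed for illustration, and only the service method and enums come from the code shown.

import java.util.Map;

import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.User;

public class ReleaseWorkflowAndScheduleSketch {

    private final ProcessDefinitionService processDefinitionService;

    public ReleaseWorkflowAndScheduleSketch(ProcessDefinitionService processDefinitionService) {
        this.processDefinitionService = processDefinitionService;
    }

    public Map<String, Object> bringOnline(User loginUser, long projectCode, long workflowCode) {
        // ONLINE flips both the process definition and its bound schedule to ONLINE in one transaction;
        // OFFLINE additionally removes the quartz job through schedulerService.deleteSchedule(...)
        return processDefinitionService.releaseWorkflowAndSchedule(loginUser, projectCode, workflowCode, ReleaseState.ONLINE);
    }
}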

653
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java

@@ -0,0 +1,653 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.enums.Status.DATA_IS_NOT_VALID;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ConditionType;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections.CollectionUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Lists;
/**
* process task relation service impl
*/
@Service
public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements ProcessTaskRelationService {
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
@Autowired
private ProcessTaskRelationLogMapper processTaskRelationLogMapper;
@Autowired
private TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode processDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return create result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> createProcessTaskRelation(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
if (processDefinition.getProjectCode() != projectCode) {
putMsg(result, Status.PROJECT_PROCESS_NOT_MATCH);
return result;
}
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, 0L, postTaskCode);
if (!processTaskRelations.isEmpty()) {
Map<Long, ProcessTaskRelation> preTaskCodeMap = processTaskRelations.stream()
.collect(Collectors.toMap(ProcessTaskRelation::getPreTaskCode, processTaskRelation -> processTaskRelation));
if (preTaskCodeMap.containsKey(preTaskCode) || (!preTaskCodeMap.containsKey(0L) && preTaskCode == 0L)) {
putMsg(result, Status.PROCESS_TASK_RELATION_EXIST, processDefinitionCode);
return result;
}
if (preTaskCodeMap.containsKey(0L) && preTaskCode != 0L) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(preTaskCodeMap.get(0L));
// delete the existing placeholder relation that has no upstream (preTaskCode 0)
int delete = processTaskRelationMapper.deleteRelation(processTaskRelationLog);
int deleteLog = processTaskRelationLogMapper.deleteRelation(processTaskRelationLog);
if ((delete & deleteLog) == 0) {
putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
}
}
Date now = new Date();
List<ProcessTaskRelationLog> processTaskRelationLogs = new ArrayList<>();
if (preTaskCode != 0L) {
// check whether the upstream task already exists in the DAG; if not, add it as a root relation
List<ProcessTaskRelation> upstreamProcessTaskRelations = processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, 0L, preTaskCode);
TaskDefinition preTaskDefinition = taskDefinitionMapper.queryByCode(preTaskCode);
if (upstreamProcessTaskRelations.isEmpty()) {
ProcessTaskRelationLog processTaskRelationLog = setRelationLog(processDefinition, now, loginUser.getId(), preTaskDefinition);
processTaskRelationLog.setPreTaskCode(0L);
processTaskRelationLog.setPreTaskVersion(0);
processTaskRelationLogs.add(processTaskRelationLog);
}
TaskDefinition postTaskDefinition = taskDefinitionMapper.queryByCode(postTaskCode);
ProcessTaskRelationLog processTaskRelationLog = setRelationLog(processDefinition, now, loginUser.getId(), postTaskDefinition);
processTaskRelationLog.setPreTaskCode(preTaskDefinition.getCode());
processTaskRelationLog.setPreTaskVersion(preTaskDefinition.getVersion());
processTaskRelationLogs.add(processTaskRelationLog);
} else {
TaskDefinition postTaskDefinition = taskDefinitionMapper.queryByCode(postTaskCode);
ProcessTaskRelationLog processTaskRelationLog = setRelationLog(processDefinition, now, loginUser.getId(), postTaskDefinition);
processTaskRelationLog.setPreTaskCode(0L);
processTaskRelationLog.setPreTaskVersion(0);
processTaskRelationLogs.add(processTaskRelationLog);
}
int insert = processTaskRelationMapper.batchInsert(processTaskRelationLogs);
int insertLog = processTaskRelationLogMapper.batchInsert(processTaskRelationLogs);
if ((insert & insertLog) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
return result;
}
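// Editor's note (not part of the original patch): createProcessTaskRelation(user, projectCode, wfCode, A, B)
// adds the edge A -> B to the workflow; passing preTaskCode = 0 registers B as a root task. When B only has
// the 0 -> B placeholder so far, that placeholder is deleted above before the real A -> B edge and, if
// needed, a 0 -> A root entry are inserted, keeping every task reachable in the DAG.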
private ProcessTaskRelationLog setRelationLog(ProcessDefinition processDefinition, Date now, int userId, TaskDefinition taskDefinition) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(processDefinition.getProjectCode());
processTaskRelationLog.setProcessDefinitionCode(processDefinition.getCode());
processTaskRelationLog.setProcessDefinitionVersion(processDefinition.getVersion());
processTaskRelationLog.setPostTaskCode(taskDefinition.getCode());
processTaskRelationLog.setPostTaskVersion(taskDefinition.getVersion());
processTaskRelationLog.setConditionType(ConditionType.NONE);
processTaskRelationLog.setConditionParams("{}");
processTaskRelationLog.setCreateTime(now);
processTaskRelationLog.setUpdateTime(now);
processTaskRelationLog.setOperator(userId);
processTaskRelationLog.setOperateTime(now);
return processTaskRelationLog;
}
/**
* move task to other processDefinition
*
* @param loginUser login user info
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param targetProcessDefinitionCode target process definition code
* @param taskCode the current task code (the post task code)
* @return move result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> moveTaskProcessRelation(User loginUser, long projectCode, long processDefinitionCode, long targetProcessDefinitionCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(targetProcessDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, targetProcessDefinitionCode);
return result;
}
if (processDefinition.getProjectCode() != projectCode) {
putMsg(result, Status.PROJECT_PROCESS_NOT_MATCH);
return result;
}
List<ProcessTaskRelation> downstreamList = processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, taskCode, 0L);
if (CollectionUtils.isNotEmpty(downstreamList)) {
Set<Long> postTaskCodes = downstreamList
.stream()
.map(ProcessTaskRelation::getPostTaskCode)
.collect(Collectors.toSet());
putMsg(result, Status.TASK_HAS_DOWNSTREAM, org.apache.commons.lang.StringUtils.join(postTaskCodes, ","));
return result;
}
List<ProcessTaskRelation> upstreamList = processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, 0L, taskCode);
if (upstreamList.isEmpty()) {
putMsg(result, Status.PROCESS_TASK_RELATION_NOT_EXIST, "taskCode:" + taskCode);
return result;
} else {
Set<Long> preTaskCodes = upstreamList
.stream()
.map(ProcessTaskRelation::getPreTaskCode)
.collect(Collectors.toSet());
if (preTaskCodes.size() > 1 || !preTaskCodes.contains(0L)) {
putMsg(result, Status.TASK_HAS_UPSTREAM, org.apache.commons.lang.StringUtils.join(preTaskCodes, ","));
return result;
}
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (null == taskDefinition) {
putMsg(result, Status.DATA_IS_NULL, "taskDefinition");
return result;
}
ObjectNode paramNode = JSONUtils.parseObject(taskDefinition.getTaskParams());
if (TaskType.DEPENDENT.getDesc().equals(taskDefinition.getTaskType())) {
Set<Long> depProcessDefinitionCodes = new HashSet<>();
ObjectNode dependence = (ObjectNode) paramNode.get("dependence");
ArrayNode dependTaskList = JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList")));
for (int i = 0; i < dependTaskList.size(); i++) {
ObjectNode dependTask = (ObjectNode) dependTaskList.path(i);
ArrayNode dependItemList = JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList")));
for (int j = 0; j < dependItemList.size(); j++) {
ObjectNode dependItem = (ObjectNode) dependItemList.path(j);
long definitionCode = dependItem.get("definitionCode").asLong();
depProcessDefinitionCodes.add(definitionCode);
}
}
if (depProcessDefinitionCodes.contains(targetProcessDefinitionCode)) {
putMsg(result, DATA_IS_NOT_VALID, "targetProcessDefinitionCode");
return result;
}
}
if (TaskType.SUB_PROCESS.getDesc().equals(taskDefinition.getTaskType())) {
long subProcessDefinitionCode = paramNode.get("processDefinitionCode").asLong();
if (targetProcessDefinitionCode == subProcessDefinitionCode) {
putMsg(result, DATA_IS_NOT_VALID, "targetProcessDefinitionCode");
return result;
}
}
Date now = new Date();
ProcessTaskRelation processTaskRelation = upstreamList.get(0);
ProcessTaskRelationLog processTaskRelationLog = processTaskRelationLogMapper.queryRelationLogByRelation(processTaskRelation);
processTaskRelation.setProcessDefinitionCode(processDefinition.getCode());
processTaskRelation.setProcessDefinitionVersion(processDefinition.getVersion());
processTaskRelation.setUpdateTime(now);
processTaskRelationLog.setProcessDefinitionCode(processDefinition.getCode());
processTaskRelationLog.setProcessDefinitionVersion(processDefinition.getVersion());
processTaskRelationLog.setUpdateTime(now);
processTaskRelationLog.setOperator(loginUser.getId());
processTaskRelationLog.setOperateTime(now);
int update = processTaskRelationMapper.updateById(processTaskRelation);
int updateLog = processTaskRelationLogMapper.updateById(processTaskRelationLog);
if (update == 0 || updateLog == 0) {
putMsg(result, Status.MOVE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.MOVE_PROCESS_TASK_RELATION_ERROR);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
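// Editor's note (not part of the original patch): a task can only be moved while it has no downstream
// edges and at most the 0 placeholder upstream; DEPENDENT and SUB_PROCESS tasks are additionally rejected
// when the move would make them depend on the target process definition or invoke it as their own sub-process.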
/**
* delete process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param taskCode the post task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteTaskProcessRelation(User loginUser, long projectCode, long processDefinitionCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (taskCode == 0) {
putMsg(result, Status.DELETE_TASK_PROCESS_RELATION_ERROR);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (null == taskDefinition) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
return result;
}
List<ProcessTaskRelation> downstreamList = processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, taskCode, 0L);
if (CollectionUtils.isNotEmpty(downstreamList)) {
Set<Long> postTaskCodes = downstreamList
.stream()
.map(ProcessTaskRelation::getPostTaskCode)
.collect(Collectors.toSet());
putMsg(result, Status.TASK_HAS_DOWNSTREAM, org.apache.commons.lang.StringUtils.join(postTaskCodes, ","));
return result;
}
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(projectCode);
processTaskRelationLog.setPostTaskCode(taskCode);
processTaskRelationLog.setPostTaskVersion(taskDefinition.getVersion());
processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode);
processTaskRelationLog.setProcessDefinitionVersion(processDefinition.getVersion());
int deleteRelation = processTaskRelationMapper.deleteRelation(processTaskRelationLog);
int deleteRelationLog = processTaskRelationLogMapper.deleteRelation(processTaskRelationLog);
if (0 == deleteRelation || 0 == deleteRelationLog) {
putMsg(result, Status.DELETE_TASK_PROCESS_RELATION_ERROR);
throw new ServiceException(Status.DELETE_TASK_PROCESS_RELATION_ERROR);
}
if (TaskType.CONDITIONS.getDesc().equals(taskDefinition.getTaskType())
|| TaskType.DEPENDENT.getDesc().equals(taskDefinition.getTaskType())
|| TaskType.SUB_PROCESS.getDesc().equals(taskDefinition.getTaskType())) {
int deleteTaskDefinition = taskDefinitionMapper.deleteByCode(taskCode);
if (0 == deleteTaskDefinition) {
putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
}
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param preTaskCodes the pre task codes, sep ','
* @param taskCode the post task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteUpstreamRelation(User loginUser, long projectCode, String preTaskCodes, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isEmpty(preTaskCodes)) {
putMsg(result, Status.DATA_IS_NULL, "preTaskCodes");
return result;
}
Status status = deleteUpstreamRelation(loginUser.getId(), projectCode,
Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).distinct().toArray(Long[]::new), taskCode);
if (status != Status.SUCCESS) {
putMsg(result, status);
}
return result;
}
/**
* delete task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param postTaskCodes the post task codes, sep ','
* @param taskCode the pre task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteDownstreamRelation(User loginUser, long projectCode, String postTaskCodes, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isEmpty(postTaskCodes)) {
putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
Map<Long, ProcessTaskRelationLog> taskRelationLogMap =
processTaskRelationList.stream()
.map(ProcessTaskRelationLog::new)
.collect(Collectors.toMap(ProcessTaskRelationLog::getPostTaskCode, processTaskRelationLog -> processTaskRelationLog));
Set<Long> postTaskCodesSet = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet());
int delete = 0;
int deleteLog = 0;
for (long postTaskCode : postTaskCodesSet) {
ProcessTaskRelationLog processTaskRelationLog = taskRelationLogMap.get(postTaskCode);
if (processTaskRelationLog != null) {
delete += processTaskRelationMapper.deleteRelation(processTaskRelationLog);
deleteLog += processTaskRelationLogMapper.deleteRelation(processTaskRelationLog);
}
}
if ((delete & deleteLog) == 0) {
throw new ServiceException(Status.DELETE_TASK_PROCESS_RELATION_ERROR);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* query task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode current task code (post task code)
* @return the upstream task definitions
*/
@Override
public Map<String, Object> queryUpstreamRelation(User loginUser, long projectCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
Set<TaskDefinition> taskDefinitions = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinition taskDefinition = buildTaskDefinition();
taskDefinition.setProjectCode(processTaskRelation.getProjectCode());
taskDefinition.setCode(processTaskRelation.getPreTaskCode());
taskDefinition.setVersion(processTaskRelation.getPreTaskVersion());
return taskDefinition;
})
.collect(Collectors.toSet());
taskDefinitionLogList = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions);
}
result.put(Constants.DATA_LIST, taskDefinitionLogList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode pre task code
* @return the downstream task definitions
*/
@Override
public Map<String, Object> queryDownstreamRelation(User loginUser, long projectCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
Set<TaskDefinition> taskDefinitions = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinition taskDefinition = buildTaskDefinition();
taskDefinition.setProjectCode(processTaskRelation.getProjectCode());
taskDefinition.setCode(processTaskRelation.getPostTaskCode());
taskDefinition.setVersion(processTaskRelation.getPostTaskVersion());
return taskDefinition;
})
.collect(Collectors.toSet());
taskDefinitionLogList = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions);
}
result.put(Constants.DATA_LIST, taskDefinitionLogList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete edge
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param preTaskCode pre task code
* @param postTaskCode post task code
* @return delete result code
*/
@Override
public Map<String, Object> deleteEdge(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode);
if (CollectionUtils.isEmpty(processTaskRelationList)) {
putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
return result;
}
if (processTaskRelationList.size() > 1) {
putMsg(result, Status.DATA_IS_NOT_VALID, "processTaskRelationList");
return result;
}
ProcessTaskRelation processTaskRelation = processTaskRelationList.get(0);
int upstreamCount = processTaskRelationMapper.countByCode(projectCode, processTaskRelation.getProcessDefinitionCode(),
0L, processTaskRelation.getPostTaskCode());
if (upstreamCount == 0) {
putMsg(result, Status.DATA_IS_NULL, "upstreamCount");
return result;
}
if (upstreamCount > 1) {
int delete = processTaskRelationMapper.deleteById(processTaskRelation.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_EDGE_ERROR);
}
return result;
}
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPreTaskCode(0L);
int update = processTaskRelationMapper.updateById(processTaskRelation);
if (update == 0) {
putMsg(result, Status.DELETE_EDGE_ERROR);
}
return result;
}
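// Editor's note (not part of the original patch): when the removed edge is the only upstream of the post
// task, the relation row is kept and re-pointed to preTaskCode 0, so the task remains in the DAG as a
// root node instead of being dropped together with its last incoming edge.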
/**
* build task definition
*
* @return task definition
*/
private TaskDefinition buildTaskDefinition() {
return new TaskDefinition() {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof TaskDefinition)) {
return false;
}
TaskDefinition that = (TaskDefinition) o;
return getCode() == that.getCode()
&& getVersion() == that.getVersion()
&& getProjectCode() == that.getProjectCode();
}
@Override
public int hashCode() {
return Objects.hash(getCode(), getVersion(), getProjectCode());
}
};
}
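// Editor's note (not part of the original patch): the anonymous subclass above only exists so that the
// TaskDefinition placeholders built in queryUpstreamRelation/queryDownstreamRelation can be de-duplicated
// in a Set by (code, version, projectCode) before being passed to queryByTaskDefinitions.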
/**
* delete upstream relation
*
* @param projectCode project code
* @param preTaskCodes pre task codes
* @param taskCode pre task code
* @return status
*/
private Status deleteUpstreamRelation(int userId, long projectCode, Long[] preTaskCodes, long taskCode) {
List<ProcessTaskRelation> upstreamList = processTaskRelationMapper.queryUpstreamByCodes(projectCode, taskCode, preTaskCodes);
if (CollectionUtils.isEmpty(upstreamList)) {
return Status.SUCCESS;
}
List<ProcessTaskRelationLog> upstreamLogList = new ArrayList<>();
Date now = new Date();
for (ProcessTaskRelation processTaskRelation : upstreamList) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation);
processTaskRelationLog.setOperator(userId);
processTaskRelationLog.setOperateTime(now);
processTaskRelationLog.setUpdateTime(now);
upstreamLogList.add(processTaskRelationLog);
}
Map<Long, List<ProcessTaskRelationLog>> processTaskRelationListGroupByProcessDefinitionCode = upstreamLogList.stream()
.collect(Collectors.groupingBy(ProcessTaskRelationLog::getProcessDefinitionCode));
// count upstream relation group by process definition code
List<Map<String, Long>> countListGroupByProcessDefinitionCode = processTaskRelationMapper
.countUpstreamByCodeGroupByProcessDefinitionCode(projectCode, processTaskRelationListGroupByProcessDefinitionCode.keySet().toArray(new Long[0]), taskCode);
List<ProcessTaskRelationLog> deletes = new ArrayList<>();
List<ProcessTaskRelationLog> updates = new ArrayList<>();
for (Map<String, Long> codeCountMap : countListGroupByProcessDefinitionCode) {
long processDefinitionCode = codeCountMap.get("processDefinitionCode");
long countValue = codeCountMap.get("countValue");
List<ProcessTaskRelationLog> processTaskRelationLogList = processTaskRelationListGroupByProcessDefinitionCode.get(processDefinitionCode);
if (countValue <= processTaskRelationLogList.size()) {
ProcessTaskRelationLog processTaskRelationLog = processTaskRelationLogList.remove(0);
if (processTaskRelationLog.getPreTaskCode() != 0) {
processTaskRelationLog.setPreTaskCode(0);
processTaskRelationLog.setPreTaskVersion(0);
updates.add(processTaskRelationLog);
}
}
if (!processTaskRelationLogList.isEmpty()) {
deletes.addAll(processTaskRelationLogList);
}
}
deletes.addAll(updates);
int delete = 0;
int deleteLog = 0;
for (ProcessTaskRelationLog processTaskRelationLog : deletes) {
delete += processTaskRelationMapper.deleteRelation(processTaskRelationLog);
deleteLog += processTaskRelationLogMapper.deleteRelation(processTaskRelationLog);
}
if ((delete & deleteLog) == 0) {
throw new ServiceException(Status.DELETE_TASK_PROCESS_RELATION_ERROR);
} else {
if (!updates.isEmpty()) {
int insert = processTaskRelationMapper.batchInsert(updates);
int insertLog = processTaskRelationLogMapper.batchInsert(updates);
if ((insert & insertLog) == 0) {
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
}
}
return Status.SUCCESS;
}
}
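A small, self-contained sketch (not part of the patch) of the comma-separated task-code lists that deleteUpstreamRelation and deleteDownstreamRelation accept: the service splits the string on Constants.COMMA and parses each entry to a Long, so duplicates are collapsed and any stray whitespace is the caller's problem. The values below are made up.

import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

public class TaskCodeListSketch {

    public static void main(String[] args) {
        // the same split-and-parse the service applies to preTaskCodes / postTaskCodes
        String preTaskCodes = "3001,3002,3002";
        Set<Long> distinctCodes = Arrays.stream(preTaskCodes.split(","))
                .map(Long::parseLong)
                .collect(Collectors.toSet());
        System.out.println(distinctCodes.size()); // 2 - duplicates collapse before any deletion happens
    }
}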

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java

@@ -575,7 +575,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (!HadoopUtils.getInstance().exists(resourcePath)) {
createTenantDirIfNotExists(tenantCode);
}
org.apache.dolphinscheduler.api.utils.FileUtils.copyInputStreamToFile(file, localFilename);
HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true);
} catch (Exception e) {
FileUtils.deleteFile(localFilename);

191
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java

@@ -36,10 +36,12 @@ import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
@@ -57,6 +59,9 @@ import java.util.List;
import java.util.Map;
import org.quartz.CronExpression;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -95,6 +100,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private Scheduler scheduler;
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
/**
* save schedule
*
@@ -241,57 +252,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
return result;
}
updateSchedule(result, schedule, processDefinition, scheduleExpression, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, environmentCode);
return result;
}
@@ -339,7 +300,11 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionCode());
return result;
}
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, scheduleObj.getProcessDefinitionCode());
if (processTaskRelations.isEmpty()) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
if (scheduleStatus == ReleaseState.ONLINE) {
// check process definition release state
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
@@ -494,11 +459,16 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
String jobName = QuartzExecutors.buildJobName(scheduleId);
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
JobKey jobKey = new JobKey(jobName, jobGroupName);
try {
if (scheduler.checkExists(jobKey)) {
logger.info("Try to delete job: {}, group name: {},", jobName, jobGroupName);
scheduler.deleteJob(jobKey);
}
} catch (SchedulerException e) {
logger.error("Failed to delete job: {}", jobKey);
throw new ServiceException("Failed to delete job: " + jobKey);
}
}
/**
@@ -596,4 +566,113 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process definition schedule
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param scheduleExpression scheduleExpression
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @return update result code
*/
@Override
public Map<String, Object> updateScheduleByProcessDefinitionCode(User loginUser,
long projectCode,
long processDefinitionCode,
String scheduleExpression,
WarningType warningType,
int warningGroupId,
FailureStrategy failureStrategy,
Priority processInstancePriority,
String workerGroup,
long environmentCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check schedule exists
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode);
if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, processDefinitionCode);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
updateSchedule(result, schedule, processDefinition, scheduleExpression, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, environmentCode);
return result;
}
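// Illustrative sketch (not part of the original patch): scheduleExpression carries only the timing part
// of the schedule, i.e. a JSON-serialized ScheduleParam; a hypothetical payload could be:
// {"startTime":"2021-12-01 00:00:00","endTime":"2022-12-01 00:00:00","crontab":"0 0 1 * * ? *","timezoneId":"Asia/Shanghai"}
// The remaining knobs (warningType, failureStrategy, priority, worker group, environment) are passed as
// separate arguments and applied in updateSchedule below.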
private void updateSchedule(Map<String, Object> result,
Schedule schedule,
ProcessDefinition processDefinition,
String scheduleExpression,
WarningType warningType,
int warningGroupId,
FailureStrategy failureStrategy,
Priority processInstancePriority,
String workerGroup,
long environmentCode) {
if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
return;
}
Date now = new Date();
// updateProcessInstance param
if (!StringUtils.isEmpty(scheduleExpression)) {
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
if (scheduleParam == null) {
putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
return;
}
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return;
}
schedule.setStartTime(scheduleParam.getStartTime());
schedule.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
return;
}
schedule.setCrontab(scheduleParam.getCrontab());
schedule.setTimezoneId(scheduleParam.getTimezoneId());
}
if (warningType != null) {
schedule.setWarningType(warningType);
}
schedule.setWarningGroupId(warningGroupId);
if (failureStrategy != null) {
schedule.setFailureStrategy(failureStrategy);
}
schedule.setWorkerGroup(workerGroup);
schedule.setEnvironmentCode(environmentCode);
schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(processInstancePriority);
scheduleMapper.updateById(schedule);
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS);
}
}

127
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java

@@ -25,24 +25,31 @@ import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.service.permission.PermissionCheck;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -67,6 +74,8 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
private static final Logger logger = LoggerFactory.getLogger(TaskDefinitionServiceImpl.class);
private static final String RELEASESTATE = "releaseState";
@Autowired
private ProjectMapper projectMapper;
@@ -82,6 +91,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
@Autowired
private ProcessTaskRelationLogMapper processTaskRelationLogMapper;
@Autowired
private ProcessService processService;
@@ -161,7 +173,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
/**
* delete task definition
*
* Only an offline task definition with no downstream dependencies can be deleted
* @param loginUser login user
* @param projectCode project code
* @param taskCode task code
@@ -174,13 +186,22 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (taskCode == 0) {
putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition.getFlag() == Flag.YES) {
putMsg(result, Status.TASK_DEFINE_STATE_ONLINE, taskCode);
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryDownstreamByTaskCode(taskCode);
if (!processTaskRelationList.isEmpty()) {
Set<Long> postTaskCodes = processTaskRelationList
.stream()
.map(ProcessTaskRelation::getPostTaskCode)
.collect(Collectors.toSet());
putMsg(result, Status.TASK_HAS_DOWNSTREAM, StringUtils.join(postTaskCodes, ","));
return result;
}
int delete = taskDefinitionMapper.deleteByCode(taskCode);
@@ -239,7 +260,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
taskDefinitionToUpdate.setId(taskDefinition.getId());
taskDefinitionToUpdate.setProjectCode(projectCode);
taskDefinitionToUpdate.setUserId(taskDefinition.getUserId());
taskDefinitionToUpdate.setVersion(++version);
taskDefinitionToUpdate.setTaskType(taskDefinitionToUpdate.getTaskType().toUpperCase());
taskDefinitionToUpdate.setResourceIds(processService.getResourceIds(taskDefinitionToUpdate));
taskDefinitionToUpdate.setUpdateTime(now);
@@ -252,6 +273,36 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryByTaskCode(taskCode);
if (!processTaskRelationList.isEmpty()) {
List<ProcessTaskRelationLog> processTaskRelationLogList = new ArrayList<>();
int delete = 0;
int deleteLog = 0;
for (ProcessTaskRelation processTaskRelation : processTaskRelationList) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation);
delete += processTaskRelationMapper.deleteRelation(processTaskRelationLog);
deleteLog += processTaskRelationLogMapper.deleteRelation(processTaskRelationLog);
if (processTaskRelationLog.getPreTaskCode() == taskCode) {
processTaskRelationLog.setPreTaskVersion(version);
}
if (processTaskRelationLog.getPostTaskCode() == taskCode) {
processTaskRelationLog.setPostTaskVersion(version);
}
processTaskRelationLog.setOperator(loginUser.getId());
processTaskRelationLog.setOperateTime(now);
processTaskRelationLog.setUpdateTime(now);
processTaskRelationLogList.add(processTaskRelationLog);
}
if ((delete & deleteLog) == 0) {
throw new ServiceException(Status.DELETE_TASK_PROCESS_RELATION_ERROR);
} else {
int insertRelation = processTaskRelationMapper.batchInsert(processTaskRelationLogList);
int insertRelationLog = processTaskRelationLogMapper.batchInsert(processTaskRelationLogList);
if ((insertRelation & insertRelationLog) == 0) {
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
}
}
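// Editor's note (not part of the original patch): the block above re-points every relation that
// references the updated task to the freshly bumped version (++version), deleting the old rows and
// re-inserting them so the relation table and its log table stay consistent with the new task definition.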
result.put(Constants.DATA_LIST, taskCode);
putMsg(result, Status.SUCCESS, update);
return result;
@@ -336,6 +387,10 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
if (taskDefinition == null) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
} else {
if (taskDefinition.getVersion() == version) {
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
return result;
}
int delete = taskDefinitionLogMapper.deleteByCodeAndVersion(taskCode, version);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
@@ -427,4 +482,62 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
result.put(Constants.DATA_LIST, taskCodes);
return result;
}
/**
* release task definition
*
* @param loginUser login user
* @param projectCode project code
* @param code task definition code
* @param releaseState releaseState
* @return update result code
*/
@Override
public Map<String, Object> releaseTaskDefinition(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) result.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return result;
}
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(code);
if (taskDefinition == null) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, code);
return result;
}
switch (releaseState) {
case OFFLINE:
taskDefinition.setFlag(Flag.NO);
taskDefinitionMapper.updateById(taskDefinition);
break;
case ONLINE:
String resourceIds = taskDefinition.getResourceIds();
if (StringUtils.isNotBlank(resourceIds)) {
Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,processService,resourceIdArray,loginUser.getId(),logger);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error(e.getMessage(),e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION);
return result;
}
}
taskDefinition.setFlag(Flag.YES);
taskDefinitionMapper.updateById(taskDefinition);
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
}
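For orientation, a minimal sketch of how a caller might drive the new release switch. The wrapper class is hypothetical, and it assumes the TaskDefinitionService interface declares the same releaseTaskDefinition signature as the implementation above; only that signature, Status, Constants and ReleaseState come from the source.
import java.util.Map;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.TaskDefinitionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.User;
// Hypothetical caller, shown only to illustrate the new service contract.
public class TaskDefinitionReleaseExample {
    private final TaskDefinitionService taskDefinitionService;
    public TaskDefinitionReleaseExample(TaskDefinitionService taskDefinitionService) {
        this.taskDefinitionService = taskDefinitionService;
    }
    public boolean bringOnline(User loginUser, long projectCode, long taskCode) {
        // ONLINE runs the resource permission check before the flag is set to YES;
        // OFFLINE simply flips the flag to NO.
        Map<String, Object> result =
                taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.ONLINE);
        return Status.SUCCESS.equals(result.get(Constants.STATUS));
    }
}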

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java

@ -37,14 +37,13 @@ public class FileUtils {
private static final Logger logger = LoggerFactory.getLogger(FileUtils.class);
/**
* copy source file to target file
*
* @param file file
* @param destFilename destination file name
* copy source InputStream to target file
* @param file
* @param destFilename
*/
public static void copyFile(MultipartFile file, String destFilename) {
public static void copyInputStreamToFile(MultipartFile file, String destFilename) {
try {
org.apache.commons.io.FileUtils.copyFile(file.getResource().getFile(), new File(destFilename));
org.apache.commons.io.FileUtils.copyInputStreamToFile(file.getInputStream(), new File(destFilename));
} catch (IOException e) {
logger.error("failed to copy file , {} is empty file", file.getOriginalFilename(), e);
}
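A small usage sketch for the renamed helper. The upload handler below is hypothetical; only FileUtils.copyInputStreamToFile(file, destFilename) comes from the change above.
import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.springframework.web.multipart.MultipartFile;
// Hypothetical upload handler illustrating the renamed helper.
public class ResourceUploadExample {
    public void saveUpload(MultipartFile file, String targetDir) {
        String destFilename = targetDir + "/" + file.getOriginalFilename();
        // Streams the upload straight to disk; unlike file.getResource().getFile(),
        // this does not require the multipart to be backed by a file on disk.
        FileUtils.copyInputStreamToFile(file, destFilename);
    }
}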

84
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationControllerTest.java

@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
/**
* process task relation controller test
*/
public class ProcessTaskRelationControllerTest extends AbstractControllerTest {
@MockBean
private ProcessTaskRelationService processTaskRelationService;
@Test
public void testQueryDownstreamRelation() throws Exception {
Map<String, Object> mockResult = new HashMap<>();
mockResult.put(Constants.STATUS, Status.SUCCESS);
PowerMockito.when(processTaskRelationService.queryDownstreamRelation(Mockito.any(), Mockito.anyLong(), Mockito.anyLong()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/downstream", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testQueryUpstreamRelation() throws Exception {
Map<String, Object> mockResult = new HashMap<>();
mockResult.put(Constants.STATUS, Status.SUCCESS);
PowerMockito.when(processTaskRelationService.queryUpstreamRelation(Mockito.any(), Mockito.anyLong(), Mockito.anyLong()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/upstream", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
}

25
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

@ -377,32 +377,28 @@ public class ProcessDefinitionServiceTest {
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
List<Schedule> schedules = new ArrayList<>();
schedules.add(getSchedule());
schedules.add(getSchedule());
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedules);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule());
Mockito.when(scheduleMapper.deleteById(46)).thenReturn(1);
Mockito.when(processDefineMapper.deleteById(processDefinition.getId())).thenReturn(1);
Mockito.when(processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode())).thenReturn(1);
Map<String, Object> schedulerGreaterThanOneRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR, schedulerGreaterThanOneRes.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, schedulerGreaterThanOneRes.get(Constants.STATUS));
//scheduler online
schedules.clear();
Schedule schedule = getSchedule();
schedule.setReleaseState(ReleaseState.ONLINE);
schedules.add(schedule);
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedules);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedule);
Map<String, Object> schedulerOnlineRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.SCHEDULE_CRON_STATE_ONLINE, schedulerOnlineRes.get(Constants.STATUS));
//delete success
schedules.clear();
schedule.setReleaseState(ReleaseState.OFFLINE);
schedules.add(schedule);
Mockito.when(processDefineMapper.deleteById(46)).thenReturn(1);
Mockito.when(scheduleMapper.deleteById(schedule.getId())).thenReturn(1);
Mockito.when(processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode())).thenReturn(1);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedules);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule());
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> deleteSuccess = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
@ -430,6 +426,13 @@ public class ProcessDefinitionServiceTest {
// project check auth success, process definition online
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(getProcessDefinition());
List<ProcessTaskRelation> processTaskRelationList = new ArrayList<>();
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(projectCode);
processTaskRelation.setProcessDefinitionCode(46L);
processTaskRelation.setPostTaskCode(123L);
processTaskRelationList.add(processTaskRelation);
Mockito.when(processService.findRelationByCode(projectCode, 46L)).thenReturn(processTaskRelationList);
Map<String, Object> onlineRes = processDefinitionService.releaseProcessDefinition(
loginUser, projectCode, 46, ReleaseState.ONLINE);
Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS));

577
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationServiceTest.java

@ -0,0 +1,577 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProcessTaskRelationServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.commons.collections.CollectionUtils;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import com.google.common.collect.Lists;
/**
* process task relation service test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class ProcessTaskRelationServiceTest {
@InjectMocks
ProcessTaskRelationServiceImpl processTaskRelationService;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectServiceImpl projectService;
@Mock
private ProcessTaskRelationMapper processTaskRelationMapper;
@Mock
private TaskDefinitionLogMapper taskDefinitionLogMapper;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
@Mock
private TaskDefinitionMapper taskDefinitionMapper;
@Mock
private ProcessTaskRelationLogMapper processTaskRelationLogMapper;
/**
* get Mock Admin User
*
* @return admin user
*/
private User getAdminUser() {
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserName("admin");
loginUser.setUserType(UserType.GENERAL_USER);
return loginUser;
}
/**
* get mock Project
*
* @param projectCode projectCode
* @return Project
*/
private Project getProject(long projectCode) {
Project project = new Project();
project.setCode(projectCode);
project.setId(1);
project.setName("project_test1");
project.setUserId(1);
return project;
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
private TaskDefinitionLog buildTaskDefinitionLog(long projectCode, long code, int version) {
TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog() {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof TaskDefinitionLog)) {
return false;
}
TaskDefinitionLog that = (TaskDefinitionLog) o;
return getCode() == that.getCode()
&& getVersion() == that.getVersion()
&& getProjectCode() == that.getProjectCode();
}
@Override
public int hashCode() {
return Objects.hash(getCode(), getVersion(), getProjectCode());
}
};
taskDefinitionLog.setProjectCode(projectCode);
taskDefinitionLog.setCode(code);
taskDefinitionLog.setVersion(version);
return taskDefinitionLog;
}
private TaskDefinition buildTaskDefinition(long projectCode, long code, int version) {
TaskDefinition taskDefinition = new TaskDefinition() {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof TaskDefinition)) {
return false;
}
TaskDefinition that = (TaskDefinition) o;
return getCode() == that.getCode()
&& getVersion() == that.getVersion()
&& getProjectCode() == that.getProjectCode();
}
@Override
public int hashCode() {
return Objects.hash(getCode(), getVersion(), getProjectCode());
}
};
taskDefinition.setProjectCode(projectCode);
taskDefinition.setCode(code);
taskDefinition.setVersion(version);
return taskDefinition;
}
private List<ProcessTaskRelation> getProcessTaskUpstreamRelationList(long projectCode, long taskCode) {
ProcessTaskRelation processTaskRelationUpstream0 = new ProcessTaskRelation();
processTaskRelationUpstream0.setPostTaskCode(taskCode);
processTaskRelationUpstream0.setPreTaskVersion(1);
processTaskRelationUpstream0.setProjectCode(projectCode);
processTaskRelationUpstream0.setPreTaskCode(123);
processTaskRelationUpstream0.setProcessDefinitionCode(123);
ProcessTaskRelation processTaskRelationUpstream1 = new ProcessTaskRelation();
processTaskRelationUpstream1.setPostTaskCode(taskCode);
processTaskRelationUpstream1.setPreTaskVersion(1);
processTaskRelationUpstream1.setPreTaskCode(123);
processTaskRelationUpstream1.setProcessDefinitionCode(124);
processTaskRelationUpstream1.setProjectCode(projectCode);
ProcessTaskRelation processTaskRelationUpstream2 = new ProcessTaskRelation();
processTaskRelationUpstream2.setPostTaskCode(taskCode);
processTaskRelationUpstream2.setPreTaskVersion(2);
processTaskRelationUpstream2.setPreTaskCode(123);
processTaskRelationUpstream2.setProcessDefinitionCode(125);
processTaskRelationUpstream2.setProjectCode(projectCode);
List<ProcessTaskRelation> processTaskRelationList = new ArrayList<>();
processTaskRelationList.add(processTaskRelationUpstream0);
processTaskRelationList.add(processTaskRelationUpstream1);
processTaskRelationList.add(processTaskRelationUpstream2);
return processTaskRelationList;
}
private List<ProcessTaskRelation> getProcessTaskDownstreamRelationList(long projectCode,long taskCode) {
ProcessTaskRelation processTaskRelationDownstream0 = new ProcessTaskRelation();
processTaskRelationDownstream0.setPreTaskCode(taskCode);
processTaskRelationDownstream0.setPostTaskCode(456);
processTaskRelationDownstream0.setPostTaskVersion(1);
processTaskRelationDownstream0.setProjectCode(projectCode);
ProcessTaskRelation processTaskRelationDownstream1 = new ProcessTaskRelation();
processTaskRelationDownstream1.setPreTaskCode(taskCode);
processTaskRelationDownstream1.setPostTaskCode(456);
processTaskRelationDownstream1.setPostTaskVersion(1);
processTaskRelationDownstream1.setProjectCode(projectCode);
ProcessTaskRelation processTaskRelationDownstream2 = new ProcessTaskRelation();
processTaskRelationDownstream2.setPreTaskCode(taskCode);
processTaskRelationDownstream2.setPostTaskCode(4567);
processTaskRelationDownstream2.setPostTaskVersion(1);
processTaskRelationDownstream2.setProjectCode(projectCode);
List<ProcessTaskRelation> processTaskRelationList = new ArrayList<>();
processTaskRelationList.add(processTaskRelationDownstream0);
processTaskRelationList.add(processTaskRelationDownstream1);
processTaskRelationList.add(processTaskRelationDownstream2);
return processTaskRelationList;
}
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(1);
processDefinition.setProjectCode(1L);
processDefinition.setName("test_pdf");
processDefinition.setTenantId(1);
processDefinition.setDescription("");
processDefinition.setCode(1L);
processDefinition.setVersion(1);
return processDefinition;
}
private TaskDefinition getTaskDefinition() {
TaskDefinition taskDefinition = new TaskDefinition();
taskDefinition.setProjectCode(1L);
taskDefinition.setCode(1L);
taskDefinition.setVersion(1);
taskDefinition.setTaskType(TaskType.SHELL.getDesc());
return taskDefinition;
}
@Test
public void testCreateProcessTaskRelation() {
long projectCode = 1L;
long processDefinitionCode = 1L;
long preTaskCode = 0L;
long postTaskCode = 1L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition());
Mockito.when(processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode)).thenReturn(Lists.newArrayList());
Mockito.when(taskDefinitionMapper.queryByCode(postTaskCode)).thenReturn(getTaskDefinition());
List<ProcessTaskRelationLog> processTaskRelationList = Lists.newArrayList();
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(projectCode);
processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode);
processTaskRelationLog.setPreTaskCode(0L);
processTaskRelationLog.setPreTaskVersion(0);
processTaskRelationLog.setPostTaskCode(postTaskCode);
processTaskRelationLog.setPostTaskVersion(1);
processTaskRelationList.add(processTaskRelationLog);
Mockito.when(processTaskRelationMapper.batchInsert(processTaskRelationList)).thenReturn(1);
Mockito.when(processTaskRelationLogMapper.batchInsert(processTaskRelationList)).thenReturn(1);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testMoveTaskProcessRelation() {
long projectCode = 1L;
long processDefinitionCode = 1L;
long taskCode = 1L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition());
Mockito.when(processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, taskCode, 0L)).thenReturn(Lists.newArrayList());
Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(getTaskDefinition());
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList();
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(projectCode);
processTaskRelation.setProcessDefinitionCode(processDefinitionCode);
processTaskRelation.setPreTaskCode(0L);
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPostTaskCode(taskCode);
processTaskRelation.setPostTaskVersion(1);
processTaskRelationList.add(processTaskRelation);
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(projectCode);
processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode);
processTaskRelationLog.setPreTaskCode(0L);
processTaskRelationLog.setPreTaskVersion(0);
processTaskRelationLog.setPostTaskCode(taskCode);
processTaskRelationLog.setPostTaskVersion(1);
Mockito.when(processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, 0L, taskCode)).thenReturn(processTaskRelationList);
Mockito.when(processTaskRelationLogMapper.queryRelationLogByRelation(processTaskRelation)).thenReturn(processTaskRelationLog);
Mockito.when(processTaskRelationMapper.updateById(processTaskRelation)).thenReturn(1);
Mockito.when(processTaskRelationLogMapper.updateById(processTaskRelationLog)).thenReturn(1);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testQueryDownstreamRelation() {
long projectCode = 1L;
long taskCode = 2L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
List<ProcessTaskRelation> processTaskRelationList = getProcessTaskDownstreamRelationList(projectCode,taskCode);
Mockito.when(processTaskRelationMapper.queryDownstreamByCode(projectCode,taskCode))
.thenReturn(processTaskRelationList);
if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
Set<TaskDefinition> taskDefinitions = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinition taskDefinition = buildTaskDefinition(
processTaskRelation.getProjectCode(),
processTaskRelation.getPostTaskCode(),
processTaskRelation.getPostTaskVersion());
return taskDefinition;
})
.collect(Collectors.toSet());
Set<TaskDefinitionLog> taskDefinitionLogSet = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinitionLog taskDefinitionLog = buildTaskDefinitionLog(
processTaskRelation.getProjectCode(),
processTaskRelation.getPostTaskCode(),
processTaskRelation.getPostTaskVersion()
);
return taskDefinitionLog;
})
.collect(Collectors.toSet());
List<TaskDefinitionLog> taskDefinitionLogList = taskDefinitionLogSet.stream().collect(Collectors.toList());
Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions))
.thenReturn(taskDefinitionLogList);
}
Map<String, Object> relation = processTaskRelationService
.queryDownstreamRelation(loginUser, projectCode, taskCode);
Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS));
Assert.assertEquals(2, ((List) relation.get("data")).size());
}
@Test
public void testQueryUpstreamRelation() {
long projectCode = 1L;
long taskCode = 2L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
List<ProcessTaskRelation> processTaskRelationList = getProcessTaskUpstreamRelationList(projectCode,taskCode);
Mockito.when(processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode)).thenReturn(processTaskRelationList);
if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
Set<TaskDefinition> taskDefinitions = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinition taskDefinition = buildTaskDefinition(
processTaskRelation.getProjectCode(),
processTaskRelation.getPreTaskCode(),
processTaskRelation.getPreTaskVersion());
return taskDefinition;
})
.collect(Collectors.toSet());
Set<TaskDefinitionLog> taskDefinitionLogSet = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinitionLog taskDefinitionLog = buildTaskDefinitionLog(
processTaskRelation.getProjectCode(),
processTaskRelation.getPreTaskCode(),
processTaskRelation.getPreTaskVersion());
return taskDefinitionLog;
})
.collect(Collectors.toSet());
List<TaskDefinitionLog> taskDefinitionLogList = taskDefinitionLogSet.stream().collect(Collectors.toList());
Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions))
.thenReturn(taskDefinitionLogList);
}
Map<String, Object> relation = processTaskRelationService
.queryUpstreamRelation(loginUser, projectCode, taskCode);
Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS));
Assert.assertEquals(2, ((List) relation.get("data")).size());
}
@Test
public void testDeleteDownstreamRelation() {
long projectCode = 1L;
long taskCode = 2L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
List<ProcessTaskRelation> processTaskRelationList = new ArrayList<>();
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(projectCode);
processTaskRelation.setProcessDefinitionCode(1L);
processTaskRelation.setPreTaskCode(taskCode);
processTaskRelation.setPostTaskCode(123L);
processTaskRelationList.add(processTaskRelation);
Mockito.when(processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode)).thenReturn(processTaskRelationList);
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation);
Mockito.when(processTaskRelationMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
Mockito.when(processTaskRelationLogMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
Map<String, Object> result1 = processTaskRelationService.deleteDownstreamRelation(loginUser, projectCode, "123", taskCode);
Assert.assertEquals(Status.SUCCESS, result1.get(Constants.STATUS));
}
@Test
public void testDeleteUpstreamRelation() {
long projectCode = 1L;
long taskCode = 2L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processTaskRelationMapper.queryUpstreamByCodes(projectCode, taskCode, new Long[]{123L})).thenReturn(Lists.newArrayList());
List<Map<String, Long>> countListGroupByProcessDefinitionCode = new ArrayList<>();
countListGroupByProcessDefinitionCode.add(new HashMap<String, Long>() {
{
put("processDefinitionCode", 123L);
put("countValue", 2L);
}
});
countListGroupByProcessDefinitionCode.add(new HashMap<String, Long>() {
{
put("processDefinitionCode", 124L);
put("countValue", 1L);
}
});
countListGroupByProcessDefinitionCode.add(new HashMap<String, Long>() {
{
put("processDefinitionCode", 125L);
put("countValue", 3L);
}
});
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(projectCode);
processTaskRelationLog.setPreTaskCode(0L);
processTaskRelationLog.setPreTaskVersion(0);
processTaskRelationLog.setPostTaskCode(taskCode);
processTaskRelationLog.setPostTaskVersion(2);
Mockito.when(processTaskRelationMapper.countUpstreamByCodeGroupByProcessDefinitionCode(projectCode, new Long[]{123L, 124L, 125L}, 2)).thenReturn(countListGroupByProcessDefinitionCode);
Mockito.when(processTaskRelationMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
Mockito.when(processTaskRelationLogMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
Map<String, Object> result1 = processTaskRelationService.deleteUpstreamRelation(loginUser, projectCode, "123", taskCode);
Assert.assertEquals(Status.SUCCESS, result1.get(Constants.STATUS));
}
@Test
public void testDeleteTaskProcessRelation() {
long projectCode = 1L;
long taskCode = 1L;
long processDefinitionCode = 1L;
long preTaskCode = 4L;
long postTaskCode = 5L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode)).thenReturn(Lists.newArrayList());
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(projectCode);
processTaskRelationLog.setPreTaskCode(taskCode);
processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode);
Mockito.when(processTaskRelationMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition());
Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(getTaskDefinition());
TaskDefinition taskDefinition = new TaskDefinition();
taskDefinition.setTaskType(TaskType.CONDITIONS.getDesc());
Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(taskDefinition);
Mockito.when(taskDefinitionMapper.deleteByCode(taskCode)).thenReturn(1);
processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setProjectCode(projectCode);
processTaskRelationLog.setPostTaskCode(taskCode);
processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode);
processTaskRelationLog.setProcessDefinitionVersion(1);
Mockito.when(processTaskRelationMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
Mockito.when(processTaskRelationLogMapper.deleteRelation(processTaskRelationLog)).thenReturn(1);
result = processTaskRelationService.deleteTaskProcessRelation(loginUser, projectCode, processDefinitionCode, taskCode);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testDeleteEdge() {
long projectCode = 1L;
long processDefinitionCode = 3L;
long preTaskCode = 4L;
long postTaskCode = 5L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(projectCode);
processTaskRelation.setProcessDefinitionCode(processDefinitionCode);
processTaskRelation.setPreTaskCode(preTaskCode);
processTaskRelation.setPostTaskCode(postTaskCode);
List<ProcessTaskRelation> processTaskRelationList = new ArrayList<>();
processTaskRelationList.add(processTaskRelation);
Mockito.when(processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode)).thenReturn(processTaskRelationList);
Mockito.when(processTaskRelationMapper.countByCode(projectCode, processDefinitionCode, 0L, postTaskCode)).thenReturn(1);
Mockito.when(processTaskRelationMapper.deleteById(processTaskRelation.getId())).thenReturn(1);
Mockito.when(processTaskRelationMapper.updateById(processTaskRelation)).thenReturn(1);
result = processTaskRelationService.deleteEdge(loginUser, projectCode, processDefinitionCode, preTaskCode, postTaskCode);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
}

53
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java

@ -21,6 +21,8 @@ import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.TaskDefinitionServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.task.shell.ShellParameters;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
@ -71,7 +73,6 @@ public class TaskDefinitionServiceImplTest {
@Mock
private ProcessTaskRelationMapper processTaskRelationMapper;
;
@Test
public void createTaskDefinition() {
@ -163,6 +164,7 @@ public class TaskDefinitionServiceImplTest {
@Test
public void deleteTaskDefinitionByCode() {
long projectCode = 1L;
long taskCode = 1L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
@ -174,13 +176,14 @@ public class TaskDefinitionServiceImplTest {
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processTaskRelationMapper.queryByTaskCode(Mockito.anyLong()))
Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(getTaskDefinition());
Mockito.when(processTaskRelationMapper.queryDownstreamByTaskCode(taskCode))
.thenReturn(new ArrayList<>());
Mockito.when(taskDefinitionMapper.deleteByCode(Mockito.anyLong()))
Mockito.when(taskDefinitionMapper.deleteByCode(taskCode))
.thenReturn(1);
Map<String, Object> relation = taskDefinitionService
.deleteTaskDefinitionByCode(loginUser, projectCode, Mockito.anyLong());
.deleteTaskDefinitionByCode(loginUser, projectCode, taskCode);
Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS));
}
@ -238,6 +241,15 @@ public class TaskDefinitionServiceImplTest {
return project;
}
private TaskDefinition getTaskDefinition() {
TaskDefinition taskDefinition = new TaskDefinition();
taskDefinition.setProjectCode(1L);
taskDefinition.setCode(1L);
taskDefinition.setVersion(1);
taskDefinition.setTaskType(TaskType.SHELL.getDesc());
return taskDefinition;
}
@Test
public void checkJson() {
String taskDefinitionJson = "[{\"name\":\"detail_up\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":"
@ -273,4 +285,37 @@ public class TaskDefinitionServiceImplTest {
Map<String, Object> genTaskCodeList = taskDefinitionService.genTaskCodeList(10);
Assert.assertEquals(Status.SUCCESS, genTaskCodeList.get(Constants.STATUS));
}
@Test
public void testReleaseTaskDefinition() {
long projectCode = 1L;
long taskCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
// check task does not exist
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.OFFLINE);
Assert.assertEquals(Status.TASK_DEFINE_NOT_EXIST, map.get(Constants.STATUS));
// task definition offline
putMsg(result, Status.SUCCESS);
Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(new TaskDefinition());
Map<String, Object> offlineTaskResult = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.OFFLINE);
Assert.assertEquals(Status.SUCCESS, offlineTaskResult.get(Constants.STATUS));
// task definition online, resource exists
Map<String, Object> onlineTaskResult = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.ONLINE);
Assert.assertEquals(Status.SUCCESS, onlineTaskResult.get(Constants.STATUS));
// release error code
Map<String, Object> failResult = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.getEnum(2));
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failResult.get(Constants.STATUS));
}
}

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FileUtilsTest.java

@ -76,7 +76,7 @@ public class FileUtilsTest {
Mockito.when(file.getResource().getFile()).thenReturn(new File(src));
//Invoke copyFile
FileUtils.copyFile(file,destFilename);
FileUtils.copyInputStreamToFile(file,destFilename);
//Test file exists
File destFile = new File(destFilename);

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@ -402,7 +402,6 @@ public final class Constants {
/**
* datasource configuration path
*/
public static final String DATASOURCE_PROPERTIES = "/datasource.properties";
public static final String COMMON_TASK_TYPE = "common";
@ -454,11 +453,6 @@ public final class Constants {
*/
public static final String DEFAULT_CRON_STRING = "0 0 0 * * ? *";
/**
* data source config
*/
public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name";
public static final String SPRING_DATASOURCE_URL = "spring.datasource.url";

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java

@ -31,6 +31,7 @@ import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import com.baomidou.mybatisplus.annotation.FieldStrategy;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
@ -157,6 +158,7 @@ public class TaskDefinition {
/**
* timeout notify strategy
*/
@TableField(updateStrategy = FieldStrategy.IGNORED)
private TaskTimeoutStrategy timeoutNotifyStrategy;
/**

10
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.java

@ -24,6 +24,8 @@ import org.apache.ibatis.annotations.Param;
import java.util.List;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
public interface AlertPluginInstanceMapper extends BaseMapper<AlertPluginInstance> {
@ -42,7 +44,13 @@ public interface AlertPluginInstanceMapper extends BaseMapper<AlertPluginInstanc
*/
List<AlertPluginInstance> queryByIds(@Param("ids") List<Integer> ids);
List<AlertPluginInstance> queryByInstanceName(@Param("instanceName")String instanceName);
/**
* query alert plugin instance page, fuzzy-matched by instance name
*
* @param page page
* @param instanceName alert plugin instance name
* @return alertPluginInstance IPage
*/
IPage<AlertPluginInstance> queryByInstanceNamePage(Page page, @Param("instanceName") String instanceName);
/**
*
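A hedged sketch of how the new paged lookup might be called with MyBatis-Plus. The wrapper class is illustrative; only queryByInstanceNamePage(Page, instanceName) comes from the interface above.
import java.util.List;
import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance;
import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
// Hypothetical wrapper showing the fuzzy, paged lookup added above.
public class AlertPluginInstancePageExample {
    private final AlertPluginInstanceMapper alertPluginInstanceMapper;
    public AlertPluginInstancePageExample(AlertPluginInstanceMapper alertPluginInstanceMapper) {
        this.alertPluginInstanceMapper = alertPluginInstanceMapper;
    }
    public List<AlertPluginInstance> queryPage(String instanceName, int pageNo, int pageSize) {
        Page<AlertPluginInstance> page = new Page<>(pageNo, pageSize);
        // Matches instance_name with a LIKE '%...%' filter; an empty name returns every instance.
        IPage<AlertPluginInstance> result = alertPluginInstanceMapper.queryByInstanceNamePage(page, instanceName);
        return result.getRecords();
    }
}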

17
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.ibatis.annotations.Param;
@ -57,4 +58,20 @@ public interface ProcessTaskRelationLogMapper extends BaseMapper<ProcessTaskRela
*/
int deleteByCode(@Param("processCode") long processCode,
@Param("processVersion") int processVersion);
/**
* delete process task relation
*
* @param processTaskRelationLog processTaskRelationLog
* @return int
*/
int deleteRelation(@Param("processTaskRelationLog") ProcessTaskRelationLog processTaskRelationLog);
/**
* query process task relation log
*
* @param processTaskRelation processTaskRelation
* @return process task relation log
*/
ProcessTaskRelationLog queryRelationLogByRelation(@Param("processTaskRelation") ProcessTaskRelation processTaskRelation);
}
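A small sketch of how the two new log-mapper calls might be combined when reworking an edge, assuming the caller already holds a live ProcessTaskRelation row; apart from the two mapper methods declared above, all names here are illustrative.
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper;
// Hypothetical helper built only on the mapper methods declared above.
public class RelationLogExample {
    private final ProcessTaskRelationLogMapper processTaskRelationLogMapper;
    public RelationLogExample(ProcessTaskRelationLogMapper processTaskRelationLogMapper) {
        this.processTaskRelationLogMapper = processTaskRelationLogMapper;
    }
    public boolean dropLogFor(ProcessTaskRelation relation) {
        // Look up the log row that mirrors the live relation (same codes and versions) ...
        ProcessTaskRelationLog relationLog = processTaskRelationLogMapper.queryRelationLogByRelation(relation);
        if (relationLog == null) {
            return false;
        }
        // ... and delete it; deleteRelation matches on project/process code, versions and task codes.
        return processTaskRelationLogMapper.deleteRelation(relationLog) > 0;
    }
}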

85
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.java

@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
@ -74,4 +75,88 @@ public interface ProcessTaskRelationMapper extends BaseMapper<ProcessTaskRelatio
* @return int
*/
int batchInsert(@Param("taskRelationList") List<ProcessTaskRelationLog> taskRelationList);
/**
* query downstream process task relation by taskCode
*
* @param taskCode taskCode
* @return ProcessTaskRelation
*/
List<ProcessTaskRelation> queryDownstreamByTaskCode(@Param("taskCode") long taskCode);
/**
* query upstream process task relation by taskCode
*
* @param projectCode projectCode
* @param taskCode taskCode
* @return ProcessTaskRelation
*/
List<ProcessTaskRelation> queryUpstreamByCode(@Param("projectCode") long projectCode, @Param("taskCode") long taskCode);
/**
* query downstream process task relation by taskCode
*
* @param projectCode projectCode
* @param taskCode taskCode
* @return ProcessTaskRelation
*/
List<ProcessTaskRelation> queryDownstreamByCode(@Param("projectCode") long projectCode, @Param("taskCode") long taskCode);
/**
* query task relation by codes
*
* @param projectCode projectCode
* @param taskCode taskCode
* @param preTaskCodes preTaskCode list
* @return ProcessTaskRelation
*/
List<ProcessTaskRelation> queryUpstreamByCodes(@Param("projectCode") long projectCode, @Param("taskCode") long taskCode,@Param("preTaskCodes") Long[] preTaskCodes);
/**
* count upstream by codes
*
* @param projectCode projectCode
* @param taskCode taskCode
* @param processDefinitionCodes processDefinitionCodes
* @return upstream count list group by process definition code
*/
List<Map<String, Long>> countUpstreamByCodeGroupByProcessDefinitionCode(@Param("projectCode") long projectCode,
@Param("processDefinitionCodes") Long[] processDefinitionCodes,
@Param("taskCode") long taskCode);
/**
* query by code
*
* @param projectCode projectCode
* @param processDefinitionCode processDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return ProcessTaskRelation
*/
List<ProcessTaskRelation> queryByCode(@Param("projectCode") long projectCode,
@Param("processDefinitionCode") long processDefinitionCode,
@Param("preTaskCode") long preTaskCode,
@Param("postTaskCode") long postTaskCode);
/**
* delete process task relation
*
* @param processTaskRelationLog processTaskRelationLog
* @return int
*/
int deleteRelation(@Param("processTaskRelationLog") ProcessTaskRelationLog processTaskRelationLog);
/**
* count by code
*
* @param projectCode projectCode
* @param processDefinitionCode processDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return ProcessTaskRelation
*/
int countByCode(@Param("projectCode") long projectCode,
@Param("processDefinitionCode") long processDefinitionCode,
@Param("preTaskCode") long preTaskCode,
@Param("postTaskCode") long postTaskCode);
}
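For orientation, a hedged sketch of the downstream check that the service layer performs before deleting a task definition; only queryDownstreamByTaskCode comes from this interface, the guard class itself is illustrative.
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
// Hypothetical guard mirroring the TASK_HAS_DOWNSTREAM check shown earlier in TaskDefinitionServiceImpl.
public class DownstreamGuardExample {
    private final ProcessTaskRelationMapper processTaskRelationMapper;
    public DownstreamGuardExample(ProcessTaskRelationMapper processTaskRelationMapper) {
        this.processTaskRelationMapper = processTaskRelationMapper;
    }
    public Set<Long> downstreamTaskCodes(long taskCode) {
        // Rows whose pre_task_code equals taskCode; their post_task_code values are the blockers.
        List<ProcessTaskRelation> relations = processTaskRelationMapper.queryDownstreamByTaskCode(taskCode);
        return relations.stream()
                .map(ProcessTaskRelation::getPostTaskCode)
                .collect(Collectors.toSet());
    }
}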

4
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java

@ -58,9 +58,9 @@ public interface ScheduleMapper extends BaseMapper<Schedule> {
/**
* query schedule list by process definition code
* @param processDefinitionCode processDefinitionCode
* @return schedule list
* @return schedule
*/
List<Schedule> queryByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode);
Schedule queryByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode);
/**
* query schedule list by process definition code

69
dolphinscheduler-dao/src/main/resources/datasource.properties

@ -1,69 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# datasource configuration
spring.datasource.driver-class-name=org.postgresql.Driver
spring.datasource.url=jdbc:postgresql://127.0.0.1:5432/dolphinscheduler
spring.datasource.username=root
spring.datasource.password=root
# mysql example
#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
#spring.datasource.url=jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8
#spring.datasource.username=ds_user
#spring.datasource.password=dolphinscheduler
# connection configuration
#spring.datasource.initialSize=5
# min connection number
#spring.datasource.minIdle=5
# max connection number
#spring.datasource.maxActive=50
# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases.
# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true.
#spring.datasource.maxWait=60000
# milliseconds for check to close free connections
#spring.datasource.timeBetweenEvictionRunsMillis=60000
# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis.
#spring.datasource.timeBetweenConnectErrorMillis=60000
# the longest time a connection remains idle without being evicted, in milliseconds
#spring.datasource.minEvictableIdleTimeMillis=300000
#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work.
#spring.datasource.validationQuery=SELECT 1
#check whether the connection is valid for timeout, in seconds
#spring.datasource.validationQueryTimeout=3
# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis,
# validation Query is performed to check whether the connection is valid
#spring.datasource.testWhileIdle=true
#execute validation to check if the connection is valid when applying for a connection
#spring.datasource.testOnBorrow=true
#execute validation to check if the connection is valid when the connection is returned
#spring.datasource.testOnReturn=false
#spring.datasource.defaultAutoCommit=true
#spring.datasource.keepAlive=true
# open PSCache, specify count PSCache for every connection
#spring.datasource.poolPreparedStatements=true
#spring.datasource.maxPoolPreparedStatementPerConnectionSize=20

7
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.xml

@ -36,11 +36,14 @@
</foreach>
</select>
<select id="queryByInstanceName" resultType="org.apache.dolphinscheduler.dao.entity.AlertPluginInstance">
<select id="queryByInstanceNamePage" resultType="org.apache.dolphinscheduler.dao.entity.AlertPluginInstance">
select
*
from t_ds_alert_plugin_instance
where instance_name = #{instanceName}
where 1 = 1
<if test="instanceName != null and instanceName != ''">
and instance_name like concat('%', #{instanceName}, '%')
</if>
</select>
<select id="existInstanceName" resultType="java.lang.Boolean">

24
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.xml

@ -46,4 +46,28 @@
WHERE process_definition_code = #{processCode}
and process_definition_version = #{processVersion}
</delete>
<delete id="deleteRelation">
delete from t_ds_process_task_relation_log
WHERE project_code = #{processTaskRelationLog.projectCode}
and process_definition_code = #{processTaskRelationLog.processDefinitionCode}
and process_definition_version = #{processTaskRelationLog.processDefinitionVersion}
<if test="processTaskRelationLog.preTaskCode !=0 and processTaskRelationLog.preTaskVersion != 0">
and pre_task_code = #{processTaskRelationLog.preTaskCode}
and pre_task_version = #{processTaskRelationLog.preTaskVersion}
</if>
and post_task_code = #{processTaskRelationLog.postTaskCode}
and post_task_version = #{processTaskRelationLog.postTaskVersion}
</delete>
<select id="queryRelationLogByRelation" resultType="org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog">
select
<include refid="baseSql"/>
from t_ds_process_task_relation_log
WHERE project_code = #{processTaskRelation.projectCode}
and process_definition_code = #{processTaskRelation.processDefinitionCode}
and process_definition_version = #{processTaskRelation.processDefinitionVersion}
and pre_task_code = #{processTaskRelation.preTaskCode}
and pre_task_version = #{processTaskRelation.preTaskVersion}
and post_task_code = #{processTaskRelation.postTaskCode}
and post_task_version = #{processTaskRelation.postTaskVersion}
</select>
</mapper>

101
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.xml

@ -69,4 +69,105 @@
#{relation.conditionType},#{relation.conditionParams},#{relation.createTime},#{relation.updateTime})
</foreach>
</insert>
<select id="queryDownstreamByTaskCode" resultType="org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation">
select
<include refid="baseSql"/>
from t_ds_process_task_relation
WHERE pre_task_code = #{taskCode}
</select>
<select id="queryDownstreamByCode" resultType="org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation">
select
<include refid="baseSql"/>
from t_ds_process_task_relation
WHERE project_code = #{projectCode}
and pre_task_code = #{taskCode}
</select>
<select id="queryUpstreamByCode" resultType="org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation">
select
<include refid="baseSql"/>
from t_ds_process_task_relation
WHERE project_code = #{projectCode}
and post_task_code = #{taskCode}
</select>
<select id="queryUpstreamByCodes" resultType="org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation">
select
<include refid="baseSql"/>
from t_ds_process_task_relation
WHERE project_code = #{projectCode}
and post_task_code = #{taskCode}
<if test="preTaskCodes != null and preTaskCodes.length != 0">
and pre_task_code in
<foreach collection="preTaskCodes" index="index" item="i" open="(" separator="," close=")">
#{i}
</foreach>
</if>
</select>
<select id="countUpstreamByCodeGroupByProcessDefinitionCode" resultType="java.util.HashMap">
select process_definition_code as processDefinitionCode,
count(0) as countValue
from t_ds_process_task_relation
WHERE project_code = #{projectCode}
and post_task_code = #{taskCode}
<if test="processDefinitionCodes != null and processDefinitionCodes.length != 0">
and process_definition_code in
<foreach collection="processDefinitionCodes" index="index" item="i" open="(" separator="," close=")">
#{i}
</foreach>
</if>
group by process_definition_code
</select>
<select id="queryByCode" resultType="org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation">
select
<include refid="baseSql"/>
from t_ds_process_task_relation
WHERE 1=1
<if test="projectCode != 0">
and project_code = #{projectCode}
</if>
<if test="processDefinitionCode != 0">
and process_definition_code = #{processDefinitionCode}
</if>
<if test="preTaskCode != 0">
and pre_task_code = #{preTaskCode}
</if>
<if test="postTaskCode != 0">
and post_task_code = #{postTaskCode}
</if>
</select>
<delete id="deleteRelation">
delete from t_ds_process_task_relation
WHERE project_code = #{processTaskRelationLog.projectCode}
and process_definition_code = #{processTaskRelationLog.processDefinitionCode}
and process_definition_version = #{processTaskRelationLog.processDefinitionVersion}
<if test="processTaskRelationLog.preTaskCode !=0 and processTaskRelationLog.preTaskVersion != 0">
and pre_task_code = #{processTaskRelationLog.preTaskCode}
and pre_task_version = #{processTaskRelationLog.preTaskVersion}
</if>
and post_task_code = #{processTaskRelationLog.postTaskCode}
and post_task_version = #{processTaskRelationLog.postTaskVersion}
</delete>
<select id="countByCode" resultType="int">
select count(0)
from t_ds_process_task_relation
WHERE 1=1
<if test="projectCode != 0">
and project_code = #{projectCode}
</if>
<if test="processDefinitionCode != 0">
and process_definition_code = #{processDefinitionCode}
</if>
<if test="preTaskCode != 0">
and pre_task_code = #{preTaskCode}
</if>
<if test="postTaskCode != 0">
and post_task_code = #{postTaskCode}
</if>
</select>
</mapper>

2
dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl.sql

@ -289,7 +289,7 @@ CREATE TABLE `t_ds_task_definition_log` (
`project_code` bigint(20) NOT NULL COMMENT 'project code',
`user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id',
`task_type` varchar(50) NOT NULL COMMENT 'task type',
`task_params` text COMMENT 'job custom parameters',
`task_params` longtext COMMENT 'job custom parameters',
`flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available',
`task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority',
`worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping',

6
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java

@ -209,12 +209,12 @@ public class ScheduleMapperTest {
* test query by process definition id
*/
@Test
public void queryByProcessDefinitionId() {
public void queryByProcessDefinitionCode() {
Schedule schedule = insertOne();
schedule.setProcessDefinitionCode(12345);
scheduleMapper.updateById(schedule);
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionCode(schedule.getProcessDefinitionCode());
Assert.assertNotEquals(schedules.size(), 0);
Schedule schedules = scheduleMapper.queryByProcessDefinitionCode(schedule.getProcessDefinitionCode());
Assert.assertNotNull(schedules);
}
}

6
dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java

@ -254,16 +254,16 @@ public class PythonGatewayServer extends SpringBootServletInitializer {
long processDefinitionCode,
String schedule,
String workerGroup) {
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode);
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode);
// create or update schedule
int scheduleId;
if (schedules.isEmpty()) {
if (scheduleObj == null) {
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.ONLINE);
Map<String, Object> result = schedulerService.insertSchedule(user, projectCode, processDefinitionCode, schedule, DEFAULT_WARNING_TYPE,
DEFAULT_WARNING_GROUP_ID, DEFAULT_FAILURE_STRATEGY, DEFAULT_PRIORITY, workerGroup, DEFAULT_ENVIRONMENT_CODE);
scheduleId = (int) result.get("scheduleId");
} else {
scheduleId = schedules.get(0).getId();
scheduleId = scheduleObj.getId();
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.OFFLINE);
schedulerService.updateSchedule(user, projectCode, scheduleId, schedule, DEFAULT_WARNING_TYPE,
DEFAULT_WARNING_GROUP_ID, DEFAULT_FAILURE_STRATEGY, DEFAULT_PRIORITY, workerGroup, DEFAULT_ENVIRONMENT_CODE);

9
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java

@ -20,6 +20,7 @@ package org.apache.dolphinscheduler.server.master;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.IStoppable;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
@ -43,12 +44,15 @@ import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME;
/**
* master server
*/
@ -101,6 +105,9 @@ public class MasterServer implements IStoppable {
@Autowired
private EventExecuteService eventExecuteService;
@Value("${spring.datasource.driver-class-name}")
private String driverClassName;
private ConcurrentHashMap<Integer, WorkflowExecuteThread> processInstanceExecMaps = new ConcurrentHashMap<>();
/**
@ -118,6 +125,8 @@ public class MasterServer implements IStoppable {
*/
@PostConstruct
public void run() {
PropertyUtils.setValue(SPRING_DATASOURCE_DRIVER_CLASS_NAME, driverClassName);
// init remoting server
NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(masterConfig.getListenPort());
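Pieced together, the hunks above add one pattern: the Spring-resolved datasource driver class is injected with @Value and copied into PropertyUtils before the rest of the master starts, so components outside the Spring context (such as the Quartz setup) can read the same value. A simplified sketch of that pattern under those assumptions (class name is illustrative):

    import javax.annotation.PostConstruct;
    import org.apache.dolphinscheduler.common.utils.PropertyUtils;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Component;
    import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME;

    @Component
    public class DriverClassNameBridge {

        @Value("${spring.datasource.driver-class-name}")
        private String driverClassName;

        @PostConstruct
        public void propagate() {
            // Copy the Spring-managed value into the shared property store so that
            // non-Spring code can resolve the same driver class at runtime.
            PropertyUtils.setValue(SPRING_DATASOURCE_DRIVER_CLASS_NAME, driverClassName);
        }
    }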

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java

@ -35,6 +35,7 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
@ -108,6 +109,7 @@ public class WorkerServer implements IStoppable {
public static void main(String[] args) {
Thread.currentThread().setName(Constants.THREAD_NAME_WORKER_SERVER);
new SpringApplicationBuilder(WorkerServer.class)
.web(WebApplicationType.NONE)
.profiles("worker")
.run(args);
}

17
dolphinscheduler-server/src/main/resources/config/install_config.conf

@ -76,19 +76,16 @@ apiServerPort="12345"
# ---------------------------------------------------------
# The type for the metadata database
# Supported values: ``postgresql``, ``mysql``.
dbtype="mysql"
DATABASE_TYPE="mysql"
# The <HOST>:<PORT> connection pair DolphinScheduler connect to the metadata database
dbhost="192.168.xx.xx:3306"
# Spring datasource url, following <HOST>:<PORT>/<database>?<parameter> format
SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8"
# The username DolphinScheduler connect to the metadata database
username="xx"
# Spring datasource username
SPRING_DATASOURCE_USERNAME="ds_user"
# The password DolphinScheduler connect to the metadata database
password="xx"
# The database DolphinScheduler connect to the metadata database
dbname="dolphinscheduler"
# Spring datasource password
SPRING_DATASOURCE_PASSWORD="dolphinscheduler"
# ---------------------------------------------------------
# Registry Server

2
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactoryTest.java

@ -20,8 +20,10 @@ package org.apache.dolphinscheduler.server.master.runner.task;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
@Ignore
public class TaskProcessorFactoryTest {
@Test

3
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java

@ -2273,6 +2273,9 @@ public class ProcessService {
*/
public int saveTaskRelation(User operator, long projectCode, long processDefinitionCode, int processDefinitionVersion,
List<ProcessTaskRelationLog> taskRelationList, List<TaskDefinitionLog> taskDefinitionLogs) {
if (taskRelationList.isEmpty()) {
return Constants.EXIT_CODE_SUCCESS;
}
Map<Long, TaskDefinitionLog> taskDefinitionLogMap = null;
if (CollectionUtils.isNotEmpty(taskDefinitionLogs)) {
taskDefinitionLogMap = taskDefinitionLogs.stream()

72
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java

@ -17,7 +17,39 @@
package org.apache.dolphinscheduler.service.quartz;
import static org.apache.dolphinscheduler.common.Constants.DATASOURCE_PROPERTIES;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.service.exceptions.ServiceException;
import org.quartz.CronTrigger;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.TriggerKey;
import org.quartz.impl.StdSchedulerFactory;
import org.quartz.impl.jdbcjobstore.JobStoreTX;
import org.quartz.impl.jdbcjobstore.PostgreSQLDelegate;
import org.quartz.impl.jdbcjobstore.StdJDBCDelegate;
import org.quartz.impl.matchers.GroupMatcher;
import org.quartz.simpl.SimpleThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.dolphinscheduler.common.Constants.ORG_POSTGRESQL_DRIVER;
import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS;
import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK;
@ -55,47 +87,10 @@ import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRI
import static org.apache.dolphinscheduler.common.Constants.STRING_FALSE;
import static org.apache.dolphinscheduler.common.Constants.STRING_TRUE;
import static org.apache.dolphinscheduler.common.Constants.UNDERLINE;
import static org.quartz.CronScheduleBuilder.cronSchedule;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.service.exceptions.ServiceException;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.quartz.CronTrigger;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.TriggerKey;
import org.quartz.impl.StdSchedulerFactory;
import org.quartz.impl.jdbcjobstore.JobStoreTX;
import org.quartz.impl.jdbcjobstore.PostgreSQLDelegate;
import org.quartz.impl.jdbcjobstore.StdJDBCDelegate;
import org.quartz.impl.matchers.GroupMatcher;
import org.quartz.simpl.SimpleThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* single Quartz executors instance
*/
@ -150,7 +145,6 @@ public class QuartzExecutors {
*/
private void init() {
try {
PropertyUtils.loadPropertyFile(DATASOURCE_PROPERTIES);
StdSchedulerFactory schedulerFactory = new StdSchedulerFactory();
Properties properties = new Properties();

29
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.scss

@ -34,14 +34,35 @@
.minimap {
position: absolute;
width: 300px;
height: 200px;
right: 10px;
bottom: 10px;
right: 0px;
bottom: 0px;
border: dashed 1px #e4e4e4;
z-index: 9;
}
.scale-slider{
position: absolute;
height: 140px;
width: 70px;
right: 0px;
bottom: 140px;
z-index: 9;
display: flex;
justify-content: center;
::v-deep .el-slider__runway{
background-color: #fff;
}
.scale-title{
position: absolute;
top: -30px;
left: 22px;
font-size: 12px;
color: #666;
}
}
.context-menu{
position: absolute;
left: 100px;

325
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue

@ -27,8 +27,21 @@
>
<div ref="paper" class="paper"></div>
<div ref="minimap" class="minimap"></div>
<div class="scale-slider">
<span class="scale-title">{{$t('dagScale')}}</span>
<el-slider
v-model="scale"
vertical
:max="2"
:min="0.2"
:step="0.2"
:marks="SCALE_MARKS"
@input='scaleChange'
/>
</div>
<context-menu ref="contextMenu" />
</div>
<layout-config-modal ref="layoutModal" @submit="format" />
</div>
</template>
@ -37,23 +50,25 @@
import { Graph, DataUri } from '@antv/x6'
import dagTaskbar from './taskbar.vue'
import contextMenu from './contextMenu.vue'
import layoutConfigModal, { LAYOUT_TYPE, DEFAULT_LAYOUT_CONFIG } from './layoutConfigModal.vue'
import {
NODE_PROPS,
EDGE_PROPS,
PORT_PROPS,
NODE,
EDGE,
X6_NODE_NAME,
X6_PORT_OUT_NAME,
X6_PORT_IN_NAME,
X6_EDGE_NAME,
NODE_HIGHLIGHT_PROPS,
PORT_HIGHLIGHT_PROPS,
EDGE_HIGHLIGHT_PROPS,
NODE_STATUS_MARKUP
} from './x6-helper'
import { DagreLayout } from '@antv/layout'
import { DagreLayout, GridLayout } from '@antv/layout'
import { tasksType, tasksState } from '../config'
import { mapActions, mapMutations, mapState } from 'vuex'
import nodeStatus from './nodeStatus'
import x6StyleMixin from './x6-style-mixin'
const SCALE_MARKS = {
0.2: '0.2',
1: '1',
2: '2'
}
export default {
name: 'dag-canvas',
@ -71,7 +86,10 @@
x: 0,
y: 0,
type: ''
}
},
// The canvas scale
scale: 1,
SCALE_MARKS
}
},
provide () {
@ -79,10 +97,12 @@
dagCanvas: this
}
},
mixins: [x6StyleMixin],
inject: ['dagChart'],
components: {
dagTaskbar,
contextMenu
contextMenu,
layoutConfigModal
},
computed: {
...mapState('dag', ['tasks'])
@ -118,6 +138,14 @@
movable: true,
showNodeSelectionBox: false
},
scaling: {
min: 0.2,
max: 2
},
mousewheel: {
enabled: true,
modifiers: ['ctrl', 'meta']
},
scroller: true,
grid: {
size: 10,
@ -126,7 +154,10 @@
snapline: true,
minimap: {
enabled: true,
container: minimap
container: minimap,
scalable: false,
width: 200,
height: 120
},
interacting: {
edgeLabelMovable: false,
@ -134,9 +165,6 @@
magnetConnectable: !!editable
},
connecting: {
snap: {
radius: 30
},
// Whether multiple edges can be created between the same start node and end
allowMulti: false,
// Whether a point is allowed to connect to a blank position on the canvas
@ -148,32 +176,14 @@
// Whether edges are allowed to link to nodes
allowNode: true,
// Whether to allow edge links to ports
allowPort: true,
allowPort: false,
// Whether all available ports or nodes are highlighted when you drag the edge
highlight: true,
createEdge () {
return graph.createEdge({ shape: X6_EDGE_NAME })
},
validateMagnet ({ magnet }) {
return magnet.getAttribute('port-group') !== X6_PORT_IN_NAME
},
validateConnection (data) {
const { sourceCell, targetCell, sourceMagnet, targetMagnet } = data
// Connections can only be created from the output link post
if (
!sourceMagnet ||
sourceMagnet.getAttribute('port-group') !== X6_PORT_OUT_NAME
) {
return false
}
// Can only be connected to the input link post
if (
!targetMagnet ||
targetMagnet.getAttribute('port-group') !== X6_PORT_IN_NAME
) {
return false
}
const { sourceCell, targetCell } = data
if (
sourceCell &&
@ -214,6 +224,7 @@
}
}
}))
this.registerX6Shape()
this.bindGraphEvent()
this.originalScrollPosition = graph.getScrollbarPosition()
@ -224,37 +235,17 @@
registerX6Shape () {
Graph.unregisterNode(X6_NODE_NAME)
Graph.unregisterEdge(X6_EDGE_NAME)
Graph.registerNode(X6_NODE_NAME, { ...NODE_PROPS })
Graph.registerEdge(X6_EDGE_NAME, { ...EDGE_PROPS })
Graph.registerNode(X6_NODE_NAME, { ...NODE })
Graph.registerEdge(X6_EDGE_NAME, { ...EDGE })
},
/**
* Bind graph event
*/
bindGraphEvent () {
// nodes and edges hover
this.graph.on('cell:mouseenter', (data) => {
const { cell, e } = data
const isStatusIcon = (tagName) =>
tagName &&
(tagName.toLocaleLowerCase() === 'em' ||
tagName.toLocaleLowerCase() === 'body')
if (!isStatusIcon(e.target.tagName)) {
this.setHighlight(cell)
}
})
this.graph.on('cell:mouseleave', ({ cell }) => {
if (!this.graph.isSelected(cell)) {
this.resetHighlight(cell)
}
})
// select
this.graph.on('cell:selected', ({ cell }) => {
this.setHighlight(cell)
})
this.graph.on('cell:unselected', ({ cell }) => {
if (!this.graph.isSelected(cell)) {
this.resetHighlight(cell)
}
this.bindStyleEvent(this.graph)
// update scale bar
this.graph.on('scale', ({ sx }) => {
this.scale = sx
})
// right click
this.graph.on('node:contextmenu', ({ x, y, cell }) => {
@ -279,6 +270,13 @@
label: labelName
})
})
// Make sure the edge starts with node, not port
this.graph.on('edge:connected', ({ isNew, edge }) => {
if (isNew) {
const sourceNode = edge.getSourceNode()
edge.setSource(sourceNode)
}
})
},
/**
* @param {Edge|string} edge
@ -297,9 +295,6 @@
setEdgeLabel (id, label) {
const edge = this.graph.getCellById(id)
edge.setLabels(label)
if (this.graph.isSelected(edge)) {
this.setEdgeHighlight(edge)
}
},
/**
* @param {number} limit
@ -348,94 +343,6 @@
node.setData({ taskName: name })
}
},
/**
* Set node highlight
* @param {Node} node
*/
setNodeHighlight (node) {
const url = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}_hover.png`)
node.setAttrs(NODE_HIGHLIGHT_PROPS.attrs)
node.setAttrByPath('image/xlink:href', url)
node.setPortProp(
X6_PORT_OUT_NAME,
'attrs',
PORT_HIGHLIGHT_PROPS[X6_PORT_OUT_NAME].attrs
)
},
/**
* Reset node style
* @param {Node} node
*/
resetNodeStyle (node) {
const url = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}.png`)
node.setAttrs(NODE_PROPS.attrs)
node.setAttrByPath('image/xlink:href', url)
node.setPortProp(
X6_PORT_OUT_NAME,
'attrs',
PORT_PROPS.groups[X6_PORT_OUT_NAME].attrs
)
},
/**
* Set edge highlight
* @param {Edge} edge
*/
setEdgeHighlight (edge) {
const labelName = this.getEdgeLabelName(edge)
edge.setAttrs(EDGE_HIGHLIGHT_PROPS.attrs)
edge.setLabels([
_.merge(
{
attrs: _.cloneDeep(EDGE_HIGHLIGHT_PROPS.defaultLabel.attrs)
},
{
attrs: { label: { text: labelName } }
}
)
])
},
/**
* Reset edge style
* @param {Edge} edge
*/
resetEdgeStyle (edge) {
const labelName = this.getEdgeLabelName(edge)
edge.setAttrs(EDGE_PROPS.attrs)
edge.setLabels([
{
..._.merge(
{
attrs: _.cloneDeep(EDGE_PROPS.defaultLabel.attrs)
},
{
attrs: { label: { text: labelName } }
}
)
}
])
},
/**
* Set cell highlight
* @param {Cell} cell
*/
setHighlight (cell) {
if (cell.isEdge()) {
this.setEdgeHighlight(cell)
} else if (cell.isNode()) {
this.setNodeHighlight(cell)
}
},
/**
* Reset cell highlight
* @param {Cell} cell
*/
resetHighlight (cell) {
if (cell.isEdge()) {
this.resetEdgeStyle(cell)
} else if (cell.isNode()) {
this.resetNodeStyle(cell)
}
},
/**
* Convert the graph to JSON
* @return {{cells:Cell[]}}
@ -512,38 +419,70 @@
}
)
},
showLayoutModal () {
const layoutModal = this.$refs.layoutModal
if (layoutModal) {
layoutModal.show()
}
},
/**
* format
* @desc Auto layout using @antv/layout
*/
format () {
const dagreLayout = new DagreLayout({
type: 'dagre',
rankdir: 'LR',
align: 'UL',
// Calculate the node spacing based on the edge label length
ranksepFunc: (d) => {
const edges = this.graph.getOutgoingEdges(d.id)
let max = 0
if (edges && edges.length > 0) {
edges.forEach((edge) => {
const edgeView = this.graph.findViewByCell(edge)
const labelWidth = +edgeView.findAttr(
'width',
_.get(edgeView, ['labelSelectors', '0', 'body'], null)
)
max = Math.max(max, labelWidth)
})
}
return 50 + max
},
nodesep: 50,
controlPoints: true
})
format (layoutConfig) {
if (!layoutConfig) {
layoutConfig = DEFAULT_LAYOUT_CONFIG
}
this.graph.cleanSelection()
let layoutFunc = null
if (layoutConfig.type === LAYOUT_TYPE.DAGRE) {
layoutFunc = new DagreLayout({
type: LAYOUT_TYPE.DAGRE,
rankdir: 'LR',
align: 'UL',
// Calculate the node spacing based on the edge label length
ranksepFunc: (d) => {
const edges = this.graph.getOutgoingEdges(d.id)
let max = 0
if (edges && edges.length > 0) {
edges.forEach((edge) => {
const edgeView = this.graph.findViewByCell(edge)
const labelWidth = +edgeView.findAttr(
'width',
_.get(edgeView, ['labelSelectors', '0', 'body'], null)
)
max = Math.max(max, labelWidth)
})
}
return layoutConfig.ranksep + max
},
nodesep: layoutConfig.nodesep,
controlPoints: true
})
} else if (layoutConfig.type === LAYOUT_TYPE.GRID) {
layoutFunc = new GridLayout({
type: LAYOUT_TYPE.GRID,
preventOverlap: true,
preventOverlapPadding: layoutConfig.padding,
sortBy: '_index',
rows: layoutConfig.rows || undefined,
cols: layoutConfig.cols || undefined,
nodeSize: 220
})
}
const json = this.toJSON()
const nodes = json.cells.filter((cell) => cell.shape === X6_NODE_NAME)
const nodes = json.cells
.filter((cell) => cell.shape === X6_NODE_NAME)
.map((item) => {
return {
...item,
// sort by code asc
_index: -item.id
}
})
const edges = json.cells.filter((cell) => cell.shape === X6_EDGE_NAME)
const newModel = dagreLayout.layout({
const newModel = layoutFunc.layout({
nodes: nodes,
edges: edges
})
@ -606,12 +545,10 @@
return {
shape: X6_EDGE_NAME,
source: {
cell: sourceId,
port: X6_PORT_OUT_NAME
cell: sourceId
},
target: {
cell: targetId,
port: X6_PORT_IN_NAME
cell: targetId
},
labels: label ? [label] : undefined
}
@ -688,7 +625,7 @@
if (node) {
// Destroy the previous dom
node.removeMarkup()
node.setMarkup(NODE_PROPS.markup.concat(NODE_STATUS_MARKUP))
node.setMarkup(NODE.markup.concat(NODE_STATUS_MARKUP))
const nodeView = this.graph.findViewByCell(node)
const el = nodeView.find('div')[0]
nodeStatus({
@ -828,6 +765,28 @@
const edge = this.genEdgeJSON(code, postCode)
this.graph.addEdge(edge)
})
},
/**
* Navigate to cell
* @param {string} taskName
*/
navigateTo (taskName) {
const nodes = this.getNodes()
nodes.forEach((node) => {
if (node.data.taskName === taskName) {
const id = node.id
const cell = this.graph.getCellById(id)
this.graph.scrollToCell(cell, { animation: { duration: 600 } })
this.graph.cleanSelection()
this.graph.select(cell)
}
})
},
/**
* Canvas scale
*/
scaleChange (val) {
this.graph.zoomTo(val)
}
}
}

115
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/layoutConfigModal.vue

@ -0,0 +1,115 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<el-dialog
:title="$t('Format DAG')"
:visible.sync="visible"
width="500px"
class="dag-layout-modal"
:append-to-body="true"
>
<el-form
ref="form"
:model="form"
label-width="100px"
class="dag-layout-form"
>
<el-form-item :label="$t('layoutType')">
<el-radio-group v-model="form.type">
<el-radio label="grid">{{ $t("gridLayout") }}</el-radio>
<el-radio label="dagre">{{ $t("dagreLayout") }}</el-radio>
</el-radio-group>
</el-form-item>
<el-form-item :label="$t('rows')" v-if="form.type === LAYOUT_TYPE.GRID">
<el-input-number
v-model="form.rows"
:min="0"
size="small"
></el-input-number>
</el-form-item>
<el-form-item :label="$t('cols')" v-if="form.type === LAYOUT_TYPE.GRID">
<el-input-number
v-model="form.cols"
:min="0"
size="small"
></el-input-number>
</el-form-item>
</el-form>
<span slot="footer" class="dialog-footer">
<el-button size="small" @click="close">{{ $t("Cancel") }}</el-button>
<el-button size="small" type="primary" @click="submit">{{
$t("Confirm")
}}</el-button>
</span>
</el-dialog>
</template>
<script>
export const LAYOUT_TYPE = {
GRID: 'grid',
DAGRE: 'dagre'
}
export const DEFAULT_LAYOUT_CONFIG = {
cols: 0,
nodesep: 50,
padding: 50,
ranksep: 50,
rows: 0,
type: LAYOUT_TYPE.DAGRE
}
export default {
data () {
return {
visible: false,
form: { ...DEFAULT_LAYOUT_CONFIG },
LAYOUT_TYPE
}
},
methods: {
show () {
this.visible = true
},
close () {
this.visible = false
},
submit () {
this.$emit('submit', this.form)
this.close()
}
}
}
</script>
<style lang="scss" scoped>
.dag-layout-modal {
::v-deep .el-dialog__header {
border-bottom: solid 1px #d4d4d4;
}
::v-deep .dag-layout-form {
margin-top: 20px;
}
::v-deep .el-radio {
margin-bottom: 0;
}
.el-form-item {
margin-bottom: 10px;
}
}
</style>

4
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss

@ -174,6 +174,10 @@
}
}
}
&.disabled{
cursor: default
}
}
}
}

9
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.vue

@ -24,6 +24,9 @@
<draggable-box
:key="taskType.name"
@onDragstart="(e) => $emit('on-drag-start', e, taskType)"
:class="{
disabled: isDetails
}"
>
<div class="task-item">
<em :class="`icos-${taskType.name.toLocaleLowerCase()}`"></em>
@ -38,6 +41,7 @@
<script>
import draggableBox from './draggableBox.vue'
import { tasksType } from '../config.js'
import { mapState } from 'vuex'
export default {
name: 'dag-taskbar',
@ -55,6 +59,11 @@
return {
tasksTypeList
}
},
computed: {
...mapState('dag', [
'isDetails'
])
}
}
</script>

14
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.scss

@ -110,4 +110,18 @@
}
}
}
.process-online-tag{
margin-left: 10px;
}
.search-box{
width: 0;
overflow: hidden;
transition: all 0.5s;
&.visible{
width: 200px;
}
}
}

101
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.vue

@ -23,76 +23,111 @@
:content="$t('Copy name')"
placement="bottom"
>
<i class="el-icon-copy-document" @click="copyName"></i>
<em class="el-icon-copy-document" @click="copyName"></em>
</el-tooltip>
<textarea ref="textarea" cols="30" rows="10" class="transparent"></textarea>
<div class="toolbar-left">
<el-tag
class="process-online-tag"
size="small"
v-if="dagChart.type === 'definition' && releaseState === 'ONLINE'"
>{{ $t("processOnline") }}</el-tag
>
<el-tooltip
:content="$t('View variables')"
placement="bottom"
class="toolbar-operation"
>
<i
<em
class="custom-ico view-variables"
v-if="$route.name === 'projects-instance-details'"
@click="toggleVariableView"
></i>
></em>
</el-tooltip>
<el-tooltip
:content="$t('Startup parameter')"
placement="bottom"
class="toolbar-operation"
>
<i
<em
class="custom-ico startup-parameters"
v-if="$route.name === 'projects-instance-details'"
@click="toggleParamView"
></i>
></em>
</el-tooltip>
</div>
<div class="toolbar-right">
<el-tooltip
class="toolbar-operation"
:content="$t('searchNode')"
placement="bottom"
v-if="!searchInputVisible"
>
<em
class="el-icon-search"
@click="showSearchInput"
></em>
</el-tooltip>
<div
:class="{
'search-box': true,
'visible': searchInputVisible
}"
>
<el-input
v-model="searchText"
placeholder=""
prefix-icon="el-icon-search"
size="mini"
@keyup.enter.native="onSearch"
clearable
@blur="searchInputBlur"
ref="searchInput"
></el-input>
</div>
<el-tooltip
class="toolbar-operation"
:content="$t('Delete selected lines or nodes')"
placement="bottom"
v-if="!isDetails"
>
<i class="el-icon-delete" @click="removeCells"></i>
<em class="el-icon-delete" @click="removeCells"></em>
</el-tooltip>
<el-tooltip
class="toolbar-operation"
:content="$t('Download')"
placement="bottom"
>
<i class="el-icon-download" @click="downloadPNG"></i>
<em class="el-icon-download" @click="downloadPNG"></em>
</el-tooltip>
<el-tooltip
class="toolbar-operation"
:content="$t('Full Screen')"
:content="$t('Refresh DAG status')"
placement="bottom"
v-if="dagChart.type === 'instance'"
>
<i
:class="[
'custom-ico',
dagChart.fullScreen ? 'full-screen-close' : 'full-screen-open',
]"
@click="toggleFullScreen"
></i>
<em class="el-icon-refresh" @click="refreshTaskStatus"></em>
</el-tooltip>
<el-tooltip
class="toolbar-operation"
:content="$t('Refresh DAG status')"
:content="$t('Format DAG')"
placement="bottom"
v-if="dagChart.type === 'instance'"
v-if="!isDetails"
>
<i class="el-icon-refresh" @click="refreshTaskStatus"></i>
<em class="custom-ico graph-format" @click="chartFormat"></em>
</el-tooltip>
<el-tooltip
class="toolbar-operation last"
:content="$t('Format DAG')"
:content="$t('Full Screen')"
placement="bottom"
>
<i class="custom-ico graph-format" @click="chartFormat"></i>
<em
:class="[
'custom-ico',
dagChart.fullScreen ? 'full-screen-close' : 'full-screen-open',
]"
@click="toggleFullScreen"
></em>
</el-tooltip>
<el-button
class="toolbar-el-btn"
@ -101,7 +136,7 @@
v-if="dagChart.type === 'definition'"
@click="showVersions"
icon="el-icon-info"
>{{$t('Version Info')}}</el-button
>{{ $t("Version Info") }}</el-button
>
<el-button
class="toolbar-el-btn"
@ -125,7 +160,6 @@
type="primary"
icon="el-icon-switch-button"
size="mini"
v-if="type === 'instance' || 'definition'"
@click="returnToListPage"
>
{{ $t("Close") }}
@ -143,15 +177,28 @@
inject: ['dagChart'],
data () {
return {
canvasRef: null
canvasRef: null,
searchText: '',
searchInputVisible: false
}
},
computed: {
...mapState('dag', [
'isDetails'
])
...mapState('dag', ['isDetails', 'releaseState'])
},
methods: {
onSearch () {
const canvas = this.getDagCanvasRef()
canvas.navigateTo(this.searchText)
},
showSearchInput () {
this.searchInputVisible = true
this.$refs.searchInput.focus()
},
searchInputBlur () {
if (!this.searchText) {
this.searchInputVisible = false
}
},
getDagCanvasRef () {
if (this.canvasRef) {
return this.canvasRef
@ -200,7 +247,7 @@
},
chartFormat () {
const canvas = this.getDagCanvasRef()
canvas.format()
canvas.showLayoutModal()
},
refreshTaskStatus () {
this.dagChart.refreshTaskStatus()

163
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-helper.js

@ -17,16 +17,17 @@
export const X6_NODE_NAME = 'dag-task'
export const X6_EDGE_NAME = 'dag-edge'
export const X6_PORT_OUT_NAME = 'dag-port-out'
export const X6_PORT_IN_NAME = 'dag-port-in'
const EDGE = '#999999'
const BG_BLUE = 'rgba(40, 143, 255, 0.1)'
const EDGE_COLOR = '#999999'
const BG_BLUE = '#DFE9F7'
const BG_WHITE = '#FFFFFF'
const NODE_BORDER = '#e4e4e4'
const TITLE = '#333'
const NODE_BORDER = '#CCCCCC'
const TITLE = '#333333'
const STROKE_BLUE = '#288FFF'
const NODE_SHADOW = 'drop-shadow(3px 3px 4px rgba(0, 0, 0, 0.2))'
const EDGE_SHADOW = 'drop-shadow(3px 3px 2px rgba(0, 0, 0, 0.2))'
export const PORT_PROPS = {
export const PORT = {
groups: {
[X6_PORT_OUT_NAME]: {
position: {
@ -62,14 +63,14 @@ export const PORT_PROPS = {
},
'plus-text': {
fontSize: 12,
fill: EDGE,
fill: NODE_BORDER,
text: '+',
textAnchor: 'middle',
x: 0,
y: 3
},
'circle-outer': {
stroke: EDGE,
stroke: NODE_BORDER,
strokeWidth: 1,
r: 6,
fill: BG_WHITE
@ -79,57 +80,42 @@ export const PORT_PROPS = {
fill: 'transparent'
}
}
},
[X6_PORT_IN_NAME]: {
position: {
name: 'absolute',
args: {
x: 0,
y: 24
}
},
markup: [
{
tagName: 'g',
selector: 'body',
className: 'in-port-body',
children: [{
tagName: 'circle',
selector: 'circle',
className: 'circle'
}]
}
],
}
}
}
export const PORT_HOVER = {
groups: {
[X6_PORT_OUT_NAME]: {
attrs: {
body: {
magnet: true
'circle-outer': {
stroke: STROKE_BLUE,
fill: BG_BLUE,
r: 8
},
circle: {
r: 4,
strokeWidth: 0,
fill: 'transparent'
'circle-inner': {
fill: STROKE_BLUE,
r: 6
}
}
}
}
}
export const PORT_HIGHLIGHT_PROPS = {
[X6_PORT_OUT_NAME]: {
attrs: {
'circle-outer': {
stroke: STROKE_BLUE,
fill: BG_BLUE
},
'plus-text': {
fill: STROKE_BLUE
},
'circle-inner': {
fill: STROKE_BLUE
export const PORT_SELECTED = {
groups: {
[X6_PORT_OUT_NAME]: {
attrs: {
'plus-text': {
fill: STROKE_BLUE
},
'circle-outer': {
stroke: STROKE_BLUE,
fill: BG_WHITE
}
}
}
},
[X6_PORT_IN_NAME]: {}
}
}
export const NODE_STATUS_MARKUP = [{
@ -148,13 +134,14 @@ export const NODE_STATUS_MARKUP = [{
]
}]
export const NODE_PROPS = {
export const NODE = {
width: 220,
height: 48,
markup: [
{
tagName: 'rect',
selector: 'body'
selector: 'body',
className: 'dag-task-body'
},
{
tagName: 'image',
@ -174,7 +161,9 @@ export const NODE_PROPS = {
pointerEvents: 'visiblePainted',
fill: BG_WHITE,
stroke: NODE_BORDER,
strokeWidth: 1
strokeWidth: 1,
strokeDasharray: 'none',
filter: 'none'
},
image: {
width: 30,
@ -199,21 +188,17 @@ export const NODE_PROPS = {
}
},
ports: {
...PORT_PROPS,
...PORT,
items: [
{
id: X6_PORT_OUT_NAME,
group: X6_PORT_OUT_NAME
},
{
id: X6_PORT_IN_NAME,
group: X6_PORT_IN_NAME
}
]
}
}
export const NODE_HIGHLIGHT_PROPS = {
export const NODE_HOVER = {
attrs: {
body: {
fill: BG_BLUE,
@ -226,28 +211,42 @@ export const NODE_HIGHLIGHT_PROPS = {
}
}
export const EDGE_PROPS = {
export const NODE_SELECTED = {
attrs: {
body: {
filter: NODE_SHADOW,
fill: BG_WHITE,
stroke: STROKE_BLUE,
strokeDasharray: '5,2',
strokeWidth: '1.5'
},
title: {
fill: STROKE_BLUE
}
}
}
export const EDGE = {
attrs: {
line: {
stroke: EDGE,
strokeWidth: 0.8,
stroke: EDGE_COLOR,
strokeWidth: 1,
targetMarker: {
tagName: 'path',
fill: EDGE,
fill: EDGE_COLOR,
strokeWidth: 0,
d: 'M 6 -3 0 0 6 3 Z'
}
},
filter: 'none'
}
},
connector: {
name: 'rounded'
},
router: {
name: 'er',
name: 'manhattan',
args: {
offset: 20,
min: 20,
direction: 'L'
endDirections: ['top', 'bottom', 'left']
}
},
defaultLabel: {
@ -263,7 +262,7 @@ export const EDGE_PROPS = {
],
attrs: {
label: {
fill: EDGE,
fill: EDGE_COLOR,
fontSize: 14,
textAnchor: 'middle',
textVerticalAnchor: 'middle',
@ -272,7 +271,7 @@ export const EDGE_PROPS = {
body: {
ref: 'label',
fill: BG_WHITE,
stroke: EDGE,
stroke: EDGE_COLOR,
strokeWidth: 1,
rx: 4,
ry: 4,
@ -292,7 +291,7 @@ export const EDGE_PROPS = {
}
}
export const EDGE_HIGHLIGHT_PROPS = {
export const EDGE_HOVER = {
attrs: {
line: {
stroke: STROKE_BLUE,
@ -313,3 +312,27 @@ export const EDGE_HIGHLIGHT_PROPS = {
}
}
}
export const EDGE_SELECTED = {
attrs: {
line: {
stroke: STROKE_BLUE,
targetMarker: {
fill: STROKE_BLUE
},
strokeWidth: 2,
filter: EDGE_SHADOW
}
},
defaultLabel: {
attrs: {
label: {
fill: STROKE_BLUE
},
body: {
fill: BG_WHITE,
stroke: STROKE_BLUE
}
}
}
}

145
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style-mixin.js

@ -0,0 +1,145 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
NODE,
EDGE,
PORT,
NODE_HOVER,
PORT_HOVER,
EDGE_HOVER,
PORT_SELECTED,
NODE_SELECTED,
EDGE_SELECTED,
X6_PORT_OUT_NAME
} from './x6-helper'
import _ from 'lodash'
export default {
data () {
return {
hoverCell: null
}
},
methods: {
bindStyleEvent (graph) {
// nodes and edges hover
graph.on('cell:mouseenter', (data) => {
const { cell, e } = data
const isStatusIcon = (tagName) =>
tagName &&
(tagName.toLocaleLowerCase() === 'em' ||
tagName.toLocaleLowerCase() === 'body')
if (!isStatusIcon(e.target.tagName)) {
this.hoverCell = cell
this.updateCellStyle(cell, graph)
}
})
graph.on('cell:mouseleave', ({ cell }) => {
this.hoverCell = null
this.updateCellStyle(cell, graph)
})
// select
graph.on('cell:selected', ({ cell }) => {
this.updateCellStyle(cell, graph)
})
graph.on('cell:unselected', ({ cell }) => {
this.updateCellStyle(cell, graph)
})
},
updateCellStyle (cell, graph) {
if (cell.isEdge()) {
this.setEdgeStyle(cell, graph)
} else if (cell.isNode()) {
this.setNodeStyle(cell, graph)
}
},
/**
* Set node style
* @param {Node} node
* @param {Graph} graph
*/
setNodeStyle (node, graph) {
const isHover = node === this.hoverCell
const isSelected = graph.isSelected(node)
const portHover = _.cloneDeep(PORT_HOVER.groups[X6_PORT_OUT_NAME].attrs)
const portSelected = _.cloneDeep(PORT_SELECTED.groups[X6_PORT_OUT_NAME].attrs)
const portDefault = _.cloneDeep(PORT.groups[X6_PORT_OUT_NAME].attrs)
const nodeHover = _.merge(_.cloneDeep(NODE.attrs), NODE_HOVER.attrs)
const nodeSelected = _.merge(_.cloneDeep(NODE.attrs), NODE_SELECTED.attrs)
let img = null
let nodeAttrs = null
let portAttrs = null
if (isHover || isSelected) {
img = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}_hover.png`)
if (isHover) {
nodeAttrs = nodeHover
portAttrs = _.merge(portDefault, portHover)
} else {
nodeAttrs = nodeSelected
portAttrs = _.merge(portDefault, portSelected)
}
} else {
img = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}.png`)
nodeAttrs = NODE.attrs
portAttrs = portDefault
}
node.setAttrByPath('image/xlink:href', img)
node.setAttrs(nodeAttrs)
node.setPortProp(
X6_PORT_OUT_NAME,
'attrs',
portAttrs
)
},
/**
* Set edge style
* @param {Edge} edge
* @param {Graph} graph
*/
setEdgeStyle (edge, graph) {
const isHover = edge === this.hoverCell
const isSelected = graph.isSelected(edge)
const labelName = this.getEdgeLabelName ? this.getEdgeLabelName(edge) : ''
let edgeProps = null
if (isHover) {
edgeProps = _.merge(_.cloneDeep(EDGE), EDGE_HOVER)
} else if (isSelected) {
edgeProps = _.merge(_.cloneDeep(EDGE), EDGE_SELECTED)
} else {
edgeProps = _.cloneDeep(EDGE)
}
edge.setAttrs(edgeProps.attrs)
edge.setLabels([
{
..._.merge(
{
attrs: _.cloneDeep(edgeProps.defaultLabel.attrs)
},
{
attrs: { label: { text: labelName } }
}
)
}
])
}
}
}

21
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style.scss

@ -14,16 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
$STROKE_BLUE: #288FFF;
$BG_WHITE: #FFFFFF;
$STROKE_BLUE: #288fff;
$BG_WHITE: #ffffff;
.x6-node[data-shape="dag-task"]{
.in-port-body{
&.adsorbed,&.available{
.circle {
stroke: $STROKE_BLUE;
stroke-width: 4;
fill: $BG_WHITE;
.x6-node[data-shape="dag-task"] {
&.available {
.dag-task-body {
stroke: $STROKE_BLUE;
stroke-width: 1;
stroke-dasharray: 5, 2;
}
&.adsorbed {
.dag-task-body {
stroke-width: 3;
}
}
}

10
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue

@ -60,6 +60,10 @@
</el-dialog>
<edge-edit-model ref="edgeEditModel" />
<el-drawer :visible.sync="versionDrawer" size="" :with-header="false">
<!-- fix the bug that Element-ui(2.13.2) auto focus on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-versions
:versionData="versionData"
:isInstance="type === 'instance'"
@ -187,7 +191,6 @@
},
beforeDestroy () {
this.resetParams()
clearInterval(this.statusTimer)
window.removeEventListener('resize', this.resizeDebounceFunc)
},
@ -400,6 +403,7 @@
buildGraphJSON (tasks, locations, connects) {
const nodes = []
const edges = []
if (!locations) { locations = [] }
tasks.forEach((task) => {
const location = locations.find((l) => l.taskCode === task.code) || {}
const node = this.$refs.canvas.genNodeJSON(
@ -484,6 +488,10 @@
const connects = this.connects
const json = this.buildGraphJSON(tasks, locations, connects)
this.$refs.canvas.fromJSON(json)
// Auto format
if (!locations) {
this.$refs.canvas.format()
}
},
/**
* Return to the previous process

24
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue

@ -60,7 +60,11 @@
<script>
import _ from 'lodash'
import disabledState from '@/module/mixin/disabledState'
const StrategyMap = {
WARN: 'WARN',
FAILED: 'FAILED',
WARNFAILED: 'WARNFAILED'
}
export default {
name: 'form-timeout-alarm',
data () {
@ -100,10 +104,12 @@
strategy: (() => {
// Handling checkout sequence
let strategy = this.strategy
if (strategy.length === 2 && strategy[0] === 'FAILED') {
return [strategy[1], strategy[0]].join(',')
if (strategy.length > 1) {
return StrategyMap.WARNFAILED
} else if (strategy.length === 1) {
return strategy[0]
} else {
return strategy.join(',')
return ''
}
})(),
interval: parseInt(this.interval),
@ -119,7 +125,15 @@
// Non-null objects represent backfill
if (!_.isEmpty(o) && o.timeout) {
this.enable = o.timeout.enable || false
this.strategy = _.split(o.timeout.strategy, ',') || ['WARN']
if (o.timeout.strategy) {
if (o.timeout.strategy === StrategyMap.WARNFAILED) {
this.strategy = [StrategyMap.WARN, StrategyMap.FAILED]
} else {
this.strategy = [o.timeout.strategy]
}
} else {
this.strategy = [StrategyMap.WARN]
}
this.interval = o.timeout.interval || null
}
},
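The change above collapses the two timeout checkboxes into a single strategy string (WARN, FAILED, or the combined WARNFAILED) on submit and expands it again on backfill. A hedged sketch of that round-trip, written in Java for consistency with the backend examples; the helper names are purely illustrative:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public final class TimeoutStrategyMapping {

        // Form selection -> value sent to the backend (mirrors the submit branch above).
        static String toBackend(List<String> selected) {
            if (selected.size() > 1) {
                return "WARNFAILED";
            }
            return selected.isEmpty() ? "" : selected.get(0);
        }

        // Backend value -> checkbox selection used for backfill; defaults to WARN when absent.
        static List<String> toForm(String strategy) {
            if (strategy == null || strategy.isEmpty()) {
                return Collections.singletonList("WARN");
            }
            return "WARNFAILED".equals(strategy)
                    ? Arrays.asList("WARN", "FAILED")
                    : Collections.singletonList(strategy);
        }
    }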

1
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue

@ -398,7 +398,6 @@
<!-- Pre-tasks in workflow -->
<m-pre-tasks
ref="preTasks"
v-if="['SHELL', 'SUB_PROCESS'].indexOf(nodeData.taskType) > -1"
:code="code"
/>
</div>

14
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/kinship/_source/graphGridOption.js

@ -16,6 +16,7 @@
*/
import _ from 'lodash'
import i18n from '@/module/i18n/index.js'
import dayjs from 'dayjs'
const getCategory = (categoryDic, { workFlowPublishStatus, schedulePublishStatus, code }, sourceWorkFlowCode) => {
if (code === sourceWorkFlowCode) return categoryDic.active
@ -30,6 +31,12 @@ const getCategory = (categoryDic, { workFlowPublishStatus, schedulePublishStatus
}
}
const formatName = (str) => {
if (typeof str !== 'string') return ''
return str.slice(0, 6) + (str.length > 6 ? '\n...' : '')
}
const publishStatusFormat = (status) => {
return status === 0 || status === '0' ? i18n.$t('offline') : status === 1 || status === '1' ? i18n.$t('online') : '-'
}
@ -73,8 +80,8 @@ export default function (locations, links, sourceWorkFlowCode, isShowLabel) {
return `
${i18n.$t('workflowName')}${name}<br/>
${i18n.$t('scheduleStartTime')}${scheduleStartTime}<br/>
${i18n.$t('scheduleEndTime')}${scheduleEndTime}<br/>
${i18n.$t('scheduleStartTime')}${dayjs(scheduleStartTime).format('YYYY-MM-DD HH:mm:ss')}<br/>
${i18n.$t('scheduleEndTime')}${dayjs(scheduleEndTime).format('YYYY-MM-DD HH:mm:ss')}<br/>
${i18n.$t('crontabExpression')}${crontab}<br/>
${i18n.$t('workflowPublishStatus')}${publishStatusFormat(workFlowPublishStatus)}<br/>
${i18n.$t('schedulePublishStatus')}${publishStatusFormat(schedulePublishStatus)}<br/>
@ -116,8 +123,7 @@ export default function (locations, links, sourceWorkFlowCode, isShowLabel) {
position: 'inside',
formatter: (params) => {
if (!params.data.name) return ''
const str = params.data.name.split('_').map(item => `{a|${item}\n}`).join('')
return str
return formatName(params.data.name)
},
color: '#222222',
textStyle: {

17
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js

@ -18,6 +18,16 @@
import _ from 'lodash'
import io from '@/module/io'
// Avoid passing in illegal values when users directly call third-party interfaces
const convertLocations = (locationStr) => {
let locations = null
if (!locationStr) return locations
try {
locations = JSON.parse(locationStr)
} catch (error) {}
return Array.isArray(locations) ? locations : null
}
export default {
/**
* Task status acquisition
@ -133,12 +143,14 @@ export default {
state.version = res.data.processDefinition.version
// name
state.name = res.data.processDefinition.name
// releaseState
state.releaseState = res.data.processDefinition.releaseState
// description
state.description = res.data.processDefinition.description
// taskRelationJson
state.connects = res.data.processTaskRelationList
// locations
state.locations = JSON.parse(res.data.processDefinition.locations)
state.locations = convertLocations(res.data.processDefinition.locations)
// global params
state.globalParams = res.data.processDefinition.globalParamList
// timeout
@ -164,6 +176,7 @@ export default {
'timeout',
'environmentCode'
]))
resolve(res.data)
}).catch(res => {
reject(res)
@ -235,7 +248,7 @@ export default {
// connects
state.connects = processTaskRelationList
// locations
state.locations = JSON.parse(processDefinition.locations)
state.locations = convertLocations(processDefinition.locations)
// global params
state.globalParams = processDefinition.globalParamList
// timeout

2
dolphinscheduler-ui/src/js/conf/home/store/user/actions.js

@ -113,7 +113,7 @@ export default {
*/
deleteToken ({ state }, payload) {
return new Promise((resolve, reject) => {
io.delete(`access-token/${payload.id}`, payload, res => {
io.delete(`access-tokens/${payload.id}`, payload, res => {
resolve(res)
}).catch(e => {
reject(e)

2
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@ -658,7 +658,7 @@ export default {
'Please delete all non-existent resources': 'Please delete all non-existent resources',
Kinship: 'Workflow relationship',
Reset: 'Reset',
KinshipStateActive: 'Active',
KinshipStateActive: 'Current selection',
KinshipState1: 'Online',
KinshipState0: 'Workflow is not online',
KinshipState10: 'Scheduling is not online',

10
install.sh

@ -19,7 +19,9 @@
workDir=`dirname $0`
workDir=`cd ${workDir};pwd`
set -a
source ${workDir}/conf/config/install_config.conf
set +a
# 1.replace file
echo "1.replace file"
@ -31,19 +33,13 @@ if [[ "$OSTYPE" == "darwin"* ]]; then
fi
datasourceDriverClassname="com.mysql.jdbc.Driver"
if [ $dbtype == "postgresql" ];then
if [[ $dbtype == "postgresql" ]];then
datasourceDriverClassname="org.postgresql.Driver"
fi
# Change configuration in conf/config/dolphinscheduler_env.sh
sed -i ${txt} "s@^export JAVA_HOME=.*@export JAVA_HOME=${javaHome}@g" conf/env/dolphinscheduler_env.sh
# Change configuration in conf/datasource.properties
sed -i ${txt} "s@^spring.datasource.driver-class-name=.*@spring.datasource.driver-class-name=${datasourceDriverClassname}@g" conf/datasource.properties
sed -i ${txt} "s@^spring.datasource.url=.*@spring.datasource.url=jdbc:${dbtype}://${dbhost}/${dbname}?characterEncoding=UTF-8\&allowMultiQueries=true@g" conf/datasource.properties
sed -i ${txt} "s@^spring.datasource.username=.*@spring.datasource.username=${username}@g" conf/datasource.properties
sed -i ${txt} "s@^spring.datasource.password=.*@spring.datasource.password=${password}@g" conf/datasource.properties
# Change configuration in conf/common.properties
sed -i ${txt} "s@^data.basedir.path=.*@data.basedir.path=${dataBasedirPath}@g" conf/common.properties
sed -i ${txt} "s@^resource.storage.type=.*@resource.storage.type=${resourceStorageType}@g" conf/common.properties

3
script/dolphinscheduler-daemon.sh

@ -36,7 +36,10 @@ BIN_DIR=`cd "$BIN_DIR"; pwd`
DOLPHINSCHEDULER_HOME=`cd "$BIN_DIR/.."; pwd`
source /etc/profile
set -a
source "${DOLPHINSCHEDULER_HOME}/conf/env/dolphinscheduler_env.sh"
source "${DOLPHINSCHEDULER_HOME}/conf/config/install_config.conf"
set +a
export HOSTNAME=`hostname`

2
script/remove-zk-node.sh

@ -31,8 +31,10 @@ BIN_DIR=`dirname $0`
BIN_DIR=`cd "$BIN_DIR"; pwd`
DOLPHINSCHEDULER_HOME=$BIN_DIR/..
set -a
source ${BIN_DIR}/../conf/config/install_config.conf
source ${BIN_DIR}/../conf/env/dolphinscheduler_env.sh
set +a
export JAVA_HOME=$JAVA_HOME

2
script/scp-hosts.sh

@ -18,7 +18,9 @@
workDir=`dirname $0`
workDir=`cd ${workDir};pwd`
set -a
source $workDir/../conf/config/install_config.conf
set +a
txt=""
if [[ "$OSTYPE" == "darwin"* ]]; then

3
script/start-all.sh

@ -18,7 +18,10 @@
workDir=`dirname $0`
workDir=`cd ${workDir};pwd`
set -a
source $workDir/../conf/config/install_config.conf
set +a
declare -A workersGroupMap=()

2
script/status-all.sh

@ -18,7 +18,9 @@
workDir=`dirname $0`
workDir=`cd ${workDir};pwd`
set -a
source $workDir/../conf/config/install_config.conf
set +a
# install_config.conf info
echo -e '\n'

2
script/stop-all.sh

@ -19,7 +19,9 @@
workDir=`dirname $0`
workDir=`cd ${workDir};pwd`
set -a
source $workDir/../conf/config/install_config.conf
set +a
declare -A workersGroupMap=()
