
[Feature][API] New restful API for workflow and schedule (#11912)

* [feat] New restful API for workflow and schedule

CRUD for workflow and schedule. Unlike the existing
API, this new restful API operates on a single resource
per request and returns the latest state of that resource.
For example, creating a workflow previously also required
posting the task definitions and task relation definitions,
but this patch allows you to create a workflow without any
task-related information

* use checkProjectAndAuthThrowException, and fix CI error

* Update dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ScheduleV2Controller.java

* change method names
from createProcessDefinitionV2 to createSingleProcessDefinition
from updateProcessDefinitionV2 to updateSingleProcessDefinition

Co-authored-by: caishunfeng <caishunfeng2021@gmail.com>
3.2.0-release
Jiajie Zhong 2 years ago committed by GitHub
commit 82ddd72e4a
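
For orientation, a minimal client-side sketch (not part of this patch) of how the new single-resource endpoint might be called. The host, port, context path and the "token" auth header name are placeholders, and the JSON fields mirror the WorkflowCreateRequest DTO shown further down; note that no task or task-relation payload is required.

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class CreateWorkflowV2Example {

    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.set("token", "<access-token>"); // assumed auth header name

        // Only the workflow itself is described; tasks and task relations
        // can be attached by later requests.
        String body = "{\"name\":\"demo-workflow\","
                + "\"projectCode\":1234567890123,"
                + "\"tenantCode\":\"default\"}";

        String created = restTemplate.postForObject(
                "http://localhost:12345/dolphinscheduler/v2/workflows", // assumed host and context path
                new HttpEntity<>(body, headers),
                String.class);

        // The response carries the latest state of the created workflow.
        System.out.println(created);
    }
}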
  1. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (15)
  2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ScheduleV2Controller.java (162)
  3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java (13)
  4. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowV2Controller.java (165)
  5. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/PageQueryDto.java (8)
  6. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleCreateRequest.java (120)
  7. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleFilterRequest.java (63)
  8. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleParam.java (38)
  9. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleUpdateRequest.java (138)
  10. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowCreateRequest.java (89)
  11. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowFilterRequest.java (63)
  12. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowUpdateRequest.java (99)
  13. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (157)
  14. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java (19)
  15. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java (51)
  16. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java (52)
  17. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java (49)
  18. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (262)
  19. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java (255)
  20. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java (7)
  21. dolphinscheduler-api/src/main/resources/i18n/messages.properties (4)
  22. dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties (4)
  23. dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties (4)
  24. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java (13)
  25. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java (91)
  26. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowV2ControllerTest.java (168)
  27. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java (61)
  28. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTestTool.java (36)
  29. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (654)
  30. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java (373)
  31. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java (10)
  32. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java (10)
  33. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml (20)
  34. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.xml (25)

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (15)

@@ -38,6 +38,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINI
 import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
+import org.apache.dolphinscheduler.api.exceptions.ServiceException;
 import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;

@@ -660,9 +661,8 @@ public class ProcessDefinitionController extends BaseController {
     public Result deleteProcessDefinitionByCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                                 @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
                                                 @PathVariable("code") long code) {
-        Map<String, Object> result =
-                processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, code);
-        return returnDataList(result);
+        processDefinitionService.deleteProcessDefinitionByCode(loginUser, code);
+        return new Result(Status.SUCCESS);
     }

     /**

@@ -691,13 +691,8 @@ public class ProcessDefinitionController extends BaseController {
         for (String strProcessDefinitionCode : processDefinitionCodeArray) {
             long code = Long.parseLong(strProcessDefinitionCode);
             try {
-                Map<String, Object> deleteResult =
-                        processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, code);
-                if (!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))) {
-                    deleteFailedCodeSet.add((String) deleteResult.get(Constants.MSG));
-                    logger.error((String) deleteResult.get(Constants.MSG));
-                }
-            } catch (Exception e) {
+                processDefinitionService.deleteProcessDefinitionByCode(loginUser, code);
+            } catch (ServiceException e) {
                 deleteFailedCodeSet.add(MessageFormat.format(Status.DELETE_PROCESS_DEFINE_BY_CODES_ERROR.getMsg(),
                         strProcessDefinitionCode));
             }

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ScheduleV2Controller.java (162)

@@ -0,0 +1,162 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_SCHEDULE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR;
import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleCreateRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleFilterRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleUpdateRequest;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import springfox.documentation.annotations.ApiIgnore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
/**
* schedule controller
*/
@Api(tags = "SCHEDULER_TAG")
@RestController
@RequestMapping("/v2/schedules")
public class ScheduleV2Controller extends BaseController {
@Autowired
private SchedulerService schedulerService;
/**
* Create resource schedule
*
* @param loginUser login user
* @param scheduleCreateRequest the new schedule object will be created
* @return ResourceResponse object created
*/
@ApiOperation(value = "create", notes = "CREATE_SCHEDULE_NOTES")
@PostMapping(consumes = {"application/json"})
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_SCHEDULE_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<Schedule> createSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestBody ScheduleCreateRequest scheduleCreateRequest) {
Schedule schedule = schedulerService.createSchedulesV2(loginUser, scheduleCreateRequest);
return Result.success(schedule);
}
/**
* Delete schedule by id
*
* @param loginUser login user
* @param id schedule object id
*/
@ApiOperation(value = "delete", notes = "DELETE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", dataTypeClass = long.class, example = "123456", required = true)
})
@DeleteMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_SCHEDULE_BY_ID_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result deleteSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("id") Integer id) {
schedulerService.deleteSchedulesById(loginUser, id);
return Result.success();
}
/**
* Update resource schedule
*
* @param loginUser login user
* @param id schedule object id
* @param scheduleUpdateRequest the schedule object will be updated
* @return result Result
*/
@ApiOperation(value = "update", notes = "UPDATE_SCHEDULE_NOTES")
@PutMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_SCHEDULE_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<Schedule> updateSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("id") Integer id,
@RequestBody ScheduleUpdateRequest scheduleUpdateRequest) {
Schedule schedule = schedulerService.updateSchedulesV2(loginUser, id, scheduleUpdateRequest);
return Result.success(schedule);
}
/**
* Get resource schedule by id
*
* @param loginUser login user
* @param id schedule object id
* @return result Result
*/
@ApiOperation(value = "get", notes = "GET_SCHEDULE_BY_ID_NOTES")
@GetMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_SCHEDULE_LIST_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<Schedule> getSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("id") Integer id) {
Schedule schedule = schedulerService.getSchedule(loginUser, id);
return Result.success(schedule);
}
/**
* Get resource schedule according to query parameter
*
* @param loginUser login user
* @param scheduleFilterRequest schedule filter request
* @return result Result
*/
@ApiOperation(value = "get", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES")
@GetMapping(consumes = {"application/json"})
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<PageInfo<Schedule>> filterSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestBody ScheduleFilterRequest scheduleFilterRequest) {
PageInfo<Schedule> schedules = schedulerService.filterSchedules(loginUser, scheduleFilterRequest);
return Result.success(schedules);
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java (13)

@@ -18,7 +18,7 @@
 package org.apache.dolphinscheduler.api.controller;

 import static org.apache.dolphinscheduler.api.enums.Status.CREATE_SCHEDULE_ERROR;
-import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_BY_ID_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.OFFLINE_SCHEDULE_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.PREVIEW_SCHEDULE_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.PUBLISH_SCHEDULE_ONLINE_ERROR;

@@ -28,6 +28,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR
 import static org.apache.dolphinscheduler.common.Constants.SESSION_USER;

 import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
+import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
 import org.apache.dolphinscheduler.api.service.SchedulerService;
 import org.apache.dolphinscheduler.api.utils.Result;

@@ -64,7 +65,7 @@ import io.swagger.annotations.ApiParam;
 /**
  * scheduler controller
  */
-@Api(tags = "SCHEDULER_TAG")
+@Api(tags = "SCHEDULE_TAG")
 @RestController
 @RequestMapping("/projects/{projectCode}/schedules")
 public class SchedulerController extends BaseController {

@@ -260,19 +261,19 @@ public class SchedulerController extends BaseController {
      * @param id scheule id
      * @return delete result code
      */
-    @ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES")
+    @ApiOperation(value = "deleteScheduleById", notes = "DELETE_SCHEDULE_NOTES")
     @ApiImplicitParams({
             @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataTypeClass = int.class, example = "100")
     })
     @DeleteMapping(value = "/{id}")
     @ResponseStatus(HttpStatus.OK)
-    @ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR)
+    @ApiException(DELETE_SCHEDULE_BY_ID_ERROR)
     @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
     public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser,
                                      @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode,
                                      @PathVariable("id") Integer id) {
-        Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectCode, id);
-        return returnDataList(result);
+        schedulerService.deleteSchedulesById(loginUser, id);
+        return new Result(Status.SUCCESS);
     }

     /**

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowV2Controller.java (165)

@@ -0,0 +1,165 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR;
import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowCreateRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowFilterRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowUpdateRequest;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.User;
import springfox.documentation.annotations.ApiIgnore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
/**
* workflow controller
*/
@Api(tags = "WORKFLOW_TAG")
@RestController
@RequestMapping("/v2/workflows")
public class WorkflowV2Controller extends BaseController {
@Autowired
private ProcessDefinitionService processDefinitionService;
/**
* Create resource workflow
*
* @param loginUser login user
* @param workflowCreateRequest the new workflow object will be created
* @return ResourceResponse object created
*/
@ApiOperation(value = "create", notes = "CREATE_WORKFLOWS_NOTES")
@PostMapping(consumes = {"application/json"})
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_PROCESS_DEFINITION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<ProcessDefinition> createWorkflow(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestBody WorkflowCreateRequest workflowCreateRequest) {
ProcessDefinition processDefinition =
processDefinitionService.createSingleProcessDefinition(loginUser, workflowCreateRequest);
return Result.success(processDefinition);
}
/**
* Delete workflow by code
*
* @param loginUser login user
* @param code process definition code
* @return Result result object delete
*/
@ApiOperation(value = "delete", notes = "DELETE_WORKFLOWS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "code", value = "WORKFLOW_CODE", dataTypeClass = long.class, example = "123456", required = true)
})
@DeleteMapping(value = "/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINE_BY_CODE_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result deleteWorkflow(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("code") Long code) {
processDefinitionService.deleteProcessDefinitionByCode(loginUser, code);
return Result.success();
}
/**
* Update resource workflow
*
* @param loginUser login user
* @param code workflow resource code you want to update
* @param workflowUpdateRequest workflowUpdateRequest
* @return ResourceResponse object updated
*/
@ApiOperation(value = "update", notes = "UPDATE_WORKFLOWS_NOTES")
@PutMapping(value = "/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROCESS_DEFINITION_ERROR)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<ProcessDefinition> updateWorkflow(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("code") Long code,
@RequestBody WorkflowUpdateRequest workflowUpdateRequest) {
ProcessDefinition processDefinition =
processDefinitionService.updateSingleProcessDefinition(loginUser, code, workflowUpdateRequest);
return Result.success(processDefinition);
}
/**
* Get resource workflow
*
* @param loginUser login user
* @param code workflow resource code you want to update
* @return ResourceResponse object get from condition
*/
@ApiOperation(value = "get", notes = "GET_WORKFLOWS_NOTES")
@GetMapping(value = "/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<ProcessDefinition> getWorkflow(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("code") Long code) {
ProcessDefinition processDefinition = processDefinitionService.getProcessDefinition(loginUser, code);
return Result.success(processDefinition);
}
/**
* Get resource workflows according to query parameter
*
* @param loginUser login user
* @param workflowFilterRequest workflowFilterRequest
* @return PageResourceResponse from condition
*/
@ApiOperation(value = "get", notes = "FILTER_WORKFLOWS_NOTES")
@GetMapping(consumes = {"application/json"})
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
@AccessLogAnnotation(ignoreRequestArgs = "loginUser")
public Result<PageInfo<ProcessDefinition>> filterWorkflows(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestBody WorkflowFilterRequest workflowFilterRequest) {
PageInfo<ProcessDefinition> processDefinitions =
processDefinitionService.filterProcessDefinition(loginUser, workflowFilterRequest);
return Result.success(processDefinitions);
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/PageQueryDto.java (8)

@@ -17,9 +17,9 @@
 package org.apache.dolphinscheduler.api.dto;

+import lombok.Data;
+
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
-
-import lombok.Data;

 /**
  * page query dto

@@ -29,8 +29,8 @@ import lombok.Data;
 public class PageQueryDto {

     @ApiModelProperty(example = "10", required = true)
-    private Integer pageSize;
+    private Integer pageSize = 10;

     @ApiModelProperty(example = "1", required = true)
-    private Integer pageNo;
+    private Integer pageNo = 1;
 }
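
The practical effect of these new defaults, as a minimal sketch (not part of the patch), assuming the Lombok-generated getters and the filter DTOs added further down: a filter request that omits paging information still resolves to the first page of ten items.

import org.apache.dolphinscheduler.api.dto.workflow.WorkflowFilterRequest;

public class PageDefaultsSketch {

    public static void main(String[] args) {
        // pageNo and pageSize are inherited from PageQueryDto; when the caller
        // sends neither field, the defaults introduced in this patch apply.
        WorkflowFilterRequest filter = new WorkflowFilterRequest();
        System.out.println(filter.getPageNo());   // 1
        System.out.println(filter.getPageSize()); // 10
    }
}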

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleCreateRequest.java (120)

@@ -0,0 +1,120 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.schedule;
import static org.apache.dolphinscheduler.common.utils.DateUtils.stringToDate;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import java.util.Date;
import lombok.Data;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.swagger.annotations.ApiModelProperty;
/**
* schedule create request
*/
@Data
public class ScheduleCreateRequest {
@ApiModelProperty(example = "1234567890123", required = true)
private long processDefinitionCode;
@ApiModelProperty(example = "schedule timezone", required = true)
private String crontab;
@ApiModelProperty(example = "2021-01-01 10:00:00", required = true)
private String startTime;
@ApiModelProperty(example = "2022-01-01 12:00:00", required = true)
private String endTime;
@ApiModelProperty(example = "Asia/Shanghai", required = true)
private String timezoneId;
@ApiModelProperty(allowableValues = "CONTINUE / END", example = "CONTINUE", notes = "default CONTINUE if value not provide.")
private String failureStrategy;
@ApiModelProperty(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE", notes = "default OFFLINE if value not provide.")
private String releaseState;
@ApiModelProperty(allowableValues = "NONE / SUCCESS / FAILURE / ALL", example = "SUCCESS", notes = "default NONE if value not provide.")
private String warningType;
@ApiModelProperty(example = "2", notes = "default 0 if value not provide.")
private int warningGroupId;
@ApiModelProperty(allowableValues = "HIGHEST / HIGH / MEDIUM / LOW / LOWEST", example = "MEDIUM", notes = "default MEDIUM if value not provide.")
private String processInstancePriority;
@ApiModelProperty(example = "worker-group-name")
private String workerGroup;
@ApiModelProperty(example = "environment-code")
private long environmentCode;
public String getScheduleParam() {
Gson gson = new GsonBuilder().serializeNulls().create();
ScheduleParam scheduleParam = new ScheduleParam(this.startTime, this.endTime, this.crontab, this.timezoneId);
return gson.toJson(scheduleParam);
}
public Schedule convert2Schedule() {
Schedule schedule = new Schedule();
schedule.setProcessDefinitionCode(this.processDefinitionCode);
schedule.setCrontab(this.crontab);
schedule.setStartTime(stringToDate(this.startTime));
schedule.setEndTime(stringToDate(this.endTime));
schedule.setTimezoneId(this.timezoneId);
schedule.setWarningGroupId(this.warningGroupId);
schedule.setWorkerGroup(this.workerGroup);
schedule.setEnvironmentCode(this.environmentCode);
FailureStrategy newFailureStrategy =
this.failureStrategy == null ? FailureStrategy.CONTINUE : FailureStrategy.valueOf(this.failureStrategy);
schedule.setFailureStrategy(newFailureStrategy);
ReleaseState newReleaseState =
this.releaseState == null ? ReleaseState.OFFLINE : ReleaseState.valueOf(this.releaseState);
schedule.setReleaseState(newReleaseState);
WarningType newWarningType =
this.warningType == null ? WarningType.NONE : WarningType.valueOf(this.warningType);
schedule.setWarningType(newWarningType);
Priority newPriority =
this.processInstancePriority == null ? Priority.MEDIUM : Priority.valueOf(this.processInstancePriority);
schedule.setProcessInstancePriority(newPriority);
Date date = new Date();
schedule.setCreateTime(date);
schedule.setUpdateTime(date);
return schedule;
}
}
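
A minimal sketch (not part of the patch) of how the conversion behaves, assuming the Lombok-generated setters; the code and timestamps are hypothetical. Fields left null fall back to the defaults listed in the annotations above.

import org.apache.dolphinscheduler.api.dto.schedule.ScheduleCreateRequest;
import org.apache.dolphinscheduler.dao.entity.Schedule;

public class ScheduleCreateSketch {

    public static void main(String[] args) {
        ScheduleCreateRequest request = new ScheduleCreateRequest();
        request.setProcessDefinitionCode(1234567890123L); // hypothetical code
        request.setCrontab("0 0 1 * * ? *");
        request.setStartTime("2021-01-01 10:00:00");
        request.setEndTime("2022-01-01 12:00:00");
        request.setTimezoneId("Asia/Shanghai");

        // failureStrategy, releaseState, warningType and processInstancePriority
        // are null, so convert2Schedule() falls back to CONTINUE / OFFLINE / NONE / MEDIUM.
        Schedule schedule = request.convert2Schedule();
        System.out.println(schedule.getReleaseState()); // OFFLINE

        // getScheduleParam() serializes start/end/crontab/timezone into the
        // JSON string stored alongside the schedule.
        System.out.println(request.getScheduleParam());
    }
}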

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleFilterRequest.java (63)

@@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.schedule;
import org.apache.dolphinscheduler.api.dto.PageQueryDto;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import lombok.Data;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* schedule query request
*/
@ApiModel("SCHEDULE-QUERY")
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
@Data
public class ScheduleFilterRequest extends PageQueryDto {
@ApiModelProperty(example = "project-name")
private String projectName;
@ApiModelProperty(example = "process-definition-name")
private String processDefinitionName;
@ApiModelProperty(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE", notes = "default OFFLINE if value not provide.")
private String releaseState;
public Schedule convert2Schedule() {
Schedule schedule = new Schedule();
if (this.projectName != null) {
schedule.setProjectName(this.projectName);
}
if (this.processDefinitionName != null) {
schedule.setProcessDefinitionName(this.processDefinitionName);
}
if (this.releaseState != null) {
schedule.setReleaseState(ReleaseState.valueOf(this.releaseState));
}
return schedule;
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleParam.java (38)

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.schedule;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class ScheduleParam {
private String startTime;
private String endTime;
private String crontab;
private String timezoneId;
public ScheduleParam(String startTime, String endTime, String crontab, String timezoneId) {
this.startTime = startTime;
this.endTime = endTime;
this.crontab = crontab;
this.timezoneId = timezoneId;
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleUpdateRequest.java (138)

@@ -0,0 +1,138 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.schedule;
import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS;
import static org.apache.dolphinscheduler.common.utils.DateUtils.format;
import static org.apache.dolphinscheduler.common.utils.DateUtils.stringToDate;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.commons.beanutils.BeanUtils;
import java.lang.reflect.InvocationTargetException;
import java.util.Date;
import lombok.Data;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.swagger.annotations.ApiModelProperty;
/**
* schedule update request
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
@Data
public class ScheduleUpdateRequest {
@ApiModelProperty(example = "schedule timezone", required = true)
private String crontab;
@ApiModelProperty(example = "2021-01-01 10:00:00", required = true)
private String startTime;
@ApiModelProperty(example = "2022-01-01 12:00:00", required = true)
private String endTime;
@ApiModelProperty(example = "Asia/Shanghai", required = true)
private String timezoneId;
@ApiModelProperty(allowableValues = "CONTINUE / END", example = "CONTINUE", notes = "default CONTINUE if value not provide.")
private String failureStrategy;
@ApiModelProperty(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE", notes = "default OFFLINE if value not provide.")
private String releaseState;
@ApiModelProperty(allowableValues = "NONE / SUCCESS / FAILURE / ALL", example = "SUCCESS", notes = "default NONE if value not provide.")
private String warningType;
@ApiModelProperty(example = "2", notes = "default 0 if value not provide.")
private int warningGroupId;
@ApiModelProperty(allowableValues = "HIGHEST / HIGH / MEDIUM / LOW / LOWEST", example = "MEDIUM", notes = "default MEDIUM if value not provide.")
private String processInstancePriority;
@ApiModelProperty(example = "worker-group-name")
private String workerGroup;
@ApiModelProperty(example = "environment-code")
private long environmentCode;
public String updateScheduleParam(Schedule schedule) throws InvocationTargetException, IllegalAccessException, InstantiationException, NoSuchMethodException {
Schedule scheduleUpdate = this.mergeIntoSchedule(schedule);
String startTimeUpdate = scheduleUpdate.getStartTime() == null ? null
: format(scheduleUpdate.getStartTime(), YYYY_MM_DD_HH_MM_SS, schedule.getTimezoneId());
String endTimeUpdate = scheduleUpdate.getEndTime() == null ? null
: format(scheduleUpdate.getEndTime(), YYYY_MM_DD_HH_MM_SS, schedule.getTimezoneId());
ScheduleParam scheduleParam = new ScheduleParam(startTimeUpdate, endTimeUpdate, scheduleUpdate.getCrontab(),
scheduleUpdate.getTimezoneId());
Gson gson = new GsonBuilder().serializeNulls().create();
return gson.toJson(scheduleParam);
}
public Schedule mergeIntoSchedule(Schedule schedule) throws InvocationTargetException, IllegalAccessException, InstantiationException, NoSuchMethodException {
Schedule scheduleDeepCopy = (Schedule) BeanUtils.cloneBean(schedule);
assert scheduleDeepCopy != null;
if (this.crontab != null) {
scheduleDeepCopy.setCrontab(this.crontab);
}
if (this.startTime != null) {
scheduleDeepCopy.setStartTime(stringToDate(this.startTime));
}
if (this.endTime != null) {
scheduleDeepCopy.setEndTime(stringToDate(this.endTime));
}
if (this.timezoneId != null) {
scheduleDeepCopy.setTimezoneId(this.timezoneId);
}
if (this.failureStrategy != null) {
scheduleDeepCopy.setFailureStrategy(FailureStrategy.valueOf(this.failureStrategy));
}
if (this.releaseState != null) {
scheduleDeepCopy.setReleaseState(ReleaseState.valueOf(this.releaseState));
}
if (this.warningType != null) {
scheduleDeepCopy.setWarningType(WarningType.valueOf(this.warningType));
}
if (this.warningGroupId != 0) {
scheduleDeepCopy.setWarningGroupId(this.warningGroupId);
}
if (this.processInstancePriority != null) {
scheduleDeepCopy.setProcessInstancePriority(Priority.valueOf(this.processInstancePriority));
}
if (this.workerGroup != null) {
scheduleDeepCopy.setWorkerGroup(this.workerGroup);
}
if (this.environmentCode != 0L) {
scheduleDeepCopy.setEnvironmentCode(this.environmentCode);
}
scheduleDeepCopy.setUpdateTime(new Date());
return scheduleDeepCopy;
}
}
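
A minimal sketch (not part of the patch) of the partial-update semantics, assuming the Lombok-generated setters; the existing schedule would normally come from the database. Only the non-null (or non-zero) request fields overwrite the clone.

import org.apache.dolphinscheduler.api.dto.schedule.ScheduleUpdateRequest;
import org.apache.dolphinscheduler.dao.entity.Schedule;

public class ScheduleUpdateSketch {

    public static void main(String[] args) throws Exception {
        Schedule existing = new Schedule();        // normally loaded from the database
        existing.setCrontab("0 0 1 * * ? *");
        existing.setTimezoneId("Asia/Shanghai");

        ScheduleUpdateRequest update = new ScheduleUpdateRequest();
        update.setCrontab("0 0 2 * * ? *");        // the only field the caller sends

        // mergeIntoSchedule() clones the existing row and overwrites only the
        // provided fields, so timezoneId is preserved.
        Schedule merged = update.mergeIntoSchedule(existing);
        System.out.println(merged.getCrontab());    // 0 0 2 * * ? *
        System.out.println(merged.getTimezoneId()); // Asia/Shanghai
    }
}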

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowCreateRequest.java (89)

@@ -0,0 +1,89 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.workflow;
import static org.apache.dolphinscheduler.common.Constants.VERSION_FIRST;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import java.util.Date;
import lombok.Data;
import io.swagger.annotations.ApiModelProperty;
/**
* workflow create request
*/
@Data
public class WorkflowCreateRequest {
@ApiModelProperty(example = "workflow name", required = true)
private String name;
@ApiModelProperty(example = "workflow's description")
private String description;
@ApiModelProperty(example = "12345", required = true)
private long projectCode;
@ApiModelProperty(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE", notes = "default OFFLINE if not provide.")
private String releaseState;
@ApiModelProperty(example = "[{\"prop\":\"key\",\"value\":\"value\",\"direct\":\"IN\",\"type\":\"VARCHAR\"}]")
private String globalParams;
@ApiModelProperty(example = "2")
private int warningGroupId;
@ApiModelProperty(example = "60")
private int timeout;
@ApiModelProperty(example = "tenant1", required = true)
private String tenantCode;
@ApiModelProperty(allowableValues = "PARALLEL / SERIAL_WAIT / SERIAL_DISCARD / SERIAL_PRIORITY", example = "PARALLEL", notes = "default PARALLEL if not provide.")
private String executionType;
public ProcessDefinition convert2ProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setName(this.name);
processDefinition.setDescription(this.description);
processDefinition.setProjectCode(this.projectCode);
processDefinition.setGlobalParams(this.globalParams);
processDefinition.setWarningGroupId(this.warningGroupId);
processDefinition.setTimeout(this.timeout);
processDefinition.setTenantCode(this.tenantCode);
ReleaseState pdReleaseState =
this.releaseState == null ? ReleaseState.OFFLINE : ReleaseState.valueOf(this.releaseState);
processDefinition.setReleaseState(pdReleaseState);
ProcessExecutionTypeEnum processExecutionTypeEnum =
this.executionType == null ? ProcessExecutionTypeEnum.PARALLEL
: ProcessExecutionTypeEnum.valueOf(this.executionType);
processDefinition.setExecutionType(processExecutionTypeEnum);
processDefinition.setVersion(VERSION_FIRST);
Date date = new Date();
processDefinition.setCreateTime(date);
processDefinition.setUpdateTime(date);
return processDefinition;
}
}
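
A minimal sketch (not part of the patch) of the conversion and its defaults, assuming the Lombok-generated setters; names and codes are hypothetical.

import org.apache.dolphinscheduler.api.dto.workflow.WorkflowCreateRequest;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;

public class WorkflowCreateSketch {

    public static void main(String[] args) {
        WorkflowCreateRequest request = new WorkflowCreateRequest();
        request.setName("demo-workflow");           // hypothetical values
        request.setProjectCode(1234567890123L);
        request.setTenantCode("default");

        // releaseState and executionType were not set, so the conversion falls
        // back to OFFLINE and PARALLEL, and the version starts at VERSION_FIRST.
        ProcessDefinition definition = request.convert2ProcessDefinition();
        System.out.println(definition.getReleaseState());  // OFFLINE
        System.out.println(definition.getExecutionType()); // PARALLEL
    }
}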

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowFilterRequest.java (63)

@@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.workflow;
import org.apache.dolphinscheduler.api.dto.PageQueryDto;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import lombok.Data;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* workflow query request
*/
@ApiModel("WORKFLOW-QUERY")
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
@Data
public class WorkflowFilterRequest extends PageQueryDto {
@ApiModelProperty(example = "project-name")
private String projectName;
@ApiModelProperty(example = "workflow-name")
private String workflowName;
@ApiModelProperty(example = "ONLINE / OFFLINE")
private String releaseState;
@ApiModelProperty(example = "ONLINE / OFFLINE")
private String scheduleReleaseState;
public ProcessDefinition convert2ProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
if (this.workflowName != null) {
processDefinition.setName(this.workflowName);
}
if (this.releaseState != null) {
processDefinition.setReleaseState(ReleaseState.valueOf(this.releaseState));
}
return processDefinition;
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowUpdateRequest.java (99)

@@ -0,0 +1,99 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto.workflow;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import java.util.Date;
import lombok.Data;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.swagger.annotations.ApiModelProperty;
/**
* workflow update request
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
@Data
public class WorkflowUpdateRequest {
@ApiModelProperty(example = "workflow's name")
private String name;
@ApiModelProperty(example = "workflow's description")
private String description;
@ApiModelProperty(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE")
private String releaseState;
@ApiModelProperty(example = "[{\"prop\":\"key\",\"value\":\"value\",\"direct\":\"IN\",\"type\":\"VARCHAR\"}]")
private String globalParams;
@ApiModelProperty(example = "2")
private int warningGroupId;
@ApiModelProperty(example = "60")
private int timeout;
@ApiModelProperty(example = "tenantCode1")
private String tenantCode;
@ApiModelProperty(allowableValues = "PARALLEL / SERIAL_WAIT / SERIAL_DISCARD / SERIAL_PRIORITY", example = "PARALLEL", notes = "default PARALLEL if not provide.")
private String executionType;
public ProcessDefinition mergeIntoProcessDefinition(ProcessDefinition processDefinition) {
ProcessDefinition processDefinitionDeepCopy =
JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class);
assert processDefinitionDeepCopy != null;
if (this.name != null) {
processDefinitionDeepCopy.setName(this.name);
}
if (this.description != null) {
processDefinitionDeepCopy.setDescription(this.description);
}
if (this.releaseState != null) {
processDefinitionDeepCopy.setReleaseState(ReleaseState.valueOf(this.releaseState));
}
if (this.globalParams != null) {
processDefinitionDeepCopy.setGlobalParams(this.globalParams);
}
if (this.warningGroupId != 0) {
processDefinitionDeepCopy.setWarningGroupId(this.warningGroupId);
}
if (this.timeout != 0) {
processDefinitionDeepCopy.setTimeout(this.timeout);
}
if (this.tenantCode != null) {
processDefinitionDeepCopy.setTenantCode(this.tenantCode);
}
if (this.executionType != null) {
processDefinitionDeepCopy.setExecutionType(ProcessExecutionTypeEnum.valueOf(this.executionType));
}
int version = processDefinitionDeepCopy.getVersion() + 1;
processDefinitionDeepCopy.setVersion(version);
processDefinitionDeepCopy.setUpdateTime(new Date());
return processDefinitionDeepCopy;
}
}
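
A minimal sketch (not part of the patch) of the merge semantics, assuming the Lombok-generated setters; the existing definition would normally be loaded by service code. Unset fields keep their current values and the version is bumped.

import org.apache.dolphinscheduler.api.dto.workflow.WorkflowUpdateRequest;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;

public class WorkflowUpdateSketch {

    public static void main(String[] args) {
        ProcessDefinition existing = new ProcessDefinition(); // normally loaded from storage
        existing.setName("demo-workflow");
        existing.setVersion(3);

        WorkflowUpdateRequest update = new WorkflowUpdateRequest();
        update.setDescription("updated description"); // the only field the caller sends

        // mergeIntoProcessDefinition() works on a JSON deep copy, leaves unset
        // fields untouched and increments the version for the new snapshot.
        ProcessDefinition merged = update.mergeIntoProcessDefinition(existing);
        System.out.println(merged.getName());    // demo-workflow
        System.out.println(merged.getVersion()); // 4
    }
}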

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (157)

@@ -17,11 +17,11 @@
 package org.apache.dolphinscheduler.api.enums;

-import org.springframework.context.i18n.LocaleContextHolder;
-
 import java.util.Locale;
 import java.util.Optional;

+import org.springframework.context.i18n.LocaleContextHolder;
+
 /**
  * status enum // todo #4855 One category one interval
  */

@@ -140,16 +140,20 @@ public enum Status {
     QUERY_DETAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query detail of process definition error", "查询工作流详细信息错误"),
     QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"),
     ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"),
-    GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"),
+    GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error",
+            "查询工作流定义节点信息错误"),
     QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"),
     QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"),
     UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"),
     QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"),
     DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"),
-    QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"),
-    QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
+    QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,
+            "query sub process instance detail info by task id error", "查询子流程任务实例错误"),
+    QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,
+            "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
     QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"),
-    ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
+    ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error",
+            "查询工作流实例甘特图数据错误"),
     QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"),
     SIGN_OUT_ERROR(10123, "sign out error", "退出错误"),
     OS_TENANT_CODE_HAS_ALREADY_EXISTS(10124, "os tenant code has already exists", "操作系统租户已存在"),

@@ -165,25 +169,37 @@ public enum Status {
     NAME_NULL(10134, "name must be not null", "名称不能为空"),
     NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"),
     SAVE_ERROR(10136, "save error", "保存错误"),
-    DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"),
-    BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误: {0}"),
+    DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!",
+            "请先删除全部工作流定义"),
+    BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error",
+            "批量删除工作流实例错误: {0}"),
     PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"),
     PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"),
     SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"),
-    DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
-    DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
-    DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
-    DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
+    DELETE_TENANT_BY_ID_FAIL(10142,
+            "delete tenant by id fail, for there are {0} process instances in executing using it",
+            "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
+    DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it",
+            "删除租户失败,有[{0}]个工作流定义正在使用"),
+    DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it",
+            "删除租户失败,有[{0}]个用户正在使用"),
+    DELETE_WORKER_GROUP_BY_ID_FAIL(10145,
+            "delete worker group by id fail, for there are {0} process instances in executing using it",
+            "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
     QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"),
     DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"),
     USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"),
-    COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
-    MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
+    COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}",
+            "从{0}复制工作流到{1}错误 : {2}"),
+    MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}",
+            "从{0}移动工作流到{1}错误 : {2}"),
     SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"),
-    SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152
-            , "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"),
-    SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153
-            , "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"),
+    SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152,
+            "Switch process definition version error: not exists process definition, [process definition id {0}]",
+            "切换工作流版本出错:工作流不存在,[工作流id {0}]"),
+    SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153,
+            "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]",
+            "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"),
     QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"),
     DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"),

@@ -192,11 +208,17 @@ public enum Status {
     BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"),
     BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"),
     QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"),
-    QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR(10162, "query authorized and user created project error error", "查询授权的和用户创建的项目错误"),
-    DELETE_PROCESS_DEFINITION_EXECUTING_FAIL(10163, "delete process definition by code fail, for there are {0} process instances in executing using it", "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"),
-    CHECK_OS_TENANT_CODE_ERROR(10164, "Tenant code invalid, should follow linux's users naming conventions", "非法的租户名,需要遵守 Linux 用户命名规范"),
+    QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR(10162, "query authorized and user created project error error",
+            "查询授权的和用户创建的项目错误"),
+    DELETE_PROCESS_DEFINITION_EXECUTING_FAIL(10163,
+            "delete process definition by code fail, for there are {0} process instances in executing using it",
+            "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"),
+    CHECK_OS_TENANT_CODE_ERROR(10164, "Tenant code invalid, should follow linux's users naming conventions",
+            "非法的租户名,需要遵守 Linux 用户命名规范"),
     FORCE_TASK_SUCCESS_ERROR(10165, "force task success error", "强制成功任务实例错误"),
-    TASK_INSTANCE_STATE_OPERATION_ERROR(10166, "the status of task instance {0} is {1},Cannot perform force success operation", "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"),
+    TASK_INSTANCE_STATE_OPERATION_ERROR(10166,
+            "the status of task instance {0} is {1},Cannot perform force success operation",
+            "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"),
     DATASOURCE_TYPE_NOT_EXIST(10167, "data source type not exist", "数据源类型不存在"),
     PROCESS_DEFINITION_NAME_EXIST(10168, "process definition name {0} already exists", "工作流定义名称[{0}]已存在"),
DATASOURCE_DB_TYPE_ILLEGAL(10169, "datasource type illegal", "数据源类型参数不合法"), DATASOURCE_DB_TYPE_ILLEGAL(10169, "datasource type illegal", "数据源类型参数不合法"),
@ -211,20 +233,28 @@ public enum Status {
QUERY_WORKER_ADDRESS_LIST_FAIL(10178, "query worker address list fail ", "查询worker地址列表失败"), QUERY_WORKER_ADDRESS_LIST_FAIL(10178, "query worker address list fail ", "查询worker地址列表失败"),
TRANSFORM_PROJECT_OWNERSHIP(10179, "Please transform project ownership [{0}]", "请先转移项目所有权[{0}]"), TRANSFORM_PROJECT_OWNERSHIP(10179, "Please transform project ownership [{0}]", "请先转移项目所有权[{0}]"),
QUERY_ALERT_GROUP_ERROR(10180, "query alert group error", "查询告警组错误"), QUERY_ALERT_GROUP_ERROR(10180, "query alert group error", "查询告警组错误"),
CURRENT_LOGIN_USER_TENANT_NOT_EXIST(10181, "the tenant of the currently login user is not specified", "未指定当前登录用户的租户"), CURRENT_LOGIN_USER_TENANT_NOT_EXIST(10181, "the tenant of the currently login user is not specified",
"未指定当前登录用户的租户"),
REVOKE_PROJECT_ERROR(10182, "revoke project error", "撤销项目授权错误"), REVOKE_PROJECT_ERROR(10182, "revoke project error", "撤销项目授权错误"),
QUERY_AUTHORIZED_USER(10183, "query authorized user error", "查询拥有项目权限的用户错误"), QUERY_AUTHORIZED_USER(10183, "query authorized user error", "查询拥有项目权限的用户错误"),
PROJECT_NOT_EXIST(10190, "This project was not found. Please refresh page.", "该项目不存在,请刷新页面"), PROJECT_NOT_EXIST(10190, "This project was not found. Please refresh page.", "该项目不存在,请刷新页面"),
TASK_INSTANCE_HOST_IS_NULL(10191, "task instance host is null", "任务实例host为空"), TASK_INSTANCE_HOST_IS_NULL(10191, "task instance host is null", "任务实例host为空"),
QUERY_EXECUTING_WORKFLOW_ERROR(10192, "query executing workflow error", "查询运行的工作流实例错误"), QUERY_EXECUTING_WORKFLOW_ERROR(10192, "query executing workflow error", "查询运行的工作流实例错误"),
DELETE_PROCESS_DEFINITION_USE_BY_OTHER_FAIL(10193, "delete process definition fail, cause used by other tasks: {0}", "删除工作流定时失败,被其他任务引用:{0}"), DELETE_PROCESS_DEFINITION_USE_BY_OTHER_FAIL(10193, "delete process definition fail, cause used by other tasks: {0}",
DELETE_TASK_USE_BY_OTHER_FAIL(10194, "delete task {0} fail, cause used by other tasks: {1}", "删除任务 {0} 失败,被其他任务引用:{1}"), "删除工作流定时失败,被其他任务引用:{0}"),
DELETE_TASK_USE_BY_OTHER_FAIL(10194, "delete task {0} fail, cause used by other tasks: {1}",
"删除任务 {0} 失败,被其他任务引用:{1}"),
TASK_WITH_DEPENDENT_ERROR(10195, "task used in other tasks", "删除被其他任务引用"), TASK_WITH_DEPENDENT_ERROR(10195, "task used in other tasks", "删除被其他任务引用"),
TASK_SAVEPOINT_ERROR(10196, "task savepoint error", "任务实例savepoint错误"), TASK_SAVEPOINT_ERROR(10196, "task savepoint error", "任务实例savepoint错误"),
TASK_STOP_ERROR(10197, "task stop error", "任务实例停止错误"), TASK_STOP_ERROR(10197, "task stop error", "任务实例停止错误"),
LIST_TASK_TYPE_ERROR(10200, "list task type error", "查询任务类型列表错误"), LIST_TASK_TYPE_ERROR(10200, "list task type error", "查询任务类型列表错误"),
DELETE_TASK_TYPE_ERROR(10200, "delete task type error", "删除任务类型错误"), DELETE_TASK_TYPE_ERROR(10200, "delete task type error", "删除任务类型错误"),
ADD_TASK_TYPE_ERROR(10200, "add task type error", "添加任务类型错误"), ADD_TASK_TYPE_ERROR(10200, "add task type error", "添加任务类型错误"),
CREATE_PROCESS_DEFINITION_LOG_ERROR(10201, "Create process definition log error", "创建 process definition log 对象失败"),
PARSE_SCHEDULE_PARAM_ERROR(10202, "Parse schedule parameter error, {0}", "解析 schedule 参数错误, {0}"),
SCHEDULE_NOT_EXISTS(10023, "schedule {0} does not exist", "调度 id {0} 不存在"),
SCHEDULE_ALREADY_EXISTS(10024, "workflow {0} schedule {1} already exist, please update or delete it",
"工作流 {0} 的定时 {1} 已经存在,请更新或删除"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
@ -235,30 +265,39 @@ public enum Status {
RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"), RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"),
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"), UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"),
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!",
"资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists !", "资源文件[{0}]不存在"), RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists !", "资源文件[{0}]不存在"),
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"), UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"), RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"), PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"),
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"), RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016,
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"), "resource not exist or no permission,please view the task node and remove error resource",
"请检查任务节点并移除无权限或者已删除的资源"),
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified",
"资源文件已授权其他用户[{0}],后缀不允许修改"),
RESOURCE_HAS_FOLDER(20018, "There are files or folders in the current directory:{0}", "当前目录下有文件或文件夹[{0}]"), RESOURCE_HAS_FOLDER(20018, "There are files or folders in the current directory:{0}", "当前目录下有文件或文件夹[{0}]"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"), USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"), USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"),
PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist", "工作流实例[{0}]不存在"), PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist", "工作流实例[{0}]不存在"),
PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists", "工作流实例[{0}]已存在"), PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists", "工作流实例[{0}]已存在"),
PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist", "工作流定义[{0}]不存在"), PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist", "工作流定义[{0}]不存在"),
PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} process version {1} not online", "工作流定义[{0}] 工作流版本[{1}]不是上线状态"), PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} process version {1} not online",
"工作流定义[{0}] 工作流版本[{1}]不是上线状态"),
SUB_PROCESS_DEFINE_NOT_RELEASE(50004, "exist sub process definition not online", "存在子工作流定义不是上线状态"), SUB_PROCESS_DEFINE_NOT_RELEASE(50004, "exist sub process definition not online", "存在子工作流定义不是上线状态"),
PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}", "工作流实例[{0}]的状态已经是[{1}]"), PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}",
PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation", "工作流实例[{0}]的状态是[{1}],无法执行[{2}]操作"), "工作流实例[{0}]的状态已经是[{1}]"),
PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006,
"the status of process instance {0} is {1},Cannot perform {2} operation",
"工作流实例[{0}]的状态是[{1}],无法执行[{2}]操作"),
SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist", "子工作流实例不存在"), SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist", "子工作流实例不存在"),
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"), PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"),
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."), PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...",
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"), "工作流实例[{0}]正在执行命令,请稍等..."),
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance",
"工作流实例[{0}]不是子工作流实例"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"), TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"), COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"), COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"),
@ -267,26 +306,30 @@ public enum Status {
PROCESS_INSTANCE_ERROR(50014, "process instance delete error: {0}", "工作流实例删除[{0}]错误"), PROCESS_INSTANCE_ERROR(50014, "process instance delete error: {0}", "工作流实例删除[{0}]错误"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"), EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "工作流定义错误"), CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "工作流定义错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"), QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,
"query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"), DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"), DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"), PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"), PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition [{0}] is already online", "工作流定义[{0}]已上线"), PROCESS_DEFINE_STATE_ONLINE(50021, "process definition [{0}] is already online", "工作流定义[{0}]已上线"),
DELETE_PROCESS_DEFINE_BY_CODE_ERROR(50022, "delete process definition by code error", "删除工作流定义错误"), DELETE_PROCESS_DEFINE_BY_CODE_ERROR(50022, "delete process definition by code error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already online", "调度配置[{0}]已上线"), SCHEDULE_STATE_ONLINE(50023, "the status of schedule {0} is already online", "调度配置[{0}]已上线"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"), DELETE_SCHEDULE_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"), BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"),
BATCH_DELETE_PROCESS_DEFINE_BY_CODES_ERROR(50026, "batch delete process definition by codes {0} error", "批量删除工作流定义[{0}]错误"), BATCH_DELETE_PROCESS_DEFINE_BY_CODES_ERROR(50026, "batch delete process definition by codes {0} error",
"批量删除工作流定义[{0}]错误"),
DELETE_PROCESS_DEFINE_BY_CODES_ERROR(50026, "delete process definition by codes {0} error", "删除工作流定义[{0}]错误"), DELETE_PROCESS_DEFINE_BY_CODES_ERROR(50026, "delete process definition by codes {0} error", "删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"), TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.",
"没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"), EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"),
BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"), BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"), IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"),
TASK_DEFINE_NOT_EXIST(50030, "task definition [{0}] does not exist", "任务定义[{0}]不存在"), TASK_DEFINE_NOT_EXIST(50030, "task definition [{0}] does not exist", "任务定义[{0}]不存在"),
CREATE_PROCESS_TASK_RELATION_ERROR(50032, "create process task relation error", "创建工作流任务关系错误"), CREATE_PROCESS_TASK_RELATION_ERROR(50032, "create process task relation error", "创建工作流任务关系错误"),
PROCESS_TASK_RELATION_NOT_EXIST(50033, "process task relation [{0}] does not exist", "工作流任务关系[{0}]不存在"), PROCESS_TASK_RELATION_NOT_EXIST(50033, "process task relation [{0}] does not exist", "工作流任务关系[{0}]不存在"),
PROCESS_TASK_RELATION_EXIST(50034, "process task relation is already exist, processCode:[{0}]", "工作流任务关系已存在, processCode:[{0}]"), PROCESS_TASK_RELATION_EXIST(50034, "process task relation is already exist, processCode:[{0}]",
"工作流任务关系已存在, processCode:[{0}]"),
PROCESS_DAG_IS_EMPTY(50035, "process dag is empty", "工作流dag是空"), PROCESS_DAG_IS_EMPTY(50035, "process dag is empty", "工作流dag是空"),
CHECK_PROCESS_TASK_RELATION_ERROR(50036, "check process task relation error", "工作流任务关系参数错误"), CHECK_PROCESS_TASK_RELATION_ERROR(50036, "check process task relation error", "工作流任务关系参数错误"),
CREATE_TASK_DEFINITION_ERROR(50037, "create task definition error", "创建任务错误"), CREATE_TASK_DEFINITION_ERROR(50037, "create task definition error", "创建任务错误"),
@ -329,7 +372,6 @@ public enum Status {
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"), ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"),
QUERY_ACCESSTOKEN_BY_USER_ERROR(70016, "query access token by user error", "查询访问指定用户的token错误"), QUERY_ACCESSTOKEN_BY_USER_ERROR(70016, "query access token by user error", "查询访问指定用户的token错误"),
COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"), COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"),
NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"), NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"),
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"), START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"),
@ -340,9 +382,11 @@ public enum Status {
// audit log // audit log
QUERY_AUDIT_LOG_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"), QUERY_AUDIT_LOG_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"),
//plugin // plugin
PLUGIN_NOT_A_UI_COMPONENT(110001, "query plugin error, this plugin has no UI component", "查询插件错误,此插件无UI组件"), PLUGIN_NOT_A_UI_COMPONENT(110001, "query plugin error, this plugin has no UI component", "查询插件错误,此插件无UI组件"),
QUERY_PLUGINS_RESULT_IS_NULL(110002, "query alarm plugins result is empty, please check the startup status of the alarm component and confirm that the relevant alarm plugin is successfully registered", "查询告警插件为空, 请检查告警组件启动状态并确认相关告警插件已注册成功"), QUERY_PLUGINS_RESULT_IS_NULL(110002,
"query alarm plugins result is empty, please check the startup status of the alarm component and confirm that the relevant alarm plugin is successfully registered",
"查询告警插件为空, 请检查告警组件启动状态并确认相关告警插件已注册成功"),
QUERY_PLUGINS_ERROR(110003, "query plugins error", "查询插件错误"), QUERY_PLUGINS_ERROR(110003, "query plugins error", "查询插件错误"),
QUERY_PLUGIN_DETAIL_RESULT_IS_NULL(110004, "query plugin detail result is null", "查询插件详情结果为空"), QUERY_PLUGIN_DETAIL_RESULT_IS_NULL(110004, "query plugin detail result is null", "查询插件详情结果为空"),
@ -353,7 +397,8 @@ public enum Status {
QUERY_ALL_ALERT_PLUGIN_INSTANCE_ERROR(110009, "query all alert plugin instance error", "查询所有告警实例失败"), QUERY_ALL_ALERT_PLUGIN_INSTANCE_ERROR(110009, "query all alert plugin instance error", "查询所有告警实例失败"),
PLUGIN_INSTANCE_ALREADY_EXIT(110010, "plugin instance already exit", "该告警插件实例已存在"), PLUGIN_INSTANCE_ALREADY_EXIT(110010, "plugin instance already exit", "该告警插件实例已存在"),
LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR(110011, "query plugin instance page error", "分页查询告警实例失败"), LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR(110011, "query plugin instance page error", "分页查询告警实例失败"),
DELETE_ALERT_PLUGIN_INSTANCE_ERROR_HAS_ALERT_GROUP_ASSOCIATED(110012, "failed to delete the alert instance, there is an alarm group associated with this alert instance", DELETE_ALERT_PLUGIN_INSTANCE_ERROR_HAS_ALERT_GROUP_ASSOCIATED(110012,
"failed to delete the alert instance, there is an alarm group associated with this alert instance",
"删除告警实例失败,存在与此告警实例关联的警报组"), "删除告警实例失败,存在与此告警实例关联的警报组"),
PROCESS_DEFINITION_VERSION_IS_USED(110013, "this process definition version is used", "此工作流定义版本被使用"), PROCESS_DEFINITION_VERSION_IS_USED(110013, "this process definition version is used", "此工作流定义版本被使用"),
@ -363,9 +408,10 @@ public enum Status {
ENVIRONMENT_CONFIG_IS_NULL(120004, "this environment config shouldn't be empty.", "环境配置信息不能为空"), ENVIRONMENT_CONFIG_IS_NULL(120004, "this environment config shouldn't be empty.", "环境配置信息不能为空"),
UPDATE_ENVIRONMENT_ERROR(120005, "update environment [{0}] info error", "更新环境[{0}]信息失败"), UPDATE_ENVIRONMENT_ERROR(120005, "update environment [{0}] info error", "更新环境[{0}]信息失败"),
DELETE_ENVIRONMENT_ERROR(120006, "delete environment error", "删除环境信息失败"), DELETE_ENVIRONMENT_ERROR(120006, "delete environment error", "删除环境信息失败"),
DELETE_ENVIRONMENT_RELATED_TASK_EXISTS(120007, "this environment has been used in tasks,so you can't delete it.", "该环境已经被任务使用,所以不能删除该环境信息"), DELETE_ENVIRONMENT_RELATED_TASK_EXISTS(120007, "this environment has been used in tasks,so you can't delete it.",
QUERY_ENVIRONMENT_BY_NAME_ERROR(1200008, "not found environment [{0}] ", "查询环境名称[{0}]信息不存在"), "该环境已经被任务使用,所以不能删除该环境信息"),
QUERY_ENVIRONMENT_BY_CODE_ERROR(1200009, "not found environment [{0}] ", "查询环境编码[{0}]不存在"), QUERY_ENVIRONMENT_BY_NAME_ERROR(1200008, "not found environment name [{0}] ", "查询环境名称[{0}]不存在"),
QUERY_ENVIRONMENT_BY_CODE_ERROR(1200009, "not found environment code [{0}] ", "查询环境编码[{0}]不存在"),
QUERY_ENVIRONMENT_ERROR(1200010, "login user query environment error", "分页查询环境列表错误"), QUERY_ENVIRONMENT_ERROR(1200010, "login user query environment error", "分页查询环境列表错误"),
VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"), VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"),
GET_RULE_FORM_CREATE_JSON_ERROR(1200012, "get rule form create json error", "获取规则 FROM-CREATE-JSON 错误"), GET_RULE_FORM_CREATE_JSON_ERROR(1200012, "get rule form create json error", "获取规则 FROM-CREATE-JSON 错误"),
@ -383,16 +429,19 @@ public enum Status {
CLUSTER_CONFIG_IS_NULL(120023, "this cluster config shouldn't be empty.", "集群配置信息不能为空"), CLUSTER_CONFIG_IS_NULL(120023, "this cluster config shouldn't be empty.", "集群配置信息不能为空"),
UPDATE_CLUSTER_ERROR(120024, "update cluster [{0}] info error", "更新集群[{0}]信息失败"), UPDATE_CLUSTER_ERROR(120024, "update cluster [{0}] info error", "更新集群[{0}]信息失败"),
DELETE_CLUSTER_ERROR(120025, "delete cluster error", "删除集群信息失败"), DELETE_CLUSTER_ERROR(120025, "delete cluster error", "删除集群信息失败"),
DELETE_CLUSTER_RELATED_TASK_EXISTS(120026, "this cluster has been used in tasks,so you can't delete it.", "该集群已经被任务使用,所以不能删除该集群信息"), DELETE_CLUSTER_RELATED_TASK_EXISTS(120026, "this cluster has been used in tasks,so you can't delete it.",
"该集群已经被任务使用,所以不能删除该集群信息"),
QUERY_CLUSTER_BY_NAME_ERROR(1200027, "not found cluster [{0}] ", "查询集群名称[{0}]信息不存在"), QUERY_CLUSTER_BY_NAME_ERROR(1200027, "not found cluster [{0}] ", "查询集群名称[{0}]信息不存在"),
QUERY_CLUSTER_BY_CODE_ERROR(1200028, "not found cluster [{0}] ", "查询集群编码[{0}]不存在"), QUERY_CLUSTER_BY_CODE_ERROR(1200028, "not found cluster [{0}] ", "查询集群编码[{0}]不存在"),
QUERY_CLUSTER_ERROR(1200029, "login user query cluster error", "分页查询集群列表错误"), QUERY_CLUSTER_ERROR(1200029, "login user query cluster error", "分页查询集群列表错误"),
VERIFY_CLUSTER_ERROR(1200030, "verify cluster error", "验证集群信息错误"), VERIFY_CLUSTER_ERROR(1200030, "verify cluster error", "验证集群信息错误"),
CLUSTER_PROCESS_DEFINITIONS_IS_INVALID(1200031, "cluster worker groups is invalid format", "集群关联的工作组参数解析错误"), CLUSTER_PROCESS_DEFINITIONS_IS_INVALID(1200031, "cluster worker groups is invalid format", "集群关联的工作组参数解析错误"),
UPDATE_CLUSTER_PROCESS_DEFINITION_RELATION_ERROR(1200032, "You can't modify the process definition, because the process definition [{0}] and this cluster [{1}] already be used in the task [{2}]", UPDATE_CLUSTER_PROCESS_DEFINITION_RELATION_ERROR(1200032,
"您不能修改集群选项,因为该工作流组 [{0}] 和 该集群 [{1}] 已经被用在任务 [{2}] 中"), "You can't modify the process definition, because the process definition [{0}] and this cluster [{1}] already be used in the task [{2}]",
"您不能修改集群选项,因为该工作流组 [{0}] 和 该集群 [{1}] 已经被用在任务 [{2}] 中"),
CLUSTER_NOT_EXISTS(120033, "this cluster can not found in db.", "集群配置数据库里查询不到为空"), CLUSTER_NOT_EXISTS(120033, "this cluster can not found in db.", "集群配置数据库里查询不到为空"),
DELETE_CLUSTER_RELATED_NAMESPACE_EXISTS(120034, "this cluster has been used in namespace,so you can't delete it.", "该集群已经被命名空间使用,所以不能删除该集群信息"), DELETE_CLUSTER_RELATED_NAMESPACE_EXISTS(120034, "this cluster has been used in namespace,so you can't delete it.",
"该集群已经被命名空间使用,所以不能删除该集群信息"),
TASK_GROUP_NAME_EXSIT(130001, "this task group name is repeated in a project", "该任务组名称在一个项目中已经使用"), TASK_GROUP_NAME_EXSIT(130001, "this task group name is repeated in a project", "该任务组名称在一个项目中已经使用"),
TASK_GROUP_SIZE_ERROR(130002, "task group size error", "任务组大小应该为大于1的整数"), TASK_GROUP_SIZE_ERROR(130002, "task group size error", "任务组大小应该为大于1的整数"),
@ -409,7 +458,8 @@ public enum Status {
QUERY_TASK_GROUP_QUEUE_LIST_ERROR(130013, "query task group queue list error", "查询任务组队列列表错误"), QUERY_TASK_GROUP_QUEUE_LIST_ERROR(130013, "query task group queue list error", "查询任务组队列列表错误"),
TASK_GROUP_CACHE_START_FAILED(130014, "cache start failed", "任务组相关的缓存启动失败"), TASK_GROUP_CACHE_START_FAILED(130014, "cache start failed", "任务组相关的缓存启动失败"),
ENVIRONMENT_WORKER_GROUPS_IS_INVALID(130015, "environment worker groups is invalid format", "环境关联的工作组参数解析错误"), ENVIRONMENT_WORKER_GROUPS_IS_INVALID(130015, "environment worker groups is invalid format", "环境关联的工作组参数解析错误"),
UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR(130016, "You can't modify the worker group, because the worker group [{0}] and this environment [{1}] already be used in the task [{2}]", UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR(130016,
"You can't modify the worker group, because the worker group [{0}] and this environment [{1}] already be used in the task [{2}]",
"您不能修改工作组选项,因为该工作组 [{0}] 和 该环境 [{1}] 已经被用在任务 [{2}] 中"), "您不能修改工作组选项,因为该工作组 [{0}] 和 该环境 [{1}] 已经被用在任务 [{2}] 中"),
TASK_GROUP_QUEUE_ALREADY_START(130017, "task group queue already start", "节点已经获取任务组资源"), TASK_GROUP_QUEUE_ALREADY_START(130017, "task group queue already start", "节点已经获取任务组资源"),
TASK_GROUP_STATUS_CLOSED(130018, "The task group has been closed.", "任务组已经被关闭"), TASK_GROUP_STATUS_CLOSED(130018, "The task group has been closed.", "任务组已经被关闭"),
@ -418,7 +468,8 @@ public enum Status {
NOT_ALLOW_TO_DELETE_DEFAULT_ALARM_GROUP(130030, "Not allow to delete the default alarm group ", "不能删除默认告警组"), NOT_ALLOW_TO_DELETE_DEFAULT_ALARM_GROUP(130030, "Not allow to delete the default alarm group ", "不能删除默认告警组"),
TIME_ZONE_ILLEGAL(130031, "time zone [{0}] is illegal", "时区参数 [{0}] 不合法"), TIME_ZONE_ILLEGAL(130031, "time zone [{0}] is illegal", "时区参数 [{0}] 不合法"),
QUERY_K8S_NAMESPACE_LIST_PAGING_ERROR(1300001, "login user query k8s namespace list paging error", "分页查询k8s名称空间列表错误"), QUERY_K8S_NAMESPACE_LIST_PAGING_ERROR(1300001, "login user query k8s namespace list paging error",
"分页查询k8s名称空间列表错误"),
K8S_NAMESPACE_EXIST(1300002, "k8s namespace {0} already exists", "k8s命名空间[{0}]已存在"), K8S_NAMESPACE_EXIST(1300002, "k8s namespace {0} already exists", "k8s命名空间[{0}]已存在"),
CREATE_K8S_NAMESPACE_ERROR(1300003, "create k8s namespace error", "创建k8s命名空间错误"), CREATE_K8S_NAMESPACE_ERROR(1300003, "create k8s namespace error", "创建k8s命名空间错误"),
UPDATE_K8S_NAMESPACE_ERROR(1300004, "update k8s namespace error", "更新k8s命名空间信息错误"), UPDATE_K8S_NAMESPACE_ERROR(1300004, "update k8s namespace error", "更新k8s命名空间信息错误"),
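The schedule-related codes added above (CREATE_PROCESS_DEFINITION_LOG_ERROR, PARSE_SCHEDULE_PARAM_ERROR, SCHEDULE_NOT_EXISTS, SCHEDULE_ALREADY_EXISTS) are surfaced through ServiceException in the V2 services further down in this commit, not through a returned result map. A minimal sketch of that pattern, assuming only the ServiceException(Status, Object...) constructor already used elsewhere in this patch; the guard and the mapper lookup below are illustrative, not code from this commit:

    // illustrative only: raise the new status when a schedule already exists for a workflow
    Schedule existing = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode); // hypothetical lookup
    if (existing != null) {
        // the two arguments fill the {0}/{1} placeholders of SCHEDULE_ALREADY_EXISTS
        throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS, processDefinitionCode, existing.getId());
    }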

19
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java

@@ -63,7 +63,8 @@ public interface ExecutorService {
                                           FailureStrategy failureStrategy, String startNodeList,
                                           TaskDependType taskDependType, WarningType warningType, int warningGroupId,
                                           RunMode runMode,
                                           Priority processInstancePriority, String workerGroup, Long environmentCode,
                                           Integer timeout,
                                           Map<String, String> startParams, Integer expectedParallelismNumber,
                                           int dryRun, int testFlag,
                                           ComplementDependentMode complementDependentMode);
@@ -74,10 +75,10 @@ public interface ExecutorService {
     * @param projectCode project code
     * @param processDefinition process definition
     * @param processDefineCode process definition code
     * @param version process definition version
     */
    void checkProcessDefinitionValid(long projectCode, ProcessDefinition processDefinition, long processDefineCode,
                                     Integer version);

    /**
     * do action to process instance: pause, stop, repeat, recover from pause, recover from stop
@@ -132,9 +133,9 @@ public interface ExecutorService {
     * @return execute process instance code
     */
    Map<String, Object> execStreamTaskInstance(User loginUser, long projectCode,
                                               long taskDefinitionCode, int taskDefinitionVersion,
                                               int warningGroupId,
                                               String workerGroup, Long environmentCode,
                                               Map<String, String> startParams,
                                               int dryRun);
}
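checkProcessDefinitionValid now returns void and reports failure by throwing, so callers stop branching on a returned status map. A hedged caller-side sketch; the surrounding variables and the logger are hypothetical, only the service call itself comes from this interface:

    // previously: result = checkProcessDefinitionValid(...); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; }
    try {
        executorService.checkProcessDefinitionValid(projectCode, processDefinition,
                processDefinition.getCode(), processDefinition.getVersion());
    } catch (ServiceException e) {
        // the exception message is built from the Status that used to be put into the map,
        // e.g. PROCESS_DEFINE_NOT_EXIST, PROCESS_DEFINE_NOT_RELEASE or SUB_PROCESS_DEFINE_NOT_RELEASE
        logger.warn("process definition {} is not executable: {}", processDefinition.getCode(), e.getMessage());
    }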

51
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

@@ -17,6 +17,9 @@
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.dto.workflow.WorkflowCreateRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowFilterRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowUpdateRequest;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
@@ -67,6 +70,15 @@ public interface ProcessDefinitionService {
                                             String otherParamsJson,
                                             ProcessExecutionTypeEnum executionType);

    /**
     * create process definition V2
     *
     * @param loginUser login user
     * @param workflowCreateRequest the new workflow object will be created
     * @return New ProcessDefinition object created just now
     */
    ProcessDefinition createSingleProcessDefinition(User loginUser, WorkflowCreateRequest workflowCreateRequest);

    /**
     * query process definition list
     *
@@ -107,6 +119,16 @@ public interface ProcessDefinitionService {
                                               Integer pageNo,
                                               Integer pageSize);

    /**
     * Filter resource process definitions
     *
     * @param loginUser login user
     * @param workflowFilterRequest workflow filter requests
     * @return List process definition
     */
    PageInfo<ProcessDefinition> filterProcessDefinition(User loginUser,
                                                        WorkflowFilterRequest workflowFilterRequest);

    /**
     * query detail of process definition
     *
@@ -120,6 +142,16 @@ public interface ProcessDefinitionService {
                                                 long projectCode,
                                                 long code);

    /**
     * Get resource workflow
     *
     * @param loginUser login user
     * @param code process definition code
     * @return Process definition Object
     */
    ProcessDefinition getProcessDefinition(User loginUser,
                                           long code);

    /**
     * query detail of process definition
     *
@@ -208,13 +240,10 @@ public interface ProcessDefinitionService {
     * delete process definition by code
     *
     * @param loginUser login user
     * @param code process definition code
     */
    void deleteProcessDefinitionByCode(User loginUser,
                                       long code);

    /**
     * release process definition: online / offline
@@ -430,6 +459,18 @@ public interface ProcessDefinitionService {
                                             String otherParamsJson,
                                             ProcessExecutionTypeEnum executionType);

    /**
     * update process definition basic info, not including task definition, task relation and location.
     *
     * @param loginUser login user
     * @param workflowCode workflow resource code you want to update
     * @param workflowUpdateRequest workflow update requests
     * @return ProcessDefinition instance
     */
    ProcessDefinition updateSingleProcessDefinition(User loginUser,
                                                    long workflowCode,
                                                    WorkflowUpdateRequest workflowUpdateRequest);

    /**
     * release process definition and schedule
     *
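The additions above form the single-resource workflow API: create, filter, get, update and delete each operate on one ProcessDefinition per request and return the latest object. A hedged usage sketch; the setter names on WorkflowCreateRequest and WorkflowUpdateRequest are assumptions, since those DTO fields are defined elsewhere in this commit:

    WorkflowCreateRequest createRequest = new WorkflowCreateRequest();
    createRequest.setName("demo_workflow");                 // assumed setter
    ProcessDefinition workflow =
            processDefinitionService.createSingleProcessDefinition(loginUser, createRequest);

    ProcessDefinition fetched = processDefinitionService.getProcessDefinition(loginUser, workflow.getCode());

    WorkflowUpdateRequest updateRequest = new WorkflowUpdateRequest();
    updateRequest.setDescription("updated via V2 API");     // assumed setter
    ProcessDefinition updated =
            processDefinitionService.updateSingleProcessDefinition(loginUser, fetched.getCode(), updateRequest);

    // deletion no longer needs the project code; failures surface as ServiceException
    processDefinitionService.deleteProcessDefinitionByCode(loginUser, updated.getCode());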

52
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java

@@ -17,6 +17,10 @@
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.dto.schedule.ScheduleCreateRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleFilterRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleUpdateRequest;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
@@ -59,6 +63,16 @@ public interface SchedulerService {
                            String workerGroup,
                            Long environmentCode);

    /**
     * save schedule V2
     *
     * @param loginUser login user
     * @param scheduleCreateRequest the new schedule object will be created
     * @return Schedule object
     */
    Schedule createSchedulesV2(User loginUser,
                               ScheduleCreateRequest scheduleCreateRequest);

    /**
     * updateProcessInstance schedule
     *
@@ -85,6 +99,28 @@ public interface SchedulerService {
                            String workerGroup,
                            Long environmentCode);

    /**
     * update schedule object V2
     *
     * @param loginUser login user
     * @param scheduleId scheduler id
     * @param scheduleUpdateRequest the schedule object will be updated
     * @return Schedule object
     */
    Schedule updateSchedulesV2(User loginUser,
                               Integer scheduleId,
                               ScheduleUpdateRequest scheduleUpdateRequest);

    /**
     * get schedule object
     *
     * @param loginUser login user
     * @param scheduleId scheduler id
     * @return Schedule object
     */
    Schedule getSchedule(User loginUser,
                         Integer scheduleId);

    /**
     * set schedule online or offline
     *
@@ -115,6 +151,16 @@ public interface SchedulerService {
    List<Schedule> queryScheduleByProcessDefinitionCodes(List<Long> processDefinitionCodes);

    /**
     * query schedule V2
     *
     * @param loginUser login user
     * @param scheduleFilterRequest schedule filter request
     * @return schedule list page
     */
    PageInfo<Schedule> filterSchedules(User loginUser,
                                       ScheduleFilterRequest scheduleFilterRequest);

    /**
     * query schedule list
     *
@@ -137,11 +183,9 @@ public interface SchedulerService {
     * delete schedule by id
     *
     * @param loginUser login user
     * @param scheduleId schedule id
     */
    void deleteSchedulesById(User loginUser, Integer scheduleId);

    /**
     * preview schedule
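The schedule side mirrors the workflow API: createSchedulesV2, getSchedule, updateSchedulesV2, filterSchedules and deleteSchedulesById each touch a single Schedule and return its latest state. A hedged usage sketch; the request setters are assumptions, the service calls are the declarations above:

    ScheduleCreateRequest createRequest = new ScheduleCreateRequest();
    createRequest.setProcessDefinitionCode(workflowCode);   // assumed setter
    createRequest.setCrontab("0 0 0 * * ? *");              // assumed setter
    Schedule schedule = schedulerService.createSchedulesV2(loginUser, createRequest);

    ScheduleUpdateRequest updateRequest = new ScheduleUpdateRequest();
    updateRequest.setCrontab("0 0 6 * * ? *");              // assumed setter
    Schedule updated = schedulerService.updateSchedulesV2(loginUser, schedule.getId(), updateRequest);

    PageInfo<Schedule> page = schedulerService.filterSchedules(loginUser, new ScheduleFilterRequest());
    schedulerService.deleteSchedulesById(loginUser, updated.getId());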

49
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java

@@ -195,11 +195,8 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
        // check process define release state
        ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
        this.checkProcessDefinitionValid(projectCode, processDefinition, processDefinitionCode,
                processDefinition.getVersion());

        if (!checkTenantSuitable(processDefinition)) {
            logger.error(
@@ -291,32 +288,23 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
     * @param projectCode project code
     * @param processDefinition process definition
     * @param processDefineCode process definition code
     * @param version process instance version
     */
    @Override
    public void checkProcessDefinitionValid(long projectCode, ProcessDefinition processDefinition,
                                            long processDefineCode, Integer version) {
        // check process definition exists
        if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
            throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefineCode));
        }
        // check process definition online
        if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
            throw new ServiceException(Status.PROCESS_DEFINE_NOT_RELEASE, String.valueOf(processDefineCode), version);
        }
        // check sub process definition online
        if (!checkSubProcessDefinitionValid(processDefinition)) {
            throw new ServiceException(Status.SUB_PROCESS_DEFINE_NOT_RELEASE);
        }
    }

    /**
@@ -392,13 +380,8 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
                processInstance.getProcessDefinitionVersion());
        processDefinition.setReleaseState(ReleaseState.ONLINE);
        if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) {
            this.checkProcessDefinitionValid(projectCode, processDefinition, processInstance.getProcessDefinitionCode(),
                    processInstance.getProcessDefinitionVersion());
        }

        result = checkExecuteType(processInstance, executeType);
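With the validation rewritten to throw, code that previously inspected the returned map has to assert on the exception instead. A test-style sketch of the new contract, assuming JUnit 5 and hypothetical fixtures:

    // offlineDefinition is a hypothetical fixture whose release state is OFFLINE
    ServiceException exception = Assertions.assertThrows(ServiceException.class,
            () -> executorService.checkProcessDefinitionValid(projectCode, offlineDefinition,
                    offlineDefinition.getCode(), offlineDefinition.getVersion()));
    // the message comes from Status.PROCESS_DEFINE_NOT_RELEASE with its {0}/{1} parameters filled in
    Assertions.assertNotNull(exception.getMessage());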

262
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@ -43,6 +43,9 @@ import org.apache.dolphinscheduler.api.dto.DagDataSchedule;
import org.apache.dolphinscheduler.api.dto.ScheduleParam; import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.dto.treeview.Instance; import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowCreateRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowFilterRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowUpdateRequest;
import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
@ -292,6 +295,82 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs, otherParamsJson); return createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs, otherParamsJson);
} }
private void createWorkflowValid(User user, ProcessDefinition processDefinition) {
Project project = projectMapper.queryByCode(processDefinition.getProjectCode());
if (project == null) {
throw new ServiceException(Status.PROJECT_NOT_FOUND, processDefinition.getProjectCode());
}
// check user access for project
projectService.checkProjectAndAuthThrowException(user, project, WORKFLOW_CREATE);
if (checkDescriptionLength(processDefinition.getDescription())) {
throw new ServiceException(Status.DESCRIPTION_TOO_LONG_ERROR);
}
// check whether the new process define name exist
ProcessDefinition definition =
processDefinitionMapper.verifyByDefineName(project.getCode(), processDefinition.getName());
if (definition != null) {
throw new ServiceException(Status.PROCESS_DEFINITION_NAME_EXIST, processDefinition.getName());
}
this.getTenantId(processDefinition);
}
private int getTenantId(ProcessDefinition processDefinition) {
int tenantId = -1;
if (!Constants.DEFAULT.equals(processDefinition.getTenantCode())) {
Tenant tenant = tenantMapper.queryByTenantCode(processDefinition.getTenantCode());
if (tenant == null) {
throw new ServiceException(Status.TENANT_NOT_EXIST);
}
tenantId = tenant.getId();
}
return tenantId;
}
private void syncObj2Log(User user, ProcessDefinition processDefinition) {
ProcessDefinitionLog processDefinitionLog = new ProcessDefinitionLog(processDefinition);
processDefinitionLog.setOperator(user.getId());
int result = processDefinitionLogMapper.insert(processDefinitionLog);
if (result <= 0) {
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_LOG_ERROR);
}
}
/**
* create single process definition
*
* @param loginUser login user
* @param workflowCreateRequest the new workflow object will be created
* @return New ProcessDefinition object created just now
*/
@Override
@Transactional
public ProcessDefinition createSingleProcessDefinition(User loginUser,
WorkflowCreateRequest workflowCreateRequest) {
ProcessDefinition processDefinition = workflowCreateRequest.convert2ProcessDefinition();
this.createWorkflowValid(loginUser, processDefinition);
long processDefinitionCode;
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
processDefinition.setTenantId(this.getTenantId(processDefinition));
processDefinition.setCode(processDefinitionCode);
processDefinition.setUserId(loginUser.getId());
int create = processDefinitionMapper.insert(processDefinition);
if (create <= 0) {
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
this.syncObj2Log(loginUser, processDefinition);
return processDefinition;
}
protected Map<String, Object> createDagDefine(User loginUser, protected Map<String, Object> createDagDefine(User loginUser,
List<ProcessTaskRelationLog> taskRelationList, List<ProcessTaskRelationLog> taskRelationList,
ProcessDefinition processDefinition, ProcessDefinition processDefinition,
@ -515,6 +594,46 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return pageInfo; return pageInfo;
} }
/**
* Filter resource process definitions
*
* @param loginUser login user
* @param workflowFilterRequest workflow filter requests
* @return List process definition
*/
@Override
public PageInfo<ProcessDefinition> filterProcessDefinition(User loginUser,
WorkflowFilterRequest workflowFilterRequest) {
ProcessDefinition processDefinition = workflowFilterRequest.convert2ProcessDefinition();
if (workflowFilterRequest.getProjectName() != null) {
Project project = projectMapper.queryByName(workflowFilterRequest.getProjectName());
// check user access for project
projectService.checkProjectAndAuthThrowException(loginUser, project, WORKFLOW_DEFINITION);
processDefinition.setProjectCode(project.getCode());
}
Page<ProcessDefinition> page =
new Page<>(workflowFilterRequest.getPageNo(), workflowFilterRequest.getPageSize());
IPage<ProcessDefinition> processDefinitionIPage =
processDefinitionMapper.filterProcessDefinition(page, processDefinition);
List<ProcessDefinition> records = processDefinitionIPage.getRecords();
for (ProcessDefinition pd : records) {
ProcessDefinitionLog processDefinitionLog =
processDefinitionLogMapper.queryByDefinitionCodeAndVersion(pd.getCode(), pd.getVersion());
User user = userMapper.selectById(processDefinitionLog.getOperator());
pd.setModifyBy(user.getUserName());
}
processDefinitionIPage.setRecords(records);
PageInfo<ProcessDefinition> pageInfo =
new PageInfo<>(workflowFilterRequest.getPageNo(), workflowFilterRequest.getPageSize());
pageInfo.setTotal((int) processDefinitionIPage.getTotal());
pageInfo.setTotalList(processDefinitionIPage.getRecords());
return pageInfo;
}
/** /**
* query detail of process definition * query detail of process definition
* *
@ -549,6 +668,32 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return result; return result;
} }
/**
* query detail of process definition
*
* @param loginUser login user
* @param code process definition code
* @return process definition detail
*/
@Override
public ProcessDefinition getProcessDefinition(User loginUser, long code) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null) {
throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
}
Project project = projectMapper.queryByCode(processDefinition.getProjectCode());
// check user access for project
projectService.checkProjectAndAuthThrowException(loginUser, project, WORKFLOW_DEFINITION);
Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId());
if (tenant != null) {
processDefinition.setTenantCode(tenant.getTenantCode());
}
return processDefinition;
}
@Override @Override
public Map<String, Object> queryProcessDefinitionByName(User loginUser, long projectCode, String name) { public Map<String, Object> queryProcessDefinitionByName(User loginUser, long projectCode, String name) {
Project project = projectMapper.queryByCode(projectCode); Project project = projectMapper.queryByCode(projectCode);
@ -815,8 +960,6 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
private void processDefinitionUsedInOtherTaskValid(ProcessDefinition processDefinition) { private void processDefinitionUsedInOtherTaskValid(ProcessDefinition processDefinition) {
// check process definition is already online // check process definition is already online
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
logger.warn("Process definition can not be deleted due to {}, processDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), processDefinition.getCode());
throw new ServiceException(Status.PROCESS_DEFINE_STATE_ONLINE, processDefinition.getName()); throw new ServiceException(Status.PROCESS_DEFINE_STATE_ONLINE, processDefinition.getName());
} }
@ -824,9 +967,6 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
List<ProcessInstance> processInstances = processInstanceService List<ProcessInstance> processInstances = processInstanceService
.queryByProcessDefineCodeAndStatus(processDefinition.getCode(), Constants.NOT_TERMINATED_STATES); .queryByProcessDefineCodeAndStatus(processDefinition.getCode(), Constants.NOT_TERMINATED_STATES);
if (CollectionUtils.isNotEmpty(processInstances)) { if (CollectionUtils.isNotEmpty(processInstances)) {
logger.warn(
"Process definition can not be deleted because there are {} executing process instances, processDefinitionCode:{}",
processInstances.size(), processDefinition.getCode());
throw new ServiceException(Status.DELETE_PROCESS_DEFINITION_EXECUTING_FAIL, processInstances.size()); throw new ServiceException(Status.DELETE_PROCESS_DEFINITION_EXECUTING_FAIL, processInstances.size());
} }
@ -838,9 +978,6 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
.map(task -> String.format(Constants.FORMAT_S_S_COLON, task.getProcessDefinitionName(), .map(task -> String.format(Constants.FORMAT_S_S_COLON, task.getProcessDefinitionName(),
task.getTaskName())) task.getTaskName()))
.collect(Collectors.joining(Constants.COMMA)); .collect(Collectors.joining(Constants.COMMA));
logger.warn(
"Process definition can not be deleted due to being referenced by other tasks:{}, processDefinitionCode:{}",
taskDepDetail, processDefinition.getCode());
throw new ServiceException(Status.DELETE_PROCESS_DEFINITION_USE_BY_OTHER_FAIL, taskDepDetail); throw new ServiceException(Status.DELETE_PROCESS_DEFINITION_USE_BY_OTHER_FAIL, taskDepDetail);
} }
} }
@ -849,33 +986,23 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
* delete process definition by code * delete process definition by code
* *
* @param loginUser login user * @param loginUser login user
* @param projectCode project code
* @param code process definition code * @param code process definition code
* @return delete result code
*/ */
@Override @Override
@Transactional @Transactional
public Map<String, Object> deleteProcessDefinitionByCode(User loginUser, long projectCode, long code) { public void deleteProcessDefinitionByCode(User loginUser, long code) {
Project project = projectMapper.queryByCode(projectCode);
// check user access for project
Map<String, Object> result =
-        projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_DEFINITION_DELETE);
-        if (result.get(Constants.STATUS) != Status.SUCCESS) {
-            return result;
-        }
        ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
-        if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
-            logger.error("Process definition does not exist, processCode:{}.", code);
-            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
-            return result;
+        if (processDefinition == null) {
+            throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
        }
+        Project project = projectMapper.queryByCode(processDefinition.getProjectCode());
+        // check user access for project
+        projectService.checkProjectAndAuthThrowException(loginUser, project, WORKFLOW_DEFINITION_DELETE);
        // Determine if the login user is the owner of the process definition
        if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
-            logger.warn("User does not have permission for process definition, userId:{}, processDefinitionCode:{}.",
-                    loginUser.getId(), code);
-            putMsg(result, Status.USER_NO_OPERATION_PERM);
-            return result;
+            throw new ServiceException(Status.USER_NO_OPERATION_PERM);
        }
        processDefinitionUsedInOtherTaskValid(processDefinition);
@@ -886,25 +1013,16 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
            if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) {
                int delete = scheduleMapper.deleteById(scheduleObj.getId());
                if (delete == 0) {
-                    logger.error(
-                            "Delete schedule of process definition error, processDefinitionCode:{}, scheduleId:{}.",
-                            code, scheduleObj.getId());
-                    putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
-                    throw new ServiceException(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
+                    throw new ServiceException(Status.DELETE_SCHEDULE_BY_ID_ERROR);
                }
            }
            if (scheduleObj.getReleaseState() == ReleaseState.ONLINE) {
-                logger.warn(
-                        "Process definition can not be deleted due to schedule {}, processDefinitionCode:{}, scheduleId:{}.",
-                        ReleaseState.ONLINE.getDescp(), processDefinition.getCode(), scheduleObj.getId());
-                putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, scheduleObj.getId());
-                return result;
+                throw new ServiceException(Status.SCHEDULE_STATE_ONLINE, scheduleObj.getId());
            }
        }
        int delete = processDefinitionMapper.deleteById(processDefinition.getId());
        if (delete == 0) {
            logger.error("Delete process definition error, processDefinitionCode:{}.", code);
-            putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
            throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
        }
        int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode());
@@ -913,9 +1031,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
                    "The process definition has not relation, it will be delete successfully, processDefinitionCode:{}.",
                    code);
        }
-        deleteOtherRelation(project, result, processDefinition);
-        putMsg(result, Status.SUCCESS);
-        return result;
+        deleteOtherRelation(project, new HashMap<>(), processDefinition);
    }

    /**
@@ -1119,7 +1235,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
    @Override
    @Transactional
    public Map<String, Object> importSqlProcessDefinition(User loginUser, long projectCode, MultipartFile file) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result;
        Project project = projectMapper.queryByCode(projectCode);
        result = projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_IMPORT);
        if (result.get(Constants.STATUS) != Status.SUCCESS) {
@@ -2092,7 +2208,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
     * get new Task name or Process name when copy or import operate
     * @param originalName Task or Process original name
     * @param suffix "_copy_" or "_import_"
-     * @return
+     * @return new name
     */
    public String getNewName(String originalName, String suffix) {
        StringBuilder newName = new StringBuilder();
@@ -2526,6 +2642,68 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
        return result;
    }
private void updateWorkflowValid(User user, ProcessDefinition oldProcessDefinition,
ProcessDefinition newProcessDefinition) {
// online can not permit edit
if (oldProcessDefinition.getReleaseState() == ReleaseState.ONLINE) {
throw new ServiceException(Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, oldProcessDefinition.getName());
}
Project project = projectMapper.queryByCode(oldProcessDefinition.getProjectCode());
// check user access for project
projectService.checkProjectAndAuthThrowException(user, project, WORKFLOW_UPDATE);
if (checkDescriptionLength(newProcessDefinition.getDescription())) {
throw new ServiceException(Status.DESCRIPTION_TOO_LONG_ERROR);
}
// check whether the new process define name exist
if (!oldProcessDefinition.getName().equals(newProcessDefinition.getName())) {
ProcessDefinition definition = processDefinitionMapper
.verifyByDefineName(newProcessDefinition.getProjectCode(), newProcessDefinition.getName());
if (definition != null) {
throw new ServiceException(Status.PROCESS_DEFINITION_NAME_EXIST, newProcessDefinition.getName());
}
}
}
/**
* update single resource workflow
*
* @param loginUser login user
* @param workflowCode code of the workflow resource to update
* @param workflowUpdateRequest workflow update resource object
* @return Process definition
*/
@Override
@Transactional
public ProcessDefinition updateSingleProcessDefinition(User loginUser,
long workflowCode,
WorkflowUpdateRequest workflowUpdateRequest) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(workflowCode);
// check process definition exists
if (processDefinition == null) {
throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST, workflowCode);
}
ProcessDefinition processDefinitionUpdate = workflowUpdateRequest.mergeIntoProcessDefinition(processDefinition);
this.updateWorkflowValid(loginUser, processDefinition, processDefinitionUpdate);
if (processDefinitionUpdate.getTenantCode() != null) {
Tenant tenant = tenantMapper.queryByTenantCode(processDefinitionUpdate.getTenantCode());
if (tenant == null) {
throw new ServiceException(Status.TENANT_NOT_EXIST);
}
processDefinitionUpdate.setTenantId(tenant.getId());
}
int update = processDefinitionMapper.updateById(processDefinitionUpdate);
if (update <= 0) {
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
this.syncObj2Log(loginUser, processDefinition);
return processDefinition;
}
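A minimal caller-side sketch of this single-resource update, modeled on WorkflowV2ControllerTest; the workflow code 12345L is a placeholder and the service instance is assumed to be injected by Spring.

    // Sketch only: rename a workflow through the V2 single-resource service call.
    // 12345L is a hypothetical workflow code; loginUser is an authenticated User.
    WorkflowUpdateRequest renameRequest = new WorkflowUpdateRequest();
    renameRequest.setName("workflowNameNew");   // populate only the fields being changed
    ProcessDefinition updated =
            processDefinitionService.updateSingleProcessDefinition(loginUser, 12345L, renameRequest);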
    protected Map<String, Object> updateDagSchedule(User loginUser,
                                                    long projectCode,
                                                    long processDefinitionCode,

255  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java
@@ -20,6 +20,9 @@ package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.PROJECT;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
+import org.apache.dolphinscheduler.api.dto.schedule.ScheduleCreateRequest;
+import org.apache.dolphinscheduler.api.dto.schedule.ScheduleFilterRequest;
+import org.apache.dolphinscheduler.api.dto.schedule.ScheduleUpdateRequest;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
@@ -38,11 +41,13 @@ import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import org.apache.dolphinscheduler.dao.entity.Environment;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
+import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
@@ -55,6 +60,7 @@ import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
+import java.lang.reflect.InvocationTargetException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
@@ -68,6 +74,7 @@ import java.util.stream.Collectors;
import lombok.NonNull;
+import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -110,6 +117,9 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
    @Autowired
    private ProcessTaskRelationMapper processTaskRelationMapper;
+    @Autowired
+    private EnvironmentMapper environmentMapper;
    /**
     * save schedule
     *
@@ -150,11 +160,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        // check work flow define release state
        ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode);
-        result = executorService.checkProcessDefinitionValid(projectCode, processDefinition, processDefineCode,
+        executorService.checkProcessDefinitionValid(projectCode, processDefinition, processDefineCode,
                processDefinition.getVersion());
-        if (result.get(Constants.STATUS) != Status.SUCCESS) {
-            return result;
-        }
        Schedule scheduleObj = new Schedule();
        Date now = new Date();
@@ -212,6 +219,80 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        return result;
    }
protected void projectPermCheckByProcess(User loginUser, long processDefinitionCode) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
}
Project project = projectMapper.queryByCode(processDefinition.getProjectCode());
// check project auth
this.projectService.checkProjectAndAuthThrowException(loginUser, project, null);
}
private void scheduleParamCheck(String scheduleParamStr) {
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleParamStr, ScheduleParam.class);
if (scheduleParam == null) {
throw new ServiceException(Status.PARSE_SCHEDULE_PARAM_ERROR, scheduleParamStr);
}
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
throw new ServiceException(Status.SCHEDULE_START_TIME_END_TIME_SAME);
}
if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) {
throw new ServiceException(Status.START_TIME_BIGGER_THAN_END_TIME_ERROR);
}
if (!CronExpression.isValidExpression(scheduleParam.getCrontab())) {
throw new ServiceException(Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
}
}
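For reference, a scheduleParam value that satisfies the checks above (parseable JSON, start strictly before end, and a Quartz-valid crontab) follows the shape used in SchedulerControllerTest; the concrete dates below are illustrative only.

    // Sketch of a valid scheduleParam payload for scheduleParamCheck; values are examples only.
    String scheduleParamStr = "{\"startTime\":\"2022-01-01 00:00:00\","
            + "\"endTime\":\"2022-12-31 00:00:00\","
            + "\"crontab\":\"0 0 6 * * ? *\",\"timezoneId\":\"Asia/Shanghai\"}";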
/**
* save schedule V2, which will also change the process definition's warningGroupId if the schedule's warningGroupId is set
*
* @param loginUser login user
* @param scheduleCreateRequest schedule create object
* @return the Schedule object that was just created
*/
@Override
@Transactional
public Schedule createSchedulesV2(User loginUser,
ScheduleCreateRequest scheduleCreateRequest) {
this.projectPermCheckByProcess(loginUser, scheduleCreateRequest.getProcessDefinitionCode());
ProcessDefinition processDefinition =
processDefinitionMapper.queryByCode(scheduleCreateRequest.getProcessDefinitionCode());
// check workflow define release state
executorService.checkProcessDefinitionValid(processDefinition.getProjectCode(), processDefinition,
processDefinition.getCode(), processDefinition.getVersion());
Schedule scheduleExists =
scheduleMapper.queryByProcessDefinitionCode(scheduleCreateRequest.getProcessDefinitionCode());
if (scheduleExists != null) {
throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS, scheduleCreateRequest.getProcessDefinitionCode(),
scheduleExists.getId());
}
Schedule schedule = scheduleCreateRequest.convert2Schedule();
Environment environment = environmentMapper.queryByEnvironmentCode(schedule.getEnvironmentCode());
if (environment == null) {
throw new ServiceException(Status.QUERY_ENVIRONMENT_BY_CODE_ERROR, schedule.getEnvironmentCode());
}
schedule.setUserId(loginUser.getId());
// give more detail in the returned schedule object
schedule.setUserName(loginUser.getUserName());
schedule.setProcessDefinitionName(processDefinition.getName());
this.scheduleParamCheck(scheduleCreateRequest.getScheduleParam());
int create = scheduleMapper.insert(schedule);
if (create <= 0) {
throw new ServiceException(Status.CREATE_SCHEDULE_ERROR);
}
// updateProcessInstance receivers and cc by process definition id
processDefinition.setWarningGroupId(schedule.getWarningGroupId());
processDefinitionMapper.updateById(processDefinition);
return schedule;
}
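A hedged usage sketch of the new single-resource create; the setter names on ScheduleCreateRequest are assumed to mirror the getters used above, and the environment referenced by the request is assumed to exist.

    // Sketch only: create a schedule for an existing, released workflow via the V2 service.
    // schedulerService and loginUser are assumed to come from the Spring context / session.
    ScheduleCreateRequest createRequest = new ScheduleCreateRequest();
    createRequest.setProcessDefinitionCode(9876543210L);   // hypothetical workflow code
    createRequest.setScheduleParam("{\"startTime\":\"2022-01-01 00:00:00\","
            + "\"endTime\":\"2022-12-31 00:00:00\",\"crontab\":\"0 0 6 * * ? *\"}");
    Schedule created = schedulerService.createSchedulesV2(loginUser, createRequest);
    // the returned object is enriched with userName and processDefinitionName for the API response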
    /**
     * updateProcessInstance schedule
     *
@@ -254,13 +335,14 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        if (schedule == null) {
            logger.error("Schedule does not exist, scheduleId:{}.", id);
-            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
+            putMsg(result, Status.SCHEDULE_NOT_EXISTS, id);
            return result;
        }
        ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode());
        if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
-            logger.error("Process definition does not exist, processDefinitionCode:{}.", schedule.getProcessDefinitionCode());
+            logger.error("Process definition does not exist, processDefinitionCode:{}.",
+                    schedule.getProcessDefinitionCode());
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(schedule.getProcessDefinitionCode()));
            return result;
        }
@@ -270,6 +352,69 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        return result;
    }
/**
* update schedule object V2
*
* @param loginUser login user
* @param scheduleId schedule id
* @param scheduleUpdateRequest the schedule fields to be updated
* @return Schedule object
*/
@Override
@Transactional
public Schedule updateSchedulesV2(User loginUser,
Integer scheduleId,
ScheduleUpdateRequest scheduleUpdateRequest) {
Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) {
throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
}
Schedule scheduleUpdate;
try {
scheduleUpdate = scheduleUpdateRequest.mergeIntoSchedule(schedule);
// check update params
this.scheduleParamCheck(scheduleUpdateRequest.updateScheduleParam(scheduleUpdate));
} catch (InvocationTargetException | IllegalAccessException | InstantiationException
| NoSuchMethodException e) {
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleUpdateRequest.toString());
}
// check update params
this.projectPermCheckByProcess(loginUser, scheduleUpdate.getProcessDefinitionCode());
if (scheduleUpdate.getEnvironmentCode() != null) {
Environment environment = environmentMapper.queryByEnvironmentCode(scheduleUpdate.getEnvironmentCode());
if (environment == null) {
throw new ServiceException(Status.QUERY_ENVIRONMENT_BY_CODE_ERROR, scheduleUpdate.getEnvironmentCode());
}
}
int update = scheduleMapper.updateById(scheduleUpdate);
if (update <= 0) {
throw new ServiceException(Status.UPDATE_SCHEDULE_ERROR);
}
return scheduleUpdate;
}
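A sketch of a partial update through updateSchedulesV2; the crontab setter name on ScheduleUpdateRequest is an assumption for illustration (the real field names live in ScheduleUpdateRequest.java), and 37 is a placeholder schedule id.

    // Sketch only: change a single field of an existing schedule; unset fields keep their current values
    // because the request is merged into the stored Schedule before validation.
    ScheduleUpdateRequest updateRequest = new ScheduleUpdateRequest();
    updateRequest.setCrontab("0 0 7 * * ? *");   // assumed setter name, illustrative crontab
    Schedule updated = schedulerService.updateSchedulesV2(loginUser, 37, updateRequest);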
/**
* get schedule object
*
* @param loginUser login user
* @param scheduleId schedule id
* @return Schedule object
*/
@Override
@Transactional
public Schedule getSchedule(User loginUser,
Integer scheduleId) {
Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) {
throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
}
this.projectPermCheckByProcess(loginUser, schedule.getProcessDefinitionCode());
return schedule;
}
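Fetching a single schedule is symmetric; 37 is a placeholder id.

    // Sketch only: read one schedule; the caller only needs project-level access,
    // which is checked through projectPermCheckByProcess above.
    Schedule schedule = schedulerService.getSchedule(loginUser, 37);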
    /**
     * set schedule online or offline
     *
@@ -312,14 +457,16 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        ProcessDefinition processDefinition =
                processDefinitionMapper.queryByCode(scheduleObj.getProcessDefinitionCode());
        if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
            logger.error("Process definition does not exist, processDefinitionCode:{}.",
                    scheduleObj.getProcessDefinitionCode());
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(scheduleObj.getProcessDefinitionCode()));
            return result;
        }
        List<ProcessTaskRelation> processTaskRelations =
                processTaskRelationMapper.queryByProcessCode(projectCode, scheduleObj.getProcessDefinitionCode());
        if (processTaskRelations.isEmpty()) {
            logger.error("Process task relations do not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
                    processDefinition.getCode());
            putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
            return result;
        }
@@ -335,7 +482,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        List<Long> subProcessDefineCodes = new ArrayList<>();
        processService.recurseFindSubProcess(processDefinition.getCode(), subProcessDefineCodes);
        if (!subProcessDefineCodes.isEmpty()) {
            logger.info(
                    "Need to check sub process definition state before change schedule state, subProcessDefineCodes:{}.",
                    org.apache.commons.lang.StringUtils.join(subProcessDefineCodes, ","));
            List<ProcessDefinition> subProcessDefinitionList =
                    processDefinitionMapper.queryByCodes(subProcessDefineCodes);
@@ -345,7 +493,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
                 * if there is no online process, exit directly
                 */
                if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) {
                    logger.warn(
                            "Only sub process definition state is {} can change schedule state, subProcessDefinitionCode:{}.",
                            ReleaseState.ONLINE.getDescp(), subProcessDefinition.getCode());
                    putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE,
                            String.valueOf(subProcessDefinition.getId()));
@@ -454,6 +603,33 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        return scheduleMapper.querySchedulesByProcessDefinitionCodes(processDefinitionCodes);
    }
/**
* query schedule
*
* @param loginUser login user
* @param scheduleFilterRequest schedule filter request
* @return schedule list page
*/
@Override
@Transactional
public PageInfo<Schedule> filterSchedules(User loginUser,
ScheduleFilterRequest scheduleFilterRequest) {
if (scheduleFilterRequest.getProjectName() != null) {
Project project = projectMapper.queryByName(scheduleFilterRequest.getProjectName());
// check project auth
projectService.checkProjectAndAuthThrowException(loginUser, project, null);
}
Page<Schedule> page = new Page<>(scheduleFilterRequest.getPageNo(), scheduleFilterRequest.getPageSize());
IPage<Schedule> scheduleIPage = scheduleMapper.filterSchedules(page, scheduleFilterRequest.convert2Schedule());
PageInfo<Schedule> pageInfo =
new PageInfo<>(scheduleFilterRequest.getPageNo(), scheduleFilterRequest.getPageSize());
pageInfo.setTotal((int) scheduleIPage.getTotal());
pageInfo.setTotalList(scheduleIPage.getRecords());
return pageInfo;
}
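A paging sketch for the new filter query; the setter names are assumed to mirror the getters used above (projectName, pageNo, pageSize).

    // Sketch only: page through schedules visible to loginUser, optionally scoped to one project.
    ScheduleFilterRequest filterRequest = new ScheduleFilterRequest();
    filterRequest.setProjectName("my-project");   // optional; triggers a project auth check when set
    filterRequest.setPageNo(1);
    filterRequest.setPageSize(10);
    PageInfo<Schedule> schedulePage = schedulerService.filterSchedules(loginUser, filterRequest);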
    /**
     * query schedule list
     *
@@ -485,7 +661,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
    }

    public void setSchedule(int projectId, Schedule schedule) {
        logger.info("Set schedule state {}, project id: {}, scheduleId: {}", schedule.getReleaseState().getDescp(),
                projectId, schedule.getId());
        schedulerApi.insertOrUpdateScheduleTask(projectId, schedule);
    }
@@ -523,54 +700,28 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
    /**
     * delete schedule by id
     *
     * @param loginUser login user
-     * @param projectCode project code
-     * @param scheduleId scheule id
-     * @return delete result code
+     * @param scheduleId schedule id
     */
    @Override
-    public Map<String, Object> deleteScheduleById(User loginUser, long projectCode, Integer scheduleId) {
-        Map<String, Object> result = new HashMap<>();
-        Project project = projectMapper.queryByCode(projectCode);
-        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
-        Status resultEnum = (Status) checkResult.get(Constants.STATUS);
-        if (resultEnum != Status.SUCCESS) {
-            return checkResult;
-        }
+    public void deleteSchedulesById(User loginUser, Integer scheduleId) {
        Schedule schedule = scheduleMapper.selectById(scheduleId);
        if (schedule == null) {
-            logger.error("Schedule does not exist, scheduleId:{}.", scheduleId);
-            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId);
-            return result;
-        }
-        // Determine if the login user is the owner of the schedule
-        if (loginUser.getId() != schedule.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
-            logger.warn("User does not have permission to delete schedule, loginUserName:{}, scheduleId:{}.", loginUser.getUserName(), scheduleId);
-            putMsg(result, Status.USER_NO_OPERATION_PERM);
-            return result;
+            throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
        }
        // check schedule is already online
        if (schedule.getReleaseState() == ReleaseState.ONLINE) {
-            logger.warn("Only {} state schedule can be deleted, scheduleId:{}.", ReleaseState.OFFLINE.getDescp(), scheduleId);
-            putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
-            return result;
+            throw new ServiceException(Status.SCHEDULE_STATE_ONLINE, scheduleId);
+        }
+        // Determine if the login user is the owner of the schedule
+        if (loginUser.getId() != schedule.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
+            throw new ServiceException(Status.USER_NO_OPERATION_PERM);
        }
+        this.projectPermCheckByProcess(loginUser, schedule.getProcessDefinitionCode());
        int delete = scheduleMapper.deleteById(scheduleId);
-        if (delete > 0) {
-            logger.info("Schedule delete complete, scheduleId:{}.", scheduleId);
-            putMsg(result, Status.SUCCESS);
-        } else {
-            logger.error("Schedule delete error, scheduleId:{}.", scheduleId);
-            putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
+        if (delete <= 0) {
+            throw new ServiceException(Status.DELETE_SCHEDULE_BY_ID_ERROR);
        }
-        return result;
    }

    /**
@@ -643,7 +794,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
        // check schedule exists
        Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode);
        if (schedule == null) {
            logger.error("Schedule of process definition does not exist, processDefinitionCode:{}.",
                    processDefinitionCode);
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, processDefinitionCode);
            return result;
        }
@@ -665,8 +817,9 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
                                FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup,
                                long environmentCode) {
        if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE,
                Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
            logger.warn("Schedule can not be updated due to schedule is {}, scheduleId:{}.",
                    ReleaseState.ONLINE.getDescp(), schedule.getId());
            return;
        }

7  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java
@@ -27,6 +27,7 @@ import java.text.MessageFormat;
 * @param <T> T
 */
public class Result<T> {

    /**
     * status
     */
@@ -50,13 +51,13 @@ public class Result<T> {
        this.msg = msg;
    }

-    private Result(Status status) {
+    public Result(Status status) {
        if (status != null) {
            this.code = status.getCode();
            this.msg = status.getMsg();
        }
    }

    public Result(Integer code, String msg, T data) {
        this.code = code;
        this.msg = msg;
@@ -73,7 +74,7 @@ public class Result<T> {
    public static <T> Result<T> success(T data) {
        return new Result<>(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg(), data);
    }

    public static Result success() {
        return success(null);
    }
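Making the Status constructor public lets the V2 controllers build a typed Result directly instead of going through a result map; a brief sketch of the intended call pattern (not lifted verbatim from the controllers):

    // Sketch only: the two common ways a V2 endpoint can now produce a Result.
    Result<ProcessDefinition> withData = Result.success(processDefinition);   // SUCCESS code plus payload
    Result<Void> statusOnly = new Result<>(Status.SUCCESS);                   // status and message, no data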

4  dolphinscheduler-api/src/main/resources/i18n/messages.properties
@@ -217,7 +217,7 @@ SKIP_LINE_NUM=skip line num
QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log
USERS_TAG=users related operation
-SCHEDULER_TAG=scheduler related operation
+SCHEDULE_TAG=schedule related operation
CREATE_SCHEDULE_NOTES=create schedule
CREATE_USER_NOTES=create user
TENANT_ID=tenant id
@@ -269,7 +269,7 @@ DELETE_DATA_SOURCE_NOTES=delete data source
VERIFY_DATA_SOURCE_NOTES=verify data source
UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source
AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
-DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
+DELETE_SCHEDULE_NOTES=delete schedule by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids

4  dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
@@ -265,7 +265,7 @@ SKIP_LINE_NUM=skip line num
QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log
USERS_TAG=users related operation
-SCHEDULER_TAG=scheduler related operation
+SCHEDULE_TAG=schedule related operation
CREATE_SCHEDULE_NOTES=create schedule
CREATE_USER_NOTES=create user
TENANT_ID=tenant id
@@ -326,7 +326,7 @@ DELETE_DATA_SOURCE_NOTES=delete data source
VERIFY_DATA_SOURCE_NOTES=verify data source
UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source
AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
-DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
+DELETE_SCHEDULE_NOTES=delete schedule by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=batch export process definition by ids

4  dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
@@ -248,7 +248,7 @@ SKIP_LINE_NUM=忽略行数
QUERY_TASK_INSTANCE_LOG_NOTES=查询任务实例日志
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=下载任务实例日志
USERS_TAG=用户相关操作
-SCHEDULER_TAG=定时相关操作
+SCHEDULE_TAG=定时相关操作
CREATE_SCHEDULE_NOTES=创建定时
CREATE_USER_NOTES=创建用户
CREATE_WORKER_GROUP_NOTES=创建Worker分组
@@ -323,7 +323,7 @@ DELETE_DATA_SOURCE_NOTES=删除数据源
VERIFY_DATA_SOURCE_NOTES=验证数据源
UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源
AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源
-DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据
+DELETE_SCHEDULE_NOTES=根据定时id删除定时数据
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义

13  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java
@@ -40,6 +40,7 @@ import javax.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
@@ -299,15 +300,9 @@ public class ProcessDefinitionControllerTest {
    public void testDeleteProcessDefinitionByCode() {
        long projectCode = 1L;
        long code = 1L;
-        Map<String, Object> result = new HashMap<>();
-        putMsg(result, Status.SUCCESS);
-        Mockito.when(processDefinitionService.deleteProcessDefinitionByCode(user, projectCode, code))
-                .thenReturn(result);
-        Result response = processDefinitionController.deleteProcessDefinitionByCode(user, projectCode, code);
-        Assert.assertTrue(response != null && response.isSuccess());
+        // not throw error mean pass
+        Assertions.assertDoesNotThrow(
+                () -> processDefinitionController.deleteProcessDefinitionByCode(user, projectCode, code));
    }

    @Test

91  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java
@@ -38,7 +38,6 @@ import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.junit.Assert;
-import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
import org.slf4j.Logger;
@@ -50,6 +49,7 @@ import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

public class SchedulerControllerTest extends AbstractControllerTest {
    private static final Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class);
    @MockBean(name = "schedulerService")
@@ -58,21 +58,22 @@
    @Test
    public void testCreateSchedule() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("processDefinitionCode", "40");
        paramsMap.add("schedule",
                "{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? *'}");
        paramsMap.add("warningType", String.valueOf(WarningType.NONE));
        paramsMap.add("warningGroupId", "1");
        paramsMap.add("failureStrategy", String.valueOf(FailureStrategy.CONTINUE));
        paramsMap.add("receivers", "");
        paramsMap.add("receiversCc", "");
        paramsMap.add("workerGroupId", "1");
        paramsMap.add("processInstancePriority", String.valueOf(Priority.HIGH));
        Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class),
                isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class),
                isA(Priority.class), isA(String.class), isA(Long.class))).thenReturn(success());
        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/", 123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isCreated())
@@ -80,23 +81,23 @@
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
-    @Ignore
    public void testUpdateSchedule() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("id", "37");
        paramsMap.add("schedule",
                "{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 7 * * ? *'}");
        paramsMap.add("warningType", String.valueOf(WarningType.NONE));
        paramsMap.add("warningGroupId", "1");
        paramsMap.add("failureStrategy", String.valueOf(FailureStrategy.CONTINUE));
        paramsMap.add("receivers", "");
        paramsMap.add("receiversCc", "");
        paramsMap.add("workerGroupId", "1");
        paramsMap.add("processInstancePriority", String.valueOf(Priority.HIGH));
        Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class),
                isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class),
@@ -110,19 +111,19 @@
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testOnline() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("id", "37");
        Mockito.when(schedulerService.setScheduleState(isA(User.class), isA(Long.class), isA(Integer.class),
                isA(ReleaseState.class))).thenReturn(success());
        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/{id}/online", 123, 37)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
@@ -130,19 +131,19 @@
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testOffline() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("id", "28");
        Mockito.when(schedulerService.setScheduleState(isA(User.class), isA(Long.class), isA(Integer.class),
                isA(ReleaseState.class))).thenReturn(success());
        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/{id}/offline", 123, 28)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
@@ -150,17 +151,17 @@
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testQueryScheduleListPaging() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("processDefinitionCode", "40");
        paramsMap.add("searchVal", "test");
        paramsMap.add("pageNo", "1");
        paramsMap.add("pageSize", "30");
        PageInfo<Resource> pageInfo = new PageInfo<>(1, 10);
        Result mockResult = Result.success(pageInfo);
@@ -168,7 +169,7 @@
        Mockito.when(schedulerService.querySchedule(isA(User.class), isA(Long.class), isA(Long.class),
                isA(String.class), isA(Integer.class), isA(Integer.class))).thenReturn(mockResult);
        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/schedules/", 123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
@@ -176,7 +177,7 @@
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

@@ -184,14 +185,14 @@
    public void testQueryScheduleList() throws Exception {
        Mockito.when(schedulerService.queryScheduleList(isA(User.class), isA(Long.class))).thenReturn(success());
        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/list", 123)
                .header(SESSION_ID, sessionId))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

@@ -200,27 +201,27 @@
        Mockito.when(schedulerService.previewSchedule(isA(User.class), isA(String.class)))
                .thenReturn(success());
        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/preview", 123)
                .header(SESSION_ID, sessionId)
                .param("schedule",
                        "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *','timezoneId':'Asia/Shanghai'}"))
                .andExpect(status().isCreated())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testDeleteScheduleById() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("scheduleId", "37");
-        Mockito.when(schedulerService.deleteScheduleById(isA(User.class), isA(Long.class), isA(Integer.class)))
-                .thenReturn(success());
+        Mockito.doNothing().when(schedulerService).deleteSchedulesById(isA(User.class), isA(Integer.class));
        MvcResult mvcResult = mockMvc.perform(delete("/projects/{projectCode}/schedules/{id}", 123, 37)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
@@ -228,7 +229,7 @@
                .andReturn();
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}

168  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowV2ControllerTest.java
@@ -0,0 +1,168 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.common.Constants.EMPTY_STRING;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowCreateRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowFilterRequest;
import org.apache.dolphinscheduler.api.dto.workflow.WorkflowUpdateRequest;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
/**
* project v2 controller test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class WorkflowV2ControllerTest {
protected User user;
@InjectMocks
private WorkflowV2Controller workflowV2Controller;
@Mock
private ProcessDefinitionService processDefinitionService;
@Mock
private TenantMapper tenantMapper;
private final static String name = "workflowName";
private final static String newName = "workflowNameNew";
private final static String releaseState = "ONLINE";
private final static int projectCode = 13579;
private final static String description = "the workflow description";
private final static int timeout = 30;
private final static String tenantCode = "dolphinscheduler";
private final static int warningGroupId = 0;
private final static String executionType = "PARALLEL";
@Before
public void before() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
loginUser.setUserName("admin");
user = loginUser;
}
@Test
public void testCreateWorkflow() {
WorkflowCreateRequest workflowCreateRequest = new WorkflowCreateRequest();
workflowCreateRequest.setName(name);
workflowCreateRequest.setReleaseState(releaseState);
workflowCreateRequest.setProjectCode(projectCode);
workflowCreateRequest.setDescription(description);
workflowCreateRequest.setGlobalParams(EMPTY_STRING);
workflowCreateRequest.setTimeout(timeout);
workflowCreateRequest.setTenantCode(tenantCode);
workflowCreateRequest.setWarningGroupId(warningGroupId);
workflowCreateRequest.setExecutionType(executionType);
Mockito.when(processDefinitionService.createSingleProcessDefinition(user, workflowCreateRequest))
.thenReturn(this.getProcessDefinition(name));
Result<ProcessDefinition> resourceResponse = workflowV2Controller.createWorkflow(user, workflowCreateRequest);
Assert.assertEquals(this.getProcessDefinition(name), resourceResponse.getData());
}
@Test
public void testUpdateWorkflow() {
WorkflowUpdateRequest workflowUpdateRequest = new WorkflowUpdateRequest();
workflowUpdateRequest.setName(newName);
Mockito.when(processDefinitionService.updateSingleProcessDefinition(user, 1L, workflowUpdateRequest))
.thenReturn(this.getProcessDefinition(newName));
Result<ProcessDefinition> resourceResponse =
workflowV2Controller.updateWorkflow(user, 1L, workflowUpdateRequest);
Assert.assertEquals(this.getProcessDefinition(newName), resourceResponse.getData());
}
@Test
public void testGetWorkflow() {
Mockito.when(processDefinitionService.getProcessDefinition(user, 1L))
.thenReturn(this.getProcessDefinition(name));
Result<ProcessDefinition> resourceResponse = workflowV2Controller.getWorkflow(user, 1L);
Assertions.assertEquals(this.getProcessDefinition(name), resourceResponse.getData());
}
@Test
public void testFilterWorkflow() {
WorkflowFilterRequest workflowFilterRequest = new WorkflowFilterRequest();
workflowFilterRequest.setWorkflowName(name);
Mockito.when(processDefinitionService.filterProcessDefinition(user, workflowFilterRequest))
.thenReturn(this.getProcessDefinitionPage(name));
Result<PageInfo<ProcessDefinition>> pageResourceResponse =
workflowV2Controller.filterWorkflows(user, workflowFilterRequest);
PageInfo<ProcessDefinition> processDefinitionPage = pageResourceResponse.getData();
Assertions.assertIterableEquals(this.getProcessDefinitionPage(name).getTotalList(),
processDefinitionPage.getTotalList());
}
private ProcessDefinition getProcessDefinition(String pdName) {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(1);
processDefinition.setName(pdName);
processDefinition.setDescription(description);
processDefinition.setReleaseState(ReleaseState.valueOf(releaseState));
processDefinition.setProjectCode(projectCode);
processDefinition.setTenantId(1);
processDefinition.setExecutionType(ProcessExecutionTypeEnum.valueOf(executionType));
processDefinition.setWarningGroupId(warningGroupId);
processDefinition.setGlobalParams(EMPTY_STRING);
return processDefinition;
}
private PageInfo<ProcessDefinition> getProcessDefinitionPage(String pdName) {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(1);
processDefinition.setName(pdName);
processDefinition.setDescription(description);
processDefinition.setReleaseState(ReleaseState.valueOf(releaseState));
processDefinition.setProjectCode(projectCode);
processDefinition.setTenantId(1);
processDefinition.setExecutionType(ProcessExecutionTypeEnum.valueOf(executionType));
processDefinition.setWarningGroupId(warningGroupId);
processDefinition.setGlobalParams(EMPTY_STRING);
PageInfo<ProcessDefinition> pageInfoProcessDefinitions = new PageInfo<ProcessDefinition>();
List<ProcessDefinition> processDefinitions = new ArrayList<ProcessDefinition>();
processDefinitions.add(processDefinition);
pageInfoProcessDefinitions.setTotalList(processDefinitions);
return pageInfoProcessDefinitions;
}
}

61  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java
@@ -24,6 +24,10 @@ import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.dao.entity.User;
+import java.util.HashMap;
+import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -35,9 +39,6 @@ import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.HashMap;
-import java.util.Map;
/**
 * base service test
 */
@@ -63,9 +64,9 @@
        User user = new User();
        user.setUserType(UserType.ADMIN_USER);
        // ADMIN_USER
        Assert.assertTrue(baseService.isAdmin(user));
        // GENERAL_USER
        user.setUserType(UserType.GENERAL_USER);
        Assert.assertFalse(baseService.isAdmin(user));
@@ -76,9 +77,9 @@
        Map<String, Object> result = new HashMap<>();
        baseService.putMsg(result, Status.SUCCESS);
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        // has params
        baseService.putMsg(result, Status.PROJECT_NOT_FOUND, "test");
    }
@@ -87,39 +88,39 @@
        Result result = new Result();
        baseService.putMsg(result, Status.SUCCESS);
        Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
        // has params
        baseService.putMsg(result, Status.PROJECT_NOT_FOUND, "test");
    }
    // @Test
    // public void testCreateTenantDirIfNotExists() {
    //
    // PowerMockito.mockStatic(HadoopUtils.class);
    // PowerMockito.when(HadoopUtils.getInstance()).thenReturn(hadoopUtils);
    //
    // try {
    // baseService.createTenantDirIfNotExists("test");
    // } catch (Exception e) {
    // Assert.fail();
    // logger.error("CreateTenantDirIfNotExists error ",e);
    // e.printStackTrace();
    // }
    //
    // }
    @Test
    public void testHasPerm() {
        User user = new User();
        user.setId(1);
        // create user
        Assert.assertTrue(baseService.canOperator(user, 1));
        // admin
        user.setId(2);
        user.setUserType(UserType.ADMIN_USER);
        Assert.assertTrue(baseService.canOperator(user, 1));
    }

36
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTestTool.java

@@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import java.text.MessageFormat;
import java.util.Map;
public class BaseServiceTestTool {
protected void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
}
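A minimal usage sketch (not part of the patch): a service test extending this helper can build the expected result map with putMsg. The class name and the "test" parameter below are hypothetical; the putMsg signature and the Status constants are the ones shown above.

import java.util.HashMap;
import java.util.Map;
import org.apache.dolphinscheduler.api.enums.Status;

public class ExampleServiceTest extends BaseServiceTestTool {      // hypothetical test class
    void buildExpectedResult() {
        Map<String, Object> expected = new HashMap<>();
        putMsg(expected, Status.SUCCESS);                          // no params: MSG falls back to status.getMsg()
        putMsg(expected, Status.PROJECT_NOT_FOUND, "test");        // with params: MSG is rendered through MessageFormat
    }
}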

654
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

File diff suppressed because it is too large

373
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java

@@ -17,44 +17,50 @@
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.utils.DateUtils.stringToDate;
import static org.mockito.ArgumentMatchers.isA;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleCreateRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleFilterRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleUpdateRequest;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.impl.SchedulerServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.dao.entity.Environment;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.scheduler.api.SchedulerApi;
import org.apache.dolphinscheduler.scheduler.quartz.QuartzScheduler;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
/**
 * scheduler service test
 */
@RunWith(PowerMockRunner.class)
public class SchedulerServiceTest extends BaseServiceTestTool {
@InjectMocks
private SchedulerServiceImpl schedulerService;
@@ -78,24 +84,43 @@ public class SchedulerServiceTest {
private ProcessDefinitionMapper processDefinitionMapper;
@Mock
private ProjectService projectService;
@Mock
private SchedulerApi schedulerApi;
@Mock
private ExecutorService executorService;
@Mock
private EnvironmentMapper environmentMapper;
protected static User user;
protected Exception exception;
private static final String userName = "userName";
private static final String projectName = "projectName";
private static final long projectCode = 1L;
private static final int userId = 1;
private static final String processDefinitionName = "processDefinitionName";
private static final long processDefinitionCode = 2L;
private static final int processDefinitionVersion = 3;
private static final int scheduleId = 3;
private static final long environmentCode = 4L;
private static final String startTime = "2020-01-01 12:13:14";
private static final String endTime = "2020-02-01 12:13:14";
private static final String crontab = "0 0 * * * ? *";
@Before
public void setUp() {
user = new User();
user.setUserName(userName);
user.setId(userId);
}
@Test
public void testSetScheduleState() {
Map<String, Object> result;
Project project = getProject();
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectCode(projectCode);
@@ -115,48 +140,332 @@ public class SchedulerServiceTest {
Mockito.when(processDefinitionMapper.queryByCode(1)).thenReturn(processDefinition);
// hash no auth
result = schedulerService.setScheduleState(user, project.getCode(), 1, ReleaseState.ONLINE);
Mockito.when(projectService.hasProjectAndPerm(user, project, result, null)).thenReturn(true);
// schedule not exists
result = schedulerService.setScheduleState(user, project.getCode(), 2, ReleaseState.ONLINE);
Assert.assertEquals(Status.SCHEDULE_CRON_NOT_EXISTS, result.get(Constants.STATUS));
// SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE
result = schedulerService.setScheduleState(user, project.getCode(), 1, ReleaseState.OFFLINE);
Assert.assertEquals(Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, result.get(Constants.STATUS));
// PROCESS_DEFINE_NOT_EXIST
schedule.setProcessDefinitionCode(2);
result = schedulerService.setScheduleState(user, project.getCode(), 1, ReleaseState.ONLINE);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, result.get(Constants.STATUS));
schedule.setProcessDefinitionCode(1);
result = schedulerService.setScheduleState(user, project.getCode(), 1, ReleaseState.ONLINE);
Assert.assertEquals(Status.PROCESS_DAG_IS_EMPTY, result.get(Constants.STATUS));
processDefinition.setReleaseState(ReleaseState.ONLINE);
Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition);
result = schedulerService.setScheduleState(user, project.getCode(), 1, ReleaseState.ONLINE);
Assert.assertEquals(Status.PROCESS_DAG_IS_EMPTY, result.get(Constants.STATUS));
// set master
Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(masterServers);
// SUCCESS
result = schedulerService.setScheduleState(user, project.getCode(), 1, ReleaseState.ONLINE);
Assert.assertEquals(Status.PROCESS_DAG_IS_EMPTY, result.get(Constants.STATUS));
}
@Test
public void testCreateSchedulesV2() {
Project project = this.getProject();
ProcessDefinition processDefinition = this.getProcessDefinition();
Schedule schedule = this.getSchedule();
ScheduleCreateRequest scheduleCreateRequest = new ScheduleCreateRequest();
scheduleCreateRequest.setProcessDefinitionCode(processDefinitionCode);
scheduleCreateRequest.setEnvironmentCode(environmentCode);
// error process definition not exists
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode());
// error project permissions
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(processDefinition);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService)
.checkProjectAndAuthThrowException(user, project, null);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(),
((ServiceException) exception).getCode());
// we do not check method `executorService.checkProcessDefinitionValid` because it should be check in
// executorServiceTest
// error process definition already exists schedule
Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, project, null);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode)).thenReturn(schedule);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.SCHEDULE_ALREADY_EXISTS.getCode(), ((ServiceException) exception).getCode());
// error environment do not exists
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode)).thenReturn(null);
Mockito.when(environmentMapper.queryByEnvironmentCode(environmentCode)).thenReturn(null);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.QUERY_ENVIRONMENT_BY_CODE_ERROR.getCode(),
((ServiceException) exception).getCode());
// error schedule parameter same start time and end time
Mockito.when(environmentMapper.queryByEnvironmentCode(environmentCode)).thenReturn(this.getEnvironment());
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.SCHEDULE_START_TIME_END_TIME_SAME.getCode(),
((ServiceException) exception).getCode());
// error schedule parameter same start time after than end time
scheduleCreateRequest.setEndTime(endTime);
String badStartTime = "2022-01-01 12:13:14";
scheduleCreateRequest.setStartTime(badStartTime);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.START_TIME_BIGGER_THAN_END_TIME_ERROR.getCode(),
((ServiceException) exception).getCode());
// error schedule crontab
String badCrontab = "0 0 123 * * ? *";
scheduleCreateRequest.setStartTime(startTime);
scheduleCreateRequest.setCrontab(badCrontab);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.SCHEDULE_CRON_CHECK_FAILED.getCode(), ((ServiceException) exception).getCode());
// error create error
scheduleCreateRequest.setCrontab(crontab);
Mockito.when(scheduleMapper.insert(isA(Schedule.class))).thenReturn(0);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.createSchedulesV2(user, scheduleCreateRequest));
Assertions.assertEquals(Status.CREATE_SCHEDULE_ERROR.getCode(), ((ServiceException) exception).getCode());
// success
scheduleCreateRequest.setCrontab(crontab);
Mockito.when(scheduleMapper.insert(isA(Schedule.class))).thenReturn(1);
Schedule scheduleCreated = schedulerService.createSchedulesV2(user, scheduleCreateRequest);
Assertions.assertEquals(scheduleCreateRequest.getProcessDefinitionCode(),
scheduleCreated.getProcessDefinitionCode());
Assertions.assertEquals(scheduleCreateRequest.getEnvironmentCode(), scheduleCreated.getEnvironmentCode());
Assertions.assertEquals(stringToDate(scheduleCreateRequest.getStartTime()), scheduleCreated.getStartTime());
Assertions.assertEquals(stringToDate(scheduleCreateRequest.getEndTime()), scheduleCreated.getEndTime());
Assertions.assertEquals(scheduleCreateRequest.getCrontab(), scheduleCreated.getCrontab());
}
@Test
public void testDeleteSchedules() {
Schedule schedule = this.getSchedule();
// error schedule not exists
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.deleteSchedulesById(user, scheduleId));
Assertions.assertEquals(Status.SCHEDULE_NOT_EXISTS.getCode(), ((ServiceException) exception).getCode());
// error schedule already online
schedule.setReleaseState(ReleaseState.ONLINE);
Mockito.when(scheduleMapper.selectById(scheduleId)).thenReturn(schedule);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.deleteSchedulesById(user, scheduleId));
Assertions.assertEquals(Status.SCHEDULE_STATE_ONLINE.getCode(), ((ServiceException) exception).getCode());
schedule.setReleaseState(ReleaseState.OFFLINE);
// error user not own schedule
int notOwnUserId = 2;
schedule.setUserId(notOwnUserId);
Mockito.when(scheduleMapper.selectById(scheduleId)).thenReturn(schedule);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.deleteSchedulesById(user, scheduleId));
Assertions.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(), exception.getMessage());
schedule.setUserId(userId);
// error process definition not exists
Mockito.when(scheduleMapper.selectById(scheduleId)).thenReturn(schedule);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.deleteSchedulesById(user, scheduleId));
Assertions.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode());
// error project permissions
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode))
.thenReturn(this.getProcessDefinition());
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(this.getProject());
Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService)
.checkProjectAndAuthThrowException(user, this.getProject(), null);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.deleteSchedulesById(user, scheduleId));
Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(),
((ServiceException) exception).getCode());
// error delete mapper
Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, this.getProject(), null);
Mockito.when(scheduleMapper.deleteById(scheduleId)).thenReturn(0);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.deleteSchedulesById(user, scheduleId));
Assertions.assertEquals(Status.DELETE_SCHEDULE_BY_ID_ERROR.getCode(), ((ServiceException) exception).getCode());
// success
Mockito.when(scheduleMapper.deleteById(scheduleId)).thenReturn(1);
Assertions.assertDoesNotThrow(() -> schedulerService.deleteSchedulesById(user, scheduleId));
}
@Test
public void testFilterSchedules() {
Project project = this.getProject();
ScheduleFilterRequest scheduleFilterRequest = new ScheduleFilterRequest();
scheduleFilterRequest.setProjectName(project.getName());
// project permission error
Mockito.when(projectMapper.queryByName(project.getName())).thenReturn(project);
Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService)
.checkProjectAndAuthThrowException(user, project, null);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.filterSchedules(user, scheduleFilterRequest));
Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(),
((ServiceException) exception).getCode());
}
@Test
public void testGetSchedules() {
// error schedule not exists
exception =
Assertions.assertThrows(ServiceException.class, () -> schedulerService.getSchedule(user, scheduleId));
Assertions.assertEquals(Status.SCHEDULE_NOT_EXISTS.getCode(), ((ServiceException) exception).getCode());
// error process definition not exists
Mockito.when(scheduleMapper.selectById(scheduleId)).thenReturn(this.getSchedule());
exception =
Assertions.assertThrows(ServiceException.class, () -> schedulerService.getSchedule(user, scheduleId));
Assertions.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode());
// error project permissions
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode))
.thenReturn(this.getProcessDefinition());
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(this.getProject());
Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService)
.checkProjectAndAuthThrowException(user, this.getProject(), null);
exception =
Assertions.assertThrows(ServiceException.class, () -> schedulerService.getSchedule(user, scheduleId));
Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(),
((ServiceException) exception).getCode());
// success
Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, this.getProject(), null);
Schedule schedule = schedulerService.getSchedule(user, scheduleId);
Assertions.assertEquals(this.getSchedule().getId(), schedule.getId());
}
@Test
public void testUpdateSchedulesV2() {
ScheduleUpdateRequest scheduleUpdateRequest = new ScheduleUpdateRequest();
// error schedule not exists
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.SCHEDULE_NOT_EXISTS.getCode(), ((ServiceException) exception).getCode());
// error schedule parameter same start time and end time
scheduleUpdateRequest.setEndTime(endTime);
scheduleUpdateRequest.setStartTime(endTime);
Mockito.when(scheduleMapper.selectById(scheduleId)).thenReturn(this.getSchedule());
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.SCHEDULE_START_TIME_END_TIME_SAME.getCode(),
((ServiceException) exception).getCode());
// error schedule parameter same start time after than end time
String badStartTime = "2022-01-01 12:13:14";
scheduleUpdateRequest.setStartTime(badStartTime);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.START_TIME_BIGGER_THAN_END_TIME_ERROR.getCode(),
((ServiceException) exception).getCode());
scheduleUpdateRequest.setStartTime(startTime);
// error schedule crontab
String badCrontab = "0 0 123 * * ? *";
scheduleUpdateRequest.setCrontab(badCrontab);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.SCHEDULE_CRON_CHECK_FAILED.getCode(), ((ServiceException) exception).getCode());
scheduleUpdateRequest.setCrontab(crontab);
// error process definition not exists
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode());
// error project permissions
Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode))
.thenReturn(this.getProcessDefinition());
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(this.getProject());
Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService)
.checkProjectAndAuthThrowException(user, this.getProject(), null);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(),
((ServiceException) exception).getCode());
// error environment do not exists
Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, this.getProject(), null);
Mockito.when(environmentMapper.queryByEnvironmentCode(environmentCode)).thenReturn(null);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.QUERY_ENVIRONMENT_BY_CODE_ERROR.getCode(),
((ServiceException) exception).getCode());
// error environment do not exists
Mockito.when(environmentMapper.queryByEnvironmentCode(environmentCode)).thenReturn(this.getEnvironment());
Mockito.when(scheduleMapper.updateById(isA(Schedule.class))).thenReturn(0);
exception = Assertions.assertThrows(ServiceException.class,
() -> schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest));
Assertions.assertEquals(Status.UPDATE_SCHEDULE_ERROR.getCode(), ((ServiceException) exception).getCode());
// success
Mockito.when(scheduleMapper.updateById(isA(Schedule.class))).thenReturn(1);
Schedule schedule = schedulerService.updateSchedulesV2(user, scheduleId, scheduleUpdateRequest);
Assertions.assertEquals(scheduleUpdateRequest.getCrontab(), schedule.getCrontab());
Assertions.assertEquals(stringToDate(scheduleUpdateRequest.getStartTime()), schedule.getStartTime());
Assertions.assertEquals(stringToDate(scheduleUpdateRequest.getEndTime()), schedule.getEndTime());
}
private Project getProject() {
Project project = new Project();
project.setName(projectName);
project.setCode(projectCode);
project.setUserId(userId);
return project;
}
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setName(processDefinitionName);
processDefinition.setCode(processDefinitionCode);
processDefinition.setProjectCode(projectCode);
processDefinition.setVersion(processDefinitionVersion);
processDefinition.setUserId(userId);
return processDefinition;
}
private Schedule getSchedule() {
Schedule schedule = new Schedule();
schedule.setId(scheduleId);
schedule.setProcessDefinitionCode(processDefinitionCode);
schedule.setEnvironmentCode(environmentCode);
schedule.setUserId(userId);
return schedule;
}
private Environment getEnvironment() {
Environment environment = new Environment();
environment.setCode(environmentCode);
return environment;
}
}
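For orientation, here is a hedged happy-path sketch of the new single-resource schedule service exercised by the tests above. All setters and the two V2 methods appear in the tests; the codes, dates, the second crontab and the loginUser variable are illustrative only.

ScheduleCreateRequest create = new ScheduleCreateRequest();
create.setProcessDefinitionCode(2L);              // an existing workflow the user may operate on
create.setEnvironmentCode(4L);                    // must resolve through environmentMapper
create.setStartTime("2020-01-01 12:13:14");
create.setEndTime("2020-02-01 12:13:14");
create.setCrontab("0 0 * * * ? *");
Schedule created = schedulerService.createSchedulesV2(loginUser, create);

ScheduleUpdateRequest update = new ScheduleUpdateRequest();
update.setCrontab("0 30 * * * ? *");              // the tests above update only a subset of fields
Schedule updated = schedulerService.updateSchedulesV2(loginUser, created.getId(), update);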

10
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java

@@ -115,6 +115,16 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
@Param("userId") int userId,
@Param("projectCode") long projectCode);
/**
* Filter process definitions
*
* @param page page
* @param processDefinition process definition object
* @return process definition IPage
*/
IPage<ProcessDefinition> filterProcessDefinition(IPage<ProcessDefinition> page,
@Param("pd") ProcessDefinition processDefinition);
/**
 * query all process definition list
 *
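A hypothetical caller fragment for the new filter query, assuming the MyBatis-Plus pagination types (com.baomidou.mybatisplus Page/IPage) already used by this mapper; the filter values are illustrative and the conditions mirror the XML added further below.

Page<ProcessDefinition> page = new Page<>(1, 10);                 // pageNo, pageSize
ProcessDefinition filterCondition = new ProcessDefinition();
filterCondition.setProjectCode(1L);                               // 0 would skip the project_code condition
filterCondition.setName("etl");                                   // matched with LIKE '%etl%'
IPage<ProcessDefinition> matched = processDefinitionMapper.filterProcessDefinition(page, filterCondition);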

10
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java

@@ -63,6 +63,16 @@ public interface ScheduleMapper extends BaseMapper<Schedule> {
@Param("processDefinitionCode") long processDefinitionCode,
@Param("searchVal") String searchVal);
/**
* Filter schedule
*
* @param page page
* @param schedule schedule
* @return schedule IPage
*/
IPage<Schedule> filterSchedules(IPage<Schedule> page,
@Param("schedule") Schedule schedule);
/**
 * query schedule list by project name
 *
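A parallel, hypothetical fragment for filterSchedules. The release-state setter is used elsewhere in this patch; the projectName and processDefinitionName setters on Schedule are assumed to exist because the XML below references those properties, and the values are illustrative.

Page<Schedule> page = new Page<>(1, 10);
Schedule filterCondition = new Schedule();
filterCondition.setReleaseState(ReleaseState.ONLINE);             // filters on s.release_state
filterCondition.setProjectName("projectName");                    // LIKE filter on the joined project name (assumed setter)
IPage<Schedule> matched = scheduleMapper.filterSchedules(page, filterCondition);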

20
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml

@@ -83,6 +83,26 @@
</if>
order by update_time desc
</select>
<select id="filterProcessDefinition"
parameterType="org.apache.dolphinscheduler.dao.entity.ProcessDefinition"
resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinition"
>
SELECT
<include refid="baseSql"/>
FROM t_ds_process_definition
<where>
<if test=" pd.projectCode != 0">
and project_code = #{pd.projectCode}
</if>
<if test=" pd.name != null and pd.name != ''">
and name like concat('%', #{pd.name}, '%')
</if>
<if test=" pd.releaseState != null and pd.releaseState != ''">
and release_state = #{pd.releaseState}
</if>
</where>
order by update_time desc, id asc
</select>
<select id="queryAllDefinitionList" resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinition"> <select id="queryAllDefinitionList" resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinition">
select select

25
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.xml

@@ -90,4 +90,29 @@
from t_ds_schedules
where process_definition_code = #{processDefinitionCode} and release_state = 1
</select>
<select id="filterSchedules"
parameterType="org.apache.dolphinscheduler.dao.entity.Schedule"
resultType="org.apache.dolphinscheduler.dao.entity.Schedule"
>
SELECT pd.name as process_definition_name, p.name as project_name,
<include refid="baseSqlV2">
<property name="alias" value="s"/>
</include>
FROM t_ds_schedules s
join t_ds_process_definition pd on s.process_definition_code = pd.code
join t_ds_project as p on pd.project_code = p.code
<where>
<if test=" schedule.projectName != null and schedule.projectName != ''">
and p.name like concat('%', #{schedule.projectName}, '%')
</if>
<if test=" schedule.processDefinitionName != null and schedule.processDefinitionName != ''">
and pd.name like concat('%', #{schedule.processDefinitionName}, '%')
</if>
<if test=" schedule.releaseState != null and schedule.releaseState != ''">
and s.release_state = #{schedule.releaseState}
</if>
</where>
order by update_time desc, id asc
</select>
</mapper>
