
[Feature-2154][api] Workflow version control (#3053)

* [Feature][api] Workflow version control

* Update messages.properties

* Update messages_en_US.properties

* Update messages_zh_CN.properties
Yichao Yang, 4 years ago (committed by GitHub)
commit 93660f4d61
Changed files (35):

1. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (290)
2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (337)
3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java (113)
4. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java (70)
5. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java (99)
6. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (90)
7. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java (181)
8. dolphinscheduler-api/src/main/resources/i18n/messages.properties (4)
9. dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties (4)
10. dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties (5)
11. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java (217)
12. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (37)
13. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java (274)
14. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java (39)
15. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java (44)
16. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java (32)
17. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionVersion.java (329)
18. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java (24)
19. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.java (69)
20. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml (6)
21. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml (47)
22. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java (54)
23. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapperTest.java (172)
24. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue (155)
25. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js (2)
26. dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue (125)
27. dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/versions.vue (255)
28. dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js (42)
29. dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js (5)
30. dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js (5)
31. pom.xml (2)
32. sql/dolphinscheduler-postgre.sql (27)
33. sql/dolphinscheduler_mysql.sql (27)
34. sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql (31)
35. sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql (32)

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (290)

@@ -14,32 +14,65 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import com.fasterxml.jackson.core.JsonProcessingException;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_COPY_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_MOVE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_DEFINITION;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_VERSIONS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/**
@@ -55,16 +88,19 @@ public class ProcessDefinitionController extends BaseController {
@Autowired
private ProcessDefinitionService processDefinitionService;
@Autowired
private ProcessDefinitionVersionService processDefinitionVersionService;
/**
* create process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param json process definition json
* @param name process definition name
* @param json process definition json
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
*/
@ApiOperation(value = "save", notes = "CREATE_PROCESS_DEFINITION_NOTES")
@@ -86,8 +122,8 @@ public class ProcessDefinitionController extends BaseController {
@RequestParam(value = "connects", required = true) String connects,
@RequestParam(value = "description", required = false) String description) throws JsonProcessingException {
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " +
"process_definition_json: {}, desc: {} locations:{}, connects:{}",
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, "
+ "process_definition_json: {}, desc: {} locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, json, description, locations, connects);
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json,
description, locations, connects);
@@ -97,13 +133,13 @@ public class ProcessDefinitionController extends BaseController {
/**
* copy process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return copy result code
*/
@ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "copyProcessDefinition", notes = "COPY_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
@@ -114,7 +150,7 @@ public class ProcessDefinitionController extends BaseController {
public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId",required = true) int targetProjectId) {
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
@@ -122,19 +158,19 @@ public class ProcessDefinitionController extends BaseController {
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchCopyProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId));
processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
}
/**
* move process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return move result code
*/
@ApiOperation(value = "moveProcessDefinition", notes= "MOVE_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "moveProcessDefinition", notes = "MOVE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
@@ -145,7 +181,7 @@ public class ProcessDefinitionController extends BaseController {
public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId",required = true) int targetProjectId) {
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
@@ -153,15 +189,15 @@ public class ProcessDefinitionController extends BaseController {
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchMoveProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId));
processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
@ApiOperation(value = "verify-name", notes = "VERIFY_PROCESS_DEFINITION_NAME_NOTES")
@@ -172,8 +208,8 @@ public class ProcessDefinitionController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR)
public Result verifyProcessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name) {
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name) {
logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}",
loginUser.getUserName(), projectName, name);
Map<String, Object> result = processDefinitionService.verifyProcessDefinitionName(loginUser, projectName, name);
@@ -183,18 +219,18 @@ public class ProcessDefinitionController extends BaseController {
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
@ApiOperation(value = "updateProcessDefinition", notes= "UPDATE_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "updateProcessDefinition", notes = "UPDATE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@@ -207,33 +243,115 @@ public class ProcessDefinitionController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROCESS_DEFINITION_ERROR)
public Result updateProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "id", required = true) int id,
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
@RequestParam(value = "locations", required = false) String locations,
@RequestParam(value = "connects", required = false) String connects,
@RequestParam(value = "description", required = false) String description) {
logger.info("login user {}, update process define, project name: {}, process define name: {}, " +
"process_definition_json: {}, desc: {}, locations:{}, connects:{}",
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "id", required = true) int id,
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
@RequestParam(value = "locations", required = false) String locations,
@RequestParam(value = "connects", required = false) String connects,
@RequestParam(value = "description", required = false) String description) {
logger.info("login user {}, update process define, project name: {}, process define name: {}, "
+ "process_definition_json: {}, desc: {}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects);
Map<String, Object> result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name,
processDefinitionJson, description, locations, connects);
return returnDataList(result);
}
/**
* query process definition version paging list info
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param pageNo the process definition version list current page number
* @param pageSize the process definition version list page size
* @param processDefinitionId the process definition id
* @return the process definition version list
*/
@ApiOperation(value = "queryProcessDefinitionVersions", notes = "QUERY_PROCESS_DEFINITION_VERSIONS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/versions")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_VERSIONS_ERROR)
public Result queryProcessDefinitionVersions(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "pageNo") int pageNo,
@RequestParam(value = "pageSize") int pageSize,
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
Map<String, Object> result = processDefinitionVersionService.queryProcessDefinitionVersions(loginUser
, projectName, pageNo, pageSize, processDefinitionId);
return returnDataList(result);
}
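
(Not part of the patch: the new listing endpoint above pages through the stored versions of a single process definition. Below is a minimal sketch of calling it over HTTP; the host, port, project name and token header are placeholders, and the projects/{projectName}/process prefix is assumed from the usual controller mapping, which is not shown in this excerpt.)

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class QueryVersionsExample {
    public static void main(String[] args) throws Exception {
        // Placeholder base URL, project name and token for illustration only.
        String base = "http://localhost:12345/dolphinscheduler/projects/test/process";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(base + "/versions?pageNo=1&pageSize=10&processDefinitionId=1"))
                .header("token", "session-token-placeholder") // hypothetical auth header
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // Expected to carry the paged ProcessDefinitionVersion list inside the Result envelope.
        System.out.println(response.body());
    }
}
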
/**
* switch certain process definition version
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param processDefinitionId the process definition id
* @param version the version user want to switch
* @return switch version result code
*/
@ApiOperation(value = "switchProcessDefinitionVersion", notes = "SWITCH_PROCESS_DEFINITION_VERSION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
})
@GetMapping(value = "/version/switch")
@ResponseStatus(HttpStatus.OK)
@ApiException(SWITCH_PROCESS_DEFINITION_VERSION_ERROR)
public Result switchProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "version") long version) {
Map<String, Object> result = processDefinitionService.switchProcessDefinitionVersion(loginUser, projectName
, processDefinitionId, version);
return returnDataList(result);
}
/**
* delete the certain process definition version by version and process definition id
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param processDefinitionId process definition id
* @param version the process definition version user want to delete
* @return delete version result code
*/
@ApiOperation(value = "deleteProcessDefinitionVersion", notes = "DELETE_PROCESS_DEFINITION_VERSION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
})
@GetMapping(value = "/version/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR)
public Result deleteProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "version") long version) {
Map<String, Object> result = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(loginUser, projectName, processDefinitionId, version);
return returnDataList(result);
}
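
(Not part of the patch: the version switch and delete endpoints above both take processDefinitionId and version as query parameters. The sketch below calls both under the same placeholder assumptions as the previous example; only the /version/switch and /version/delete paths are taken from the @GetMapping values in this diff.)

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class VersionSwitchAndDeleteExample {

    // Placeholder base URL and project name for illustration only.
    private static final String BASE = "http://localhost:12345/dolphinscheduler/projects/test/process";

    private static String get(String pathAndQuery) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(BASE + pathAndQuery))
                .header("token", "session-token-placeholder") // hypothetical auth header
                .GET()
                .build();
        return HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString())
                .body();
    }

    public static void main(String[] args) throws Exception {
        // Roll process definition 1 back to version 2.
        System.out.println(get("/version/switch?processDefinitionId=1&version=2"));
        // Remove version 3 of process definition 1 from the version history.
        System.out.println(get("/version/delete?processDefinitionId=1&version=3"));
    }
}
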
/**
* release process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param releaseState release state
* @return release result code
*/
@ApiOperation(value = "releaseProcessDefinition", notes= "RELEASE_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "releaseProcessDefinition", notes = "RELEASE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@@ -243,9 +361,9 @@ public class ProcessDefinitionController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(RELEASE_PROCESS_DEFINITION_ERROR)
public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId,
@RequestParam(value = "releaseState", required = true) int releaseState) {
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId,
@RequestParam(value = "releaseState", required = true) int releaseState) {
logger.info("login user {}, release process definition, project name: {}, release state: {}",
loginUser.getUserName(), projectName, releaseState);
@@ -256,12 +374,12 @@ public class ProcessDefinitionController extends BaseController {
/**
* query datail of process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processId process definition id
* @return process definition detail
*/
@ApiOperation(value = "queryProcessDefinitionById", notes= "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiOperation(value = "queryProcessDefinitionById", notes = "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@@ -269,8 +387,8 @@ public class ProcessDefinitionController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR)
public Result queryProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processId") Integer processId
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processId") Integer processId
) {
logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
@@ -281,7 +399,7 @@ public class ProcessDefinitionController extends BaseController {
/**
* query Process definition list
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @return process definition list
*/
@@ -290,7 +408,7 @@ public class ProcessDefinitionController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
public Result queryProcessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName
) {
logger.info("query process definition list, login user:{}, project name:{}",
loginUser.getUserName(), projectName);
@@ -301,15 +419,15 @@ public class ProcessDefinitionController extends BaseController {
/**
* query process definition list paging
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
@ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
@ApiOperation(value = "queryProcessDefinitionListPaging", notes = "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"),
@@ -338,10 +456,10 @@ public class ProcessDefinitionController extends BaseController {
/**
* encapsulation treeview structure
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param limit limit
* @param id process definition id
* @param limit limit
* @return tree view json data
*/
@ApiOperation(value = "viewTree", notes = "VIEW_TREE_NOTES")
@@ -363,8 +481,8 @@ public class ProcessDefinitionController extends BaseController {
/**
* get tasks list by process definition id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return task list
*/
@@ -388,8 +506,8 @@ public class ProcessDefinitionController extends BaseController {
/**
* get tasks list by process definition id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIdList process definition id list
* @return node list data
*/
@@ -414,8 +532,8 @@ public class ProcessDefinitionController extends BaseController {
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@@ -439,8 +557,8 @@ public class ProcessDefinitionController extends BaseController {
/**
* batch delete process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition id list
* @return delete result code
*/
@@ -489,13 +607,13 @@ public class ProcessDefinitionController extends BaseController {
/**
* batch export process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param response response
* @param response response
*/
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes= "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes = "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
})
@@ -526,7 +644,7 @@ public class ProcessDefinitionController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
public Result queryProcessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId) {
@RequestParam("projectId") Integer projectId) {
logger.info("query process definition list, login user:{}, project id:{}",
loginUser.getUserName(), projectId);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId);

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (337)

@@ -14,14 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.enums;
import org.springframework.context.i18n.LocaleContextHolder;
package org.apache.dolphinscheduler.api.enums;
import java.util.Locale;
import org.springframework.context.i18n.LocaleContextHolder;
/**
* status enum
* status enum
*/
public enum Status {
@@ -32,15 +33,15 @@ public enum Status {
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"),
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"),
USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"),
USER_NAME_NULL(10004,"user name is null", "用户名不能为空"),
USER_NAME_NULL(10004, "user name is null", "用户名不能为空"),
HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"),
TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"),
TENANT_NAME_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"),
USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"),
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"),
ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"),
USER_NAME_PASSWD_ERROR(10013,"user name or password error", "用户名或密码错误"),
LOGIN_SESSION_FAILED(10014,"create session failed!", "创建session失败"),
USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"),
LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"),
DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"),
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"),
TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"),
@@ -53,105 +54,105 @@ public enum Status {
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"),
MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"),
SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"),
CREATE_ALERT_GROUP_ERROR(10027,"create alert group error", "创建告警组错误"),
QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error", "查询告警组错误"),
LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error", "分页查询告警组错误"),
UPDATE_ALERT_GROUP_ERROR(10030,"update alert group error", "更新告警组错误"),
DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error", "删除告警组错误"),
ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error", "告警组授权用户错误"),
CREATE_DATASOURCE_ERROR(10033,"create datasource error", "创建数据源错误"),
UPDATE_DATASOURCE_ERROR(10034,"update datasource error", "更新数据源错误"),
QUERY_DATASOURCE_ERROR(10035,"query datasource error", "查询数据源错误"),
CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure", "建立数据源连接失败"),
CONNECTION_TEST_FAILURE(10037,"connection test failure", "测试数据源连接失败"),
DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure", "删除数据源失败"),
VERIFY_DATASOURCE_NAME_FAILURE(10039,"verify datasource name failure", "验证数据源名称失败"),
UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource", "未经授权的数据源"),
AUTHORIZED_DATA_SOURCE(10041,"authorized data source", "授权数据源失败"),
LOGIN_SUCCESS(10042,"login success", "登录成功"),
USER_LOGIN_FAILURE(10043,"user login failure", "用户登录失败"),
LIST_WORKERS_ERROR(10044,"list workers error", "查询worker列表错误"),
LIST_MASTERS_ERROR(10045,"list masters error", "查询master列表错误"),
UPDATE_PROJECT_ERROR(10046,"update project error", "更新项目信息错误"),
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error", "查询项目详细信息错误"),
CREATE_PROJECT_ERROR(10048,"create project error", "创建项目错误"),
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error", "分页查询项目列表错误"),
DELETE_PROJECT_ERROR(10050,"delete project error", "删除项目错误"),
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error", "查询未授权项目错误"),
QUERY_AUTHORIZED_PROJECT(10052,"query authorized project", "查询授权项目错误"),
QUERY_QUEUE_LIST_ERROR(10053,"query queue list error", "查询队列列表错误"),
CREATE_RESOURCE_ERROR(10054,"create resource error", "创建资源错误"),
UPDATE_RESOURCE_ERROR(10055,"update resource error", "更新资源错误"),
QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error", "查询资源列表错误"),
QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging", "分页查询资源列表错误"),
DELETE_RESOURCE_ERROR(10058,"delete resource error", "删除资源错误"),
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error", "资源名称或类型验证错误"),
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file online error", "查看资源文件错误"),
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error", "创建资源文件错误"),
RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty", "资源文件内容不能为空"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error", "更新资源文件错误"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error", "下载资源文件错误"),
CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error", "创建UDF函数错误"),
VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error", "查询UDF函数错误"),
UPDATE_UDF_FUNCTION_ERROR(10067,"update udf function error", "更新UDF函数错误"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error", "分页查询UDF函数列表错误"),
QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error", "查询数据源信息错误"),
VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error", "UDF函数名称验证错误"),
DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error", "删除UDF函数错误"),
AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error", "授权资源文件错误"),
AUTHORIZE_RESOURCE_TREE( 10073,"authorize resource tree display error","授权资源目录树错误"),
UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error", "查询未授权UDF函数错误"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error", "授权UDF函数错误"),
CREATE_SCHEDULE_ERROR(10076,"create schedule error", "创建调度配置错误"),
UPDATE_SCHEDULE_ERROR(10077,"update schedule error", "更新调度配置错误"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error", "上线调度配置错误"),
OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error", "下线调度配置错误"),
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error", "分页查询调度配置列表错误"),
QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error", "查询调度配置列表错误"),
QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error", "分页查询任务列表错误"),
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error", "分页查询任务记录错误"),
CREATE_TENANT_ERROR(10084,"create tenant error", "创建租户错误"),
QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error", "分页查询租户列表错误"),
QUERY_TENANT_LIST_ERROR(10086,"query tenant list error", "查询租户列表错误"),
UPDATE_TENANT_ERROR(10087,"update tenant error", "更新租户错误"),
DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error", "删除租户错误"),
VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error", "租户编码验证错误"),
CREATE_USER_ERROR(10090,"create user error", "创建用户错误"),
QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error", "分页查询用户列表错误"),
UPDATE_USER_ERROR(10092,"update user error", "更新用户错误"),
DELETE_USER_BY_ID_ERROR(10093,"delete user by id error", "删除用户错误"),
GRANT_PROJECT_ERROR(10094,"grant project error", "授权项目错误"),
GRANT_RESOURCE_ERROR(10095,"grant resource error", "授权资源错误"),
GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error", "授权UDF函数错误"),
GRANT_DATASOURCE_ERROR(10097,"grant datasource error", "授权数据源错误"),
GET_USER_INFO_ERROR(10098,"get user info error", "获取用户信息错误"),
USER_LIST_ERROR(10099,"user list error", "查询用户列表错误"),
VERIFY_USERNAME_ERROR(10100,"verify username error", "用户名验证错误"),
UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error", "查询未授权用户错误"),
AUTHORIZED_USER_ERROR(10102,"authorized user error", "查询授权用户错误"),
QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error", "查询任务实例日志错误"),
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error", "下载任务日志文件错误"),
CREATE_PROCESS_DEFINITION(10105,"create process definition", "创建工作流错误"),
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error", "工作流名称已存在"),
UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error", "更新工作流定义错误"),
RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error", "上线工作流错误"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error", "查询工作流详细信息错误"),
QUERY_PROCESS_DEFINITION_LIST(10110,"query process definition list", "查询工作流列表错误"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error", "查询工作流树形图数据错误"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error", "查询工作流定义节点信息错误"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error", "分页查询工作流实例列表错误"),
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error", "查询任务实例列表错误"),
UPDATE_PROCESS_INSTANCE_ERROR(10115,"update process instance error", "更新工作流实例错误"),
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance by id error", "查询工作流实例错误"),
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error", "删除工作流实例错误"),
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error", "查询子流程任务实例错误"),
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error", "查询工作流自定义变量信息错误"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query process definition list paging error", "分页查询工作流定义列表错误"),
SIGN_OUT_ERROR(10123,"sign out error", "退出错误"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists", "租户编码已存在"),
IP_IS_EMPTY(10125,"ip is empty", "IP地址不能为空"),
CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"),
QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"),
LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"),
UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"),
DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"),
ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"),
CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"),
UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"),
QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"),
CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"),
CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"),
DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"),
VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", "验证数据源名称失败"),
UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"),
AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"),
LOGIN_SUCCESS(10042, "login success", "登录成功"),
USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"),
LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"),
LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"),
UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"),
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error", "查询项目详细信息错误"),
CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"),
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error", "分页查询项目列表错误"),
DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"),
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"),
QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"),
QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"),
CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"),
UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"),
QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"),
QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"),
DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"),
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error", "资源名称或类型验证错误"),
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"),
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"),
RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"),
CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"),
VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"),
UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"),
QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"),
VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"),
DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"),
AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"),
AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"),
UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"),
CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"),
UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"),
OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"),
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", "分页查询调度配置列表错误"),
QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"),
QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"),
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"),
CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"),
QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"),
QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"),
UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"),
DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"),
VERIFY_TENANT_CODE_ERROR(10089, "verify tenant code error", "租户编码验证错误"),
CREATE_USER_ERROR(10090, "create user error", "创建用户错误"),
QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"),
UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"),
DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"),
GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"),
GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"),
GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"),
GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"),
GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"),
USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"),
VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"),
UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"),
AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"),
QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"),
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"),
CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"),
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流名称已存在"),
UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"),
RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"),
QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"),
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"),
UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"),
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"),
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"),
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"),
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"),
SIGN_OUT_ERROR(10123, "sign out error", "退出错误"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124, "tenant code has already exists", "租户编码已存在"),
IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"),
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"),
CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"),
QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"),
@@ -159,29 +160,40 @@ public enum Status {
QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"),
UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"),
NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"),
VERIFY_QUEUE_ERROR(10133,"verify queue error", "验证队列信息错误"),
NAME_NULL(10134,"name must be not null", "名称不能为空"),
VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"),
NAME_NULL(10134, "name must be not null", "名称不能为空"),
NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"),
SAVE_ERROR(10136, "save error", "保存错误"),
DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"),
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"),
USER_DISABLED(10148,"The current user is disabled", "当前用户已停用"),
COPY_PROCESS_DEFINITION_ERROR(10149,"copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
MOVE_PROCESS_DEFINITION_ERROR(10150,"move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
QUERY_USER_CREATED_PROJECT_ERROR(10151,"query user created project error error", "查询用户创建的项目错误"),
PROCESS_DEFINITION_IDS_IS_EMPTY(10152,"process definition ids is empty", "工作流IDS不能为空"),
BATCH_COPY_PROCESS_DEFINITION_ERROR(10153,"batch copy process definition error", "复制工作流错误"),
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10154,"batch move process definition error", "移动工作流错误"),
QUERY_WORKFLOW_LINEAGE_ERROR(10155,"query workflow lineage error", "查询血缘失败"),
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"),
USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"),
COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"),
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152
, "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"),
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153
, "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"),
QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"),
QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR(10155
, "query process definition versions error: [page number:{0}] < 1 or [page size:{1}] < 1", "查询工作流历史版本出错:[pageNo:{0}] < 1 或 [pageSize:{1}] < 1"),
DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"),
QUERY_USER_CREATED_PROJECT_ERROR(10157, "query user created project error error", "查询用户创建的项目错误"),
PROCESS_DEFINITION_IDS_IS_EMPTY(10158, "process definition ids is empty", "工作流IDS不能为空"),
BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"),
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"),
QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
@@ -194,10 +206,10 @@ public enum Status {
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"),
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}","udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition","资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist","父资源文件不存在"),
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource","请检查任务节点并移除无权限或者已删除的资源"),
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"),
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"),
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
@@ -214,52 +226,51 @@ public enum Status {
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"),
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."),
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error", "查询各状态任务实例数错误"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error", "查询各状态流程实例数错误"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error", "查询各用户流程定义数错误"),
START_PROCESS_INSTANCE_ERROR(50014,"start process instance error", "运行工作流实例错误"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error", "检查工作流实例错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node {0} parameter invalid", "流程节点[{0}]参数无效"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"),
START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "检查工作流实例错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024,"delete schedule by id error", "删除调度配置错误"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error", "批量删除工作流定义错误"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error", "导出工作流定义错误"),
BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028,"batch export process definition by ids error", "批量导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error", "导入工作流定义错误"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022, "delete process definition by id error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026, "batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"),
BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"),
HDFS_NOT_STARTUP(60001,"hdfs not startup", "hdfs未启用"),
HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"),
/**
* for monitor
*/
QUERY_DATABASE_STATE_ERROR(70001,"query database state error", "查询数据库状态错误"),
QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error", "查询zookeeper状态错误"),
QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"),
QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"),
CREATE_ACCESS_TOKEN_ERROR(70010,"create access token error", "创建访问token错误"),
GENERATE_TOKEN_ERROR(70011,"generate token error", "生成token错误"),
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012,"query access token list paging error", "分页查询访问token列表错误"),
UPDATE_ACCESS_TOKEN_ERROR(70013,"update access token error", "更新访问token错误"),
DELETE_ACCESS_TOKEN_ERROR(70014,"delete access token error", "删除访问token错误"),
CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"),
GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"),
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012, "query access token list paging error", "分页查询访问token列表错误"),
UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"),
DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"),
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"),
COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error", "查询各状态任务实例数错误"),
NEGTIVE_SIZE_NUMBER_ERROR(80002,"query size number error","查询size错误"),
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003,"start time bigger than end time error","开始时间在结束时间之后错误"),
QUEUE_COUNT_ERROR(90001,"queue count error", "查询队列数据错误"),
COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"),
NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"),
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"),
QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"),
KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error", "获取kerberos启动状态错误"),
KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"),
;
private final int code;
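
For orientation, every Status constant above pairs an integer code with an English and a Chinese message, and the services in this patch surface one of them through the putMsg helper. A minimal sketch of that pattern, reusing the DELETE_PROCESS_DEFINITION_VERSION_ERROR entry from the diff above; the constructor and accessors are assumptions modelled on the surrounding file, not copied from it:

public enum StatusSketch {

    // entry taken from the Status diff above; all other entries omitted
    DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156,
            "delete process definition version error", "删除工作流历史版本出错");

    private final int code;
    private final String enMsg;
    private final String zhMsg;

    StatusSketch(int code, String enMsg, String zhMsg) {
        this.code = code;
        this.enMsg = enMsg;
        this.zhMsg = zhMsg;
    }

    public int getCode() {
        return code;
    }

    // assumption: the real getMsg() may pick the message by locale; English is returned here
    public String getMsg() {
        return enMsg;
    }
}

A service then reports the failure with the pattern that appears throughout the new code, e.g. putMsg(result, Status.DELETE_PROCESS_DEFINITION_VERSION_ERROR).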

113
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

@ -14,13 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.multipart.MultipartFile;
import com.fasterxml.jackson.core.JsonProcessingException;
/**
@ -31,13 +36,13 @@ public interface ProcessDefinitionService {
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
* @throws JsonProcessingException JsonProcessingException
*/
@ -52,7 +57,7 @@ public interface ProcessDefinitionService {
/**
* query process definition list
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
@ -62,12 +67,12 @@ public interface ProcessDefinitionService {
/**
* query process definition list paging
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
Map<String, Object> queryProcessDefinitionListPaging(User loginUser,
@ -80,9 +85,9 @@ public interface ProcessDefinitionService {
/**
* query detail of process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processId process definition id
* @return process definition detail
*/
@ -92,41 +97,41 @@ public interface ProcessDefinitionService {
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
* @return
*/
Map<String, Object> batchCopyProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId);
String projectName,
String processDefinitionIds,
int targetProjectId);
/**
* batch move process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
* @return
*/
Map<String, Object> batchMoveProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId);
String projectName,
String processDefinitionIds,
int targetProjectId);
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
Map<String, Object> updateProcessDefinition(User loginUser,
@ -139,9 +144,9 @@ public interface ProcessDefinitionService {
/**
* verify process definition name unique
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
Map<String, Object> verifyProcessDefinitionName(User loginUser,
@ -151,8 +156,8 @@ public interface ProcessDefinitionService {
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@ -163,9 +168,9 @@ public interface ProcessDefinitionService {
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param releaseState release state
* @return release result code
*/
@ -177,21 +182,21 @@ public interface ProcessDefinitionService {
/**
* batch export process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param response http servlet response
* @param response http servlet response
*/
void batchExportProcessDefinitionByIds(User loginUser,
String projectName,
String processDefinitionIds,
HttpServletResponse response);
String projectName,
String processDefinitionIds,
HttpServletResponse response);
/**
* import process definition
*
* @param loginUser login user
* @param file process metadata json file
* @param loginUser login user
* @param file process metadata json file
* @param currentProjectName current project name
* @return import process
*/
@ -202,7 +207,7 @@ public interface ProcessDefinitionService {
/**
* check the process definition node meets the specifications
*
* @param processData process data
* @param processData process data
* @param processDefinitionJson process definition json
* @return check result code
*/
@ -237,11 +242,23 @@ public interface ProcessDefinitionService {
* Encapsulates the TreeView structure
*
* @param processId process definition id
* @param limit limit
* @param limit limit
* @return tree view json data
* @throws Exception exception
*/
Map<String, Object> viewTree(Integer processId,
Integer limit) throws Exception;
/**
* switch the version of the given process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param version the version user want to switch
* @return switch process definition version result code
*/
Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
, int processDefinitionId, long version);
}
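
The interface now closes with the switchProcessDefinitionVersion contract added by this patch. A hedged caller-side sketch, relying only on that signature and on the result-map convention (Constants.STATUS carrying a Status value) used elsewhere in the diff; the wrapper class itself is hypothetical:

import java.util.Map;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;

public class SwitchVersionCaller {

    private final ProcessDefinitionService processDefinitionService;

    public SwitchVersionCaller(ProcessDefinitionService processDefinitionService) {
        // in the application this service is injected by Spring
        this.processDefinitionService = processDefinitionService;
    }

    public boolean switchTo(User loginUser, String projectName, int processDefinitionId, long version) {
        Map<String, Object> result = processDefinitionService
                .switchProcessDefinitionVersion(loginUser, projectName, processDefinitionId, version);
        // by the convention in this patch, Constants.STATUS holds the Status of the operation
        return result.get(Constants.STATUS) == Status.SUCCESS;
    }
}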

70
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java

@ -0,0 +1,70 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
public interface ProcessDefinitionVersionService {
/**
* add the newest version of one process definition
*
* @param processDefinition the process definition to record a new version for
* @return the newest version number of this process definition
*/
long addProcessDefinitionVersion(ProcessDefinition processDefinition);
/**
* query paginated version info for a certain process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId process definition id
* @return the paginated versions of the given process definition
*/
Map<String, Object> queryProcessDefinitionVersions(User loginUser, String projectName,
int pageNo, int pageSize, int processDefinitionId);
/**
* query one certain process definition version by version number and process definition id
*
* @param processDefinitionId process definition id
* @param version version number
* @return the process definition version info
*/
ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId,
long version);
/**
* delete one certain process definition version by version number and process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param processDefinitionId process definition id
* @param version version number
* @return delete result code
*/
Map<String, Object> deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName,
int processDefinitionId, long version);
}
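
Taken together, the four methods above cover the whole version lifecycle: record the latest state, page through the history, load one version, and delete one. A small sketch of that sequence against this interface; the concrete service, definition and user are assumed to come from the surrounding application (Spring wiring and the DAO layer), so this is an illustration rather than the project's own code:

import java.util.Map;

import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.User;

public final class VersionLifecycleSketch {

    private VersionLifecycleSketch() {
    }

    public static void walkThrough(ProcessDefinitionVersionService versionService,
                                   ProcessDefinition definition,
                                   User loginUser,
                                   String projectName) {
        // 1. snapshot the current definition; the implementation returns max(version) + 1
        long newVersion = versionService.addProcessDefinitionVersion(definition);

        // 2. page through the history of this definition (first page, ten entries per page)
        Map<String, Object> history = versionService.queryProcessDefinitionVersions(
                loginUser, projectName, 1, 10, definition.getId());

        // 3. load the snapshot that was just written
        ProcessDefinitionVersion snapshot = versionService
                .queryByProcessDefinitionIdAndVersion(definition.getId(), newVersion);

        // 4. remove it again
        Map<String, Object> deleteResult = versionService.deleteByProcessDefinitionIdAndVersion(
                loginUser, projectName, definition.getId(), newVersion);
    }
}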

99
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java

@ -14,8 +14,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
@ -24,19 +24,6 @@ import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
@ -72,6 +59,21 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -108,6 +110,9 @@ public class ProcessInstanceService extends BaseService {
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
ExecutorService execService;
@ -118,18 +123,11 @@ public class ProcessInstanceService extends BaseService {
LoggerService loggerService;
@Autowired
UsersService usersService;
/**
* return top n SUCCESS process instance order by running time which started between startTime and endTime
* @param loginUser
* @param projectName
* @param size
* @param startTime
* @param endTime
* @return
*/
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) {
Map<String, Object> result = new HashMap<>();
@ -155,7 +153,7 @@ public class ProcessInstanceService extends BaseService {
return result;
}
Date end = DateUtils.stringToDate(endTime);
if(start == null || end == null) {
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
return result;
}
@ -169,6 +167,7 @@ public class ProcessInstanceService extends BaseService {
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by id
*
@ -214,7 +213,7 @@ public class ProcessInstanceService extends BaseService {
*/
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId,
String startDate, String endDate,
String searchVal, String executorName,ExecutionStatus stateType, String host,
String searchVal, String executorName, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
@ -246,18 +245,18 @@ public class ProcessInstanceService extends BaseService {
return result;
}
Page<ProcessInstance> page = new Page(pageNo, pageSize);
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList =
processInstanceMapper.queryProcessInstanceListPaging(page,
project.getId(), processDefineId, searchVal, executorId,statusArray, host, start, end);
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
for(ProcessInstance processInstance: processInstances){
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(),processInstance.getEndTime()));
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = usersService.queryUser(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
@ -271,8 +270,6 @@ public class ProcessInstanceService extends BaseService {
return result;
}
/**
* query task list by process instance id
*
@ -305,14 +302,13 @@ public class ProcessInstanceService extends BaseService {
/**
* add dependent result for dependent task
* @param taskInstanceList
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for(TaskInstance taskInstance: taskInstanceList){
if(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())){
for (TaskInstance taskInstance : taskInstanceList) {
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) {
Result logResult = loggerService.queryLog(
taskInstance.getId(), 0, 4098);
if(logResult.getCode() == Status.SUCCESS.ordinal()){
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = (String) logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
@ -321,24 +317,24 @@ public class ProcessInstanceService extends BaseService {
}
}
public Map<String,DependResult> parseLogForDependentResult(String log) throws IOException {
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if(StringUtils.isEmpty(log)){
if (StringUtils.isEmpty(log)) {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
if(line.contains(DEPENDENT_SPLIT)){
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if(tmpStringArray.length != 2){
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if(dependStringArray.length != 2){
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
@ -349,7 +345,6 @@ public class ProcessInstanceService extends BaseService {
return resultMap;
}
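
parseLogForDependentResult above identifies relevant lines via DEPENDENT_SPLIT, splits them once on ":||" and once on ",", and keeps the trimmed task name as the key. A hedged illustration of the kind of input that satisfies those splits; the exact prefix a worker writes is not shown in this diff, and the mapping of the second field onto DependResult happens in code trimmed from the hunk, so both are assumptions:

import java.io.IOException;
import java.util.Map;

import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.common.enums.DependResult;

public class DependentLogParseSketch {

    public static void main(String[] args) throws IOException {
        // only the ":||" and "name,result" structure is taken from the parser above;
        // the surrounding wording of each line is invented for illustration
        String sampleLog = "dependent item complete :|| taskA,SUCCESS\n"
                + "dependent item complete :|| taskB,FAILED\n"
                + "some unrelated log line\n";

        ProcessInstanceService service = new ProcessInstanceService();
        Map<String, DependResult> parsed = service.parseLogForDependentResult(sampleLog);

        // expected, assuming the trimmed code maps the second field to a DependResult value:
        // {taskA=SUCCESS, taskB=FAILED}
        System.out.println(parsed);
    }
}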
/**
* query sub process instance detail info by task id
*
@ -462,7 +457,7 @@ public class ProcessInstanceService extends BaseService {
processInstance.setTimeout(timeout);
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
processDefinition.getUserId());
if(tenant != null){
if (tenant != null) {
processInstance.setTenantCode(tenant.getTenantCode());
}
processInstance.setProcessInstanceJson(processInstanceJson);
@ -477,6 +472,11 @@ public class ProcessInstanceService extends BaseService {
processDefinition.setLocations(locations);
processDefinition.setConnects(connects);
processDefinition.setTimeout(timeout);
processDefinition.setUpdateTime(new Date());
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
processDefinition.setVersion(version);
updateDefine = processDefineMapper.updateById(processDefinition);
}
if (update > 0 && updateDefine > 0) {
@ -485,7 +485,6 @@ public class ProcessInstanceService extends BaseService {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
return result;
}
@ -532,6 +531,7 @@ public class ProcessInstanceService extends BaseService {
/**
* delete process instance by id, and at the same time delete its task instances and their mapping relation data
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
@ -554,13 +554,10 @@ public class ProcessInstanceService extends BaseService {
return result;
}
processService.removeTaskLogFile(processInstanceId);
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
@ -592,7 +589,6 @@ public class ProcessInstanceService extends BaseService {
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String workflowInstanceJson = processInstance.getProcessInstanceJson();
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);
@ -603,10 +599,9 @@ public class ProcessInstanceService extends BaseService {
List<Property> globalParams = new ArrayList<>();
if (userDefinedParams != null && userDefinedParams.length() > 0) {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
List<TaskNode> taskNodeList = workflowData.getTasks();
// global param string
@ -618,7 +613,7 @@ public class ProcessInstanceService extends BaseService {
}
// local params
Map<String, Map<String,Object>> localUserDefParams = new HashMap<>();
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
Map<String, String> map = JSONUtils.toMap(parameter);
@ -627,9 +622,9 @@ public class ProcessInstanceService extends BaseService {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String,Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType",taskNode.getType());
localParamsMap.put("localParamsList",localParamsList);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType", taskNode.getType());
localParamsMap.put("localParamsList", localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}

90
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
@ -56,6 +57,7 @@ import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.TaskParametersUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
@ -125,6 +127,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@ -202,8 +207,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
// save the new process definition
processDefineMapper.insert(processDefine);
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
processDefine.setVersion(version);
processDefineMapper.updateVersionByProcessDefinitionId(processDefine.getId(), version);
// return processDefinition object with ID
result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId()));
putMsg(result, Status.SUCCESS);
@ -239,7 +253,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return sb.toString();
}
/**
* query process definition list
*
@ -265,7 +278,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return result;
}
/**
* query process definition list paging
*
@ -311,7 +323,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
*/
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
@ -398,9 +409,14 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
processDefine.setGlobalParamList(globalParamsList);
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
processDefine.setVersion(version);
if (processDefineMapper.updateById(processDefine) > 0) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefineMapper.queryByDefineId(id));
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
@ -1058,7 +1074,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
}
}
/**
* check the process definition node meets the specifications
*
@ -1127,7 +1142,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return result;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
@ -1185,7 +1199,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
}
/**
* query process definition all by project id
*
@ -1277,7 +1290,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
TaskNode taskNode = dag.getNode(nodeName);
treeViewDto.setType(taskNode.getType());
//set treeViewDto instances
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
@ -1334,7 +1346,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return result;
}
/**
* Generate the DAG Graph based on the process definition id
*
@ -1360,7 +1371,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return new DAG<>();
}
/**
* whether the graph has a ring
*
@ -1525,6 +1535,66 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return result;
}
/**
* switch the version of the given process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param version the version user want to switch
* @return switch process definition version result code
*/
@Override
public Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
, int processDefinitionId, long version) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (Objects.isNull(processDefinition)) {
putMsg(result
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR
, processDefinitionId);
return result;
}
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
if (Objects.isNull(processDefinitionVersion)) {
putMsg(result
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR
, processDefinitionId
, version);
return result;
}
processDefinition.setVersion(processDefinitionVersion.getVersion());
processDefinition.setProcessDefinitionJson(processDefinitionVersion.getProcessDefinitionJson());
processDefinition.setDescription(processDefinitionVersion.getDescription());
processDefinition.setLocations(processDefinitionVersion.getLocations());
processDefinition.setConnects(processDefinitionVersion.getConnects());
processDefinition.setTimeout(processDefinitionVersion.getTimeout());
processDefinition.setGlobalParams(processDefinitionVersion.getGlobalParams());
processDefinition.setUpdateTime(new Date());
processDefinition.setReceivers(processDefinitionVersion.getReceivers());
processDefinition.setReceiversCc(processDefinitionVersion.getReceiversCc());
processDefinition.setResourceIds(processDefinitionVersion.getResourceIds());
if (processDefineMapper.updateById(processDefinition) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
}
return result;
}
/**
* do batch move process definition
*

181
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java

@ -0,0 +1,181 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.ImmutableMap;
@Service
public class ProcessDefinitionVersionServiceImpl extends BaseService implements
ProcessDefinitionVersionService {
@Autowired
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProjectMapper projectMapper;
/**
* add the newest version of one process definition
*
* @param processDefinition the process definition to record a new version for
* @return the newest version number of this process definition
*/
public long addProcessDefinitionVersion(ProcessDefinition processDefinition) {
long version = this.queryMaxVersionByProcessDefinitionId(processDefinition.getId()) + 1;
ProcessDefinitionVersion processDefinitionVersion = ProcessDefinitionVersion
.newBuilder()
.processDefinitionId(processDefinition.getId())
.version(version)
.processDefinitionJson(processDefinition.getProcessDefinitionJson())
.description(processDefinition.getDescription())
.locations(processDefinition.getLocations())
.connects(processDefinition.getConnects())
.timeout(processDefinition.getTimeout())
.globalParams(processDefinition.getGlobalParams())
.createTime(processDefinition.getUpdateTime())
.receivers(processDefinition.getReceivers())
.receiversCc(processDefinition.getReceiversCc())
.resourceIds(processDefinition.getResourceIds())
.build();
processDefinitionVersionMapper.insert(processDefinitionVersion);
return version;
}
/**
* query the max version number by the process definition id
*
* @param processDefinitionId process definition id
* @return the max version number of this id
*/
private long queryMaxVersionByProcessDefinitionId(int processDefinitionId) {
Long maxVersion = processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId(processDefinitionId);
if (Objects.isNull(maxVersion)) {
return 0L;
} else {
return maxVersion;
}
}
/**
* query paginated version info for a certain process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId process definition id
* @return the paginated versions of the given process definition
*/
public Map<String, Object> queryProcessDefinitionVersions(User loginUser, String projectName, int pageNo, int pageSize, int processDefinitionId) {
Map<String, Object> result = new HashMap<>();
// check whether pageNo or pageSize is less than 1
if (pageNo <= 0 || pageSize <= 0) {
putMsg(result
, Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
, pageNo
, pageSize);
return result;
}
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
PageInfo<ProcessDefinitionVersion> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<ProcessDefinitionVersion> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinitionVersion> processDefinitionVersionsPaging = processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, processDefinitionId);
List<ProcessDefinitionVersion> processDefinitionVersions = processDefinitionVersionsPaging.getRecords();
pageInfo.setLists(processDefinitionVersions);
pageInfo.setTotalCount((int) processDefinitionVersionsPaging.getTotal());
return ImmutableMap.of(
Constants.MSG, Status.SUCCESS.getMsg()
, Constants.STATUS, Status.SUCCESS
, Constants.DATA_LIST, pageInfo);
}
/**
* query one certain process definition version by version number and process definition id
*
* @param processDefinitionId process definition id
* @param version version number
* @return the process definition version info
*/
public ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId, long version) {
return processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
}
/**
* delete one certain process definition version by version number and process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param processDefinitionId process definition id
* @param version version number
* @return delete result code
*/
public Map<String, Object> deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName, int processDefinitionId, long version) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version);
putMsg(result, Status.SUCCESS);
return result;
}
}
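
Two details of the implementation above are easy to miss: version numbers are simply max(version) + 1 per definition, starting at 1 when no history exists, and queryProcessDefinitionVersions returns an ImmutableMap whose DATA_LIST entry is the PageInfo filled via setLists/setTotalCount. A hedged consumer sketch; the PageInfo getters are assumed to mirror the setters used above:

import java.util.List;
import java.util.Map;

import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.User;

public class VersionHistoryReader {

    private final ProcessDefinitionVersionService versionService;

    public VersionHistoryReader(ProcessDefinitionVersionService versionService) {
        this.versionService = versionService;
    }

    @SuppressWarnings("unchecked")
    public List<ProcessDefinitionVersion> firstPage(User loginUser, String projectName, int processDefinitionId) {
        Map<String, Object> result = versionService.queryProcessDefinitionVersions(
                loginUser, projectName, 1, 10, processDefinitionId);

        // DATA_LIST carries the PageInfo built in the implementation above
        PageInfo<ProcessDefinitionVersion> pageInfo =
                (PageInfo<ProcessDefinitionVersion>) result.get(Constants.DATA_LIST);

        // assumption: getLists() mirrors the setLists() call made by the service
        return pageInfo.getLists();
    }
}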

4
dolphinscheduler-api/src/main/resources/i18n/messages.properties

@ -258,3 +258,7 @@ COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
TARGET_PROJECT_ID= target project id
IS_COPY = is copy
DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version
VERSION=version

4
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties

@ -258,3 +258,7 @@ COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
TARGET_PROJECT_ID= target project id
IS_COPY = is copy
DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version
VERSION=version

5
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties

@ -256,4 +256,7 @@ COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义
MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义
TARGET_PROJECT_ID= 目标项目ID
IS_COPY = 是否复制
DELETE_PROCESS_DEFINITION_VERSION_NOTES=删除流程历史版本
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=查询流程历史版本信息
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本
VERSION=版本号

217
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java

@ -14,10 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
@ -25,8 +26,18 @@ import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -38,12 +49,6 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mock.web.MockHttpServletResponse;
import javax.servlet.http.HttpServletResponse;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* process definition controller test
@ -59,10 +64,13 @@ public class ProcessDefinitionControllerTest {
@Mock
private ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private ProcessDefinitionVersionService processDefinitionVersionService;
protected User user;
@Before
public void before(){
public void before() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
@ -73,7 +81,11 @@ public class ProcessDefinitionControllerTest {
@Test
public void testCreateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\""
+ ":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\"
+ "necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\""
+ ",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
@ -82,14 +94,14 @@ public class ProcessDefinitionControllerTest {
String connects = "[]";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
result.put("processDefinitionId", 1);
Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
@ -109,17 +121,21 @@ public class ProcessDefinitionControllerTest {
String projectName = "test";
String name = "dag_test";
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user,projectName,name)).thenReturn(result);
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user, projectName, name)).thenReturn(result);
Result response = processDefinitionController.verifyProcessDefinitionName(user,projectName,name);
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),response.getCode().intValue());
Result response = processDefinitionController.verifyProcessDefinitionName(user, projectName, name);
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(), response.getCode().intValue());
}
@Test
public void updateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\""
+ ",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"}"
+ ",\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
@ -128,14 +144,14 @@ public class ProcessDefinitionControllerTest {
int id = 1;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
result.put("processDefinitionId", 1);
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id,name, json,
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id, name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name,id, json,
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name, id, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@ -145,15 +161,19 @@ public class ProcessDefinitionControllerTest {
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result);
Result response = processDefinitionController.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal());
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal())).thenReturn(result);
Result response = processDefinitionController.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionById() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":"
+ "\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
@ -174,10 +194,10 @@ public class ProcessDefinitionControllerTest {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName,id);
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName, id)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@ -190,10 +210,10 @@ public class ProcessDefinitionControllerTest {
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName,id,targetProjectId);
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName, id, targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@ -206,35 +226,37 @@ public class ProcessDefinitionControllerTest {
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result);
Result response = processDefinitionController.moveProcessDefinition(user, projectName,id,targetProjectId);
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
Result response = processDefinitionController.moveProcessDefinition(user, projectName, id, targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionList() throws Exception {
String projectName = "test";
List<ProcessDefinition> resourceList = getDefinitionList();
List<ProcessDefinition> resourceList = getDefinitionList();
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, resourceList);
Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionList(user, projectName);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
public List<ProcessDefinition> getDefinitionList(){
public List<ProcessDefinition> getDefinitionList() {
List<ProcessDefinition> resourceList = new ArrayList<>();
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval"
+ "\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
@ -266,7 +288,7 @@ public class ProcessDefinitionControllerTest {
resourceList.add(processDefinition);
resourceList.add(processDefinition2);
return resourceList;
return resourceList;
}
@Test
@ -277,13 +299,13 @@ public class ProcessDefinitionControllerTest {
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName,id);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName, id)).thenReturn(result);
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@Test
public void testGetNodeListByDefinitionId() throws Exception {
String projectName = "test";
int id = 1;
@ -292,9 +314,9 @@ public class ProcessDefinitionControllerTest {
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionId(user,projectName,id);
Result response = processDefinitionController.getNodeListByDefinitionId(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@ -306,57 +328,57 @@ public class ProcessDefinitionControllerTest {
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionIdList(user,projectName,idList);
Result response = processDefinitionController.getNodeListByDefinitionIdList(user, projectName, idList);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionAllByProjectId() throws Exception{
public void testQueryProcessDefinitionAllByProjectId() throws Exception {
int projectId = 1;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user,projectId);
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user, projectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testViewTree() throws Exception{
public void testViewTree() throws Exception {
String projectName = "test";
int processId = 1;
int limit = 2;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.viewTree(processId,limit)).thenReturn(result);
Result response = processDefinitionController.viewTree(user,projectName,processId,limit);
Mockito.when(processDefinitionService.viewTree(processId, limit)).thenReturn(result);
Result response = processDefinitionController.viewTree(user, projectName, processId, limit);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionListPaging() throws Exception{
public void testQueryProcessDefinitionListPaging() throws Exception {
String projectName = "test";
int pageNo = 1;
int pageSize = 10;
String searchVal = "";
int userId = 1;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
result.put(Constants.DATA_LIST,new PageInfo<Resource>(1,10));
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, new PageInfo<Resource>(1, 10));
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user,projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionListPaging(user,projectName,pageNo,searchVal,userId,pageSize);
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user, projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionListPaging(user, projectName, pageNo, searchVal, userId, pageSize);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testBatchExportProcessDefinitionByIds() throws Exception{
public void testBatchExportProcessDefinitionByIds() throws Exception {
String processDefinitionIds = "1,2";
String projectName = "test";
@ -365,4 +387,67 @@ public class ProcessDefinitionControllerTest {
processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
}
@Test
public void testQueryProcessDefinitionVersions() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
resultMap.put(Constants.DATA_LIST, new PageInfo<ProcessDefinitionVersion>(1, 10));
Mockito.when(processDefinitionVersionService.queryProcessDefinitionVersions(
user
, projectName
, 1
, 10
, 1))
.thenReturn(resultMap);
Result result = processDefinitionController.queryProcessDefinitionVersions(
user
, projectName
, 1
, 10
, 1);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void testSwitchProcessDefinitionVersion() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
Mockito.when(processDefinitionService.switchProcessDefinitionVersion(
user
, projectName
, 1
, 10))
.thenReturn(resultMap);
Result result = processDefinitionController.switchProcessDefinitionVersion(
user
, projectName
, 1
, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void testDeleteProcessDefinitionVersion() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
Mockito.when(processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
user
, projectName
, 1
, 10))
.thenReturn(resultMap);
Result result = processDefinitionController.deleteProcessDefinitionVersion(
user
, projectName
, 1
, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
}

37
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
@ -98,6 +99,9 @@ public class ProcessDefinitionServiceTest {
@Mock
private TaskInstanceMapper taskInstanceMapper;
@Mock
private ProcessDefinitionVersionService processDefinitionVersionService;
private static final String SHELL_JSON = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
@ -390,7 +394,11 @@ public class ProcessDefinitionServiceTest {
// instance exit
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\","
+ "\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234"
+ "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\","
+ "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
@ -432,7 +440,11 @@ public class ProcessDefinitionServiceTest {
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\""
+ ",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234"
+ "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\","
+ "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
// check target project result == null
@ -568,14 +580,14 @@ public class ProcessDefinitionServiceTest {
//FIXME has function exit code 1 when exception
//process definition offline
// List<Schedule> schedules = new ArrayList<>();
// Schedule schedule = getSchedule();
// schedules.add(schedule);
// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules);
// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1);
// Map<String, Object> offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
// 46, ReleaseState.OFFLINE.getCode());
// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS));
}
@Test
@ -850,9 +862,12 @@ public class ProcessDefinitionServiceTest {
String projectName = "project_test1";
Project project = getProject(projectName);
ProcessDefinition processDefinition = getProcessDefinition();
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processService.findProcessDefineById(1)).thenReturn(getProcessDefinition());
Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition);
Mockito.when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L);
String sqlDependentJson = "{\n"
+ " \"globalParams\": [\n"

274
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java

@ -0,0 +1,274 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionVersionServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.Lists;
@RunWith(MockitoJUnitRunner.class)
public class ProcessDefinitionVersionServiceTest {
@InjectMocks
private ProcessDefinitionVersionServiceImpl processDefinitionVersionService;
@Mock
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectServiceImpl projectService;
@Test
public void testAddProcessDefinitionVersion() {
long expectedVersion = 5L;
ProcessDefinition processDefinition = getProcessDefinition();
Mockito.when(processDefinitionVersionMapper
.queryMaxVersionByProcessDefinitionId(processDefinition.getId()))
.thenReturn(expectedVersion);
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
Assert.assertEquals(expectedVersion + 1, version);
}
@Test
@SuppressWarnings("unchecked")
public void testQueryProcessDefinitionVersions() {
// pageNo <= 0
int pageNo = -1;
int pageSize = 10;
int processDefinitionId = 66;
String projectName = "project_test1";
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> resultMap1 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
, resultMap1.get(Constants.STATUS));
// pageSize <= 0
pageNo = 1;
pageSize = -1;
Map<String, Object> resultMap2 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
, resultMap2.get(Constants.STATUS));
Map<String, Object> res = new HashMap<>();
putMsg(res, Status.PROJECT_NOT_FOUNT);
Project project = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName))
.thenReturn(project);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(res);
// project auth fail
pageNo = 1;
pageSize = 10;
Map<String, Object> resultMap3 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, resultMap3.get(Constants.STATUS));
putMsg(res, Status.SUCCESS);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(res);
ProcessDefinitionVersion processDefinitionVersion = getProcessDefinitionVersion(getProcessDefinition());
Mockito.when(processDefinitionVersionMapper
.queryProcessDefinitionVersionsPaging(Mockito.any(Page.class), Mockito.eq(processDefinitionId)))
.thenReturn(new Page<ProcessDefinitionVersion>()
.setRecords(Lists.newArrayList(processDefinitionVersion)));
Map<String, Object> resultMap4 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.SUCCESS, resultMap4.get(Constants.STATUS));
Assert.assertEquals(processDefinitionVersion
, ((PageInfo<ProcessDefinitionVersion>) resultMap4.get(Constants.DATA_LIST))
.getLists().get(0));
}
@Test
public void testQueryByProcessDefinitionIdAndVersion() {
ProcessDefinitionVersion expectedProcessDefinitionVersion =
getProcessDefinitionVersion(getProcessDefinition());
int processDefinitionId = 66;
long version = 10;
Mockito.when(processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version))
.thenReturn(expectedProcessDefinitionVersion);
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
Assert.assertEquals(expectedProcessDefinitionVersion, processDefinitionVersion);
}
@Test
public void testDeleteByProcessDefinitionIdAndVersion() {
String projectName = "project_test1";
int processDefinitionId = 66;
long version = 10;
Project project = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName))
.thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
// project auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(new HashMap<>());
Map<String, Object> resultMap1 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
loginUser
, projectName
, processDefinitionId
, version);
Assert.assertEquals(0, resultMap1.size());
Map<String, Object> res = new HashMap<>();
putMsg(res, Status.SUCCESS);
Mockito.when(processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version))
.thenReturn(1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(res);
Map<String, Object> resultMap2 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
loginUser
, projectName
, processDefinitionId
, version);
Assert.assertEquals(Status.SUCCESS, resultMap2.get(Constants.STATUS));
}
/**
* get mock processDefinitionVersion by processDefinition
*
* @return processDefinitionVersion
*/
private ProcessDefinitionVersion getProcessDefinitionVersion(ProcessDefinition processDefinition) {
return ProcessDefinitionVersion
.newBuilder()
.processDefinitionId(processDefinition.getId())
.version(1)
.processDefinitionJson(processDefinition.getProcessDefinitionJson())
.description(processDefinition.getDescription())
.locations(processDefinition.getLocations())
.connects(processDefinition.getConnects())
.timeout(processDefinition.getTimeout())
.globalParams(processDefinition.getGlobalParams())
.createTime(processDefinition.getUpdateTime())
.receivers(processDefinition.getReceivers())
.receiversCc(processDefinition.getReceiversCc())
.resourceIds(processDefinition.getResourceIds())
.build();
}
/**
* get mock processDefinition
*
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(66);
processDefinition.setName("test_pdf");
processDefinition.setProjectId(2);
processDefinition.setTenantId(1);
processDefinition.setDescription("");
return processDefinition;
}
/**
* get mock Project
*
* @param projectName projectName
* @return Project
*/
private Project getProject(String projectName) {
Project project = new Project();
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
}

39
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.mockito.ArgumentMatchers.eq;
@ -87,6 +88,9 @@ public class ProcessInstanceServiceTest {
@Mock
ProcessDefinitionService processDefinitionService;
@Mock
ProcessDefinitionVersionService processDefinitionVersionService;
@Mock
ExecutorService execService;
@ -99,12 +103,11 @@ public class ProcessInstanceServiceTest {
@Mock
UsersService usersService;
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\","
+ "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"},"
+ "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\","
+ "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\","
+ "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
@Test
public void testQueryProcessInstanceList() {
@ -265,19 +268,16 @@ public class ProcessInstanceServiceTest {
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testParseLogForDependentResult() {
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" +
"[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" +
"[root@node2 current]# ";
try {
Map<String, DependResult> resultMap =
processInstanceService.parseLogForDependentResult(logString);
Assert.assertEquals(1, resultMap.size());
} catch (IOException e) {
}
public void testParseLogForDependentResult() throws IOException {
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172]"
+ " - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n"
+ "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172]"
+ " - task : 223_10739_452334 exit status code : 0\n"
+ "[root@node2 current]# ";
Map<String, DependResult> resultMap =
processInstanceService.parseLogForDependentResult(logString);
Assert.assertEquals(1, resultMap.size());
}
@Test
@ -371,6 +371,7 @@ public class ProcessInstanceServiceTest {
when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant);
when(processService.updateProcessInstance(processInstance)).thenReturn(1);
when(processDefinitionService.checkProcessNodeList(Mockito.any(), eq(shellJson))).thenReturn(result);
when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L);
Map<String, Object> processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1,
shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", "");
Assert.assertEquals(Status.UPDATE_PROCESS_INSTANCE_ERROR, processInstanceFinishRes.get(Constants.STATUS));
@ -401,6 +402,7 @@ public class ProcessInstanceServiceTest {
when(projectMapper.queryByName(projectName)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
Map<String, Object> processInstanceNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
@ -559,5 +561,4 @@ public class ProcessInstanceServiceTest {
}
}
}

44
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
@ -35,9 +36,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
@ -53,7 +52,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class ProjectServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class);
@InjectMocks
@ -73,17 +71,6 @@ public class ProjectServiceTest {
private String userName = "ProjectServiceTest";
@Before
public void setUp() {
}
@After
public void after() {
}
@Test
public void testCreateProject() {
@ -105,7 +92,6 @@ public class ProjectServiceTest {
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
@ -148,6 +134,21 @@ public class ProjectServiceTest {
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
Map<String, Object> result2 = new HashMap<>();
result2 = projectService.checkProjectAndAuth(loginUser, null, projectName);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result2.get(Constants.STATUS));
Project project1 = getProject();
// USER_NO_OPERATION_PROJECT_PERM
project1.setUserId(2);
result2 = projectService.checkProjectAndAuth(loginUser, project1, projectName);
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result2.get(Constants.STATUS));
//success
project1.setUserId(1);
projectService.checkProjectAndAuth(loginUser, project1, projectName);
}
@Test
@ -225,7 +226,6 @@ public class ProjectServiceTest {
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
@ -322,7 +322,6 @@ public class ProjectServiceTest {
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
private Project getProject() {
Project project = new Project();
project.setId(1);
@ -337,7 +336,6 @@ public class ProjectServiceTest {
return list;
}
/**
* create admin user
*/
@ -369,13 +367,11 @@ public class ProjectServiceTest {
return list;
}
private String getDesc() {
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" +
".deleteProjectRelation(projectId,userId)projectUserMappe" +
"r.deleteProjectRelation(projectId,userId)projectUserMapper" +
".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe"
+ ".deleteProjectRelation(projectId,userId)projectUserMappe"
+ "r.deleteProjectRelation(projectId,userId)projectUserMapper"
+ ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
}
}

32
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java

@ -14,18 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.fasterxml.jackson.annotation.JsonFormat;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.util.ArrayList;
import java.util.Date;
@ -33,6 +28,13 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
* process definition
@ -54,7 +56,7 @@ public class ProcessDefinition {
/**
* version
*/
private int version;
private long version;
/**
* release state : online/offline
@ -96,13 +98,13 @@ public class ProcessDefinition {
/**
* create time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date createTime;
/**
* update time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date updateTime;
/**
@ -182,11 +184,11 @@ public class ProcessDefinition {
this.name = name;
}
public int getVersion() {
public long getVersion() {
return version;
}
public void setVersion(int version) {
public void setVersion(long version) {
this.version = version;
}
@ -276,9 +278,9 @@ public class ProcessDefinition {
}
public void setGlobalParams(String globalParams) {
if (globalParams == null){
if (globalParams == null) {
this.globalParamList = new ArrayList<>();
}else {
} else {
this.globalParamList = JSONUtils.toList(globalParams, Property.class);
}
this.globalParams = globalParams;
@ -295,7 +297,7 @@ public class ProcessDefinition {
public Map<String, String> getGlobalParamMap() {
if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) {
List<Property> propList = JSONUtils.toList(globalParams,Property.class);
List<Property> propList = JSONUtils.toList(globalParams, Property.class);
globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
}

329
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionVersion.java

@ -0,0 +1,329 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import java.util.Date;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
* process definition version
*/
@TableName("t_ds_process_definition_version")
public class ProcessDefinitionVersion {
/**
* id
*/
@TableId(value = "id", type = IdType.AUTO)
private int id;
/**
* process definition id
*/
private int processDefinitionId;
/**
* version
*/
private long version;
/**
* definition json string
*/
private String processDefinitionJson;
/**
* description
*/
private String description;
/**
* receivers
*/
private String receivers;
/**
* receivers cc
*/
private String receiversCc;
/**
* process warning time out. unit: minute
*/
private int timeout;
/**
* resource ids
*/
private String resourceIds;
/**
* create time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date createTime;
/**
* user defined parameters
*/
private String globalParams;
/**
* locations array for web
*/
private String locations;
/**
* connects array for web
*/
private String connects;
public String getGlobalParams() {
return globalParams;
}
public void setGlobalParams(String globalParams) {
this.globalParams = globalParams;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getProcessDefinitionId() {
return processDefinitionId;
}
public void setProcessDefinitionId(int processDefinitionId) {
this.processDefinitionId = processDefinitionId;
}
public long getVersion() {
return version;
}
public void setVersion(long version) {
this.version = version;
}
public String getProcessDefinitionJson() {
return processDefinitionJson;
}
public void setProcessDefinitionJson(String processDefinitionJson) {
this.processDefinitionJson = processDefinitionJson;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public String getLocations() {
return locations;
}
public void setLocations(String locations) {
this.locations = locations;
}
public String getConnects() {
return connects;
}
public void setConnects(String connects) {
this.connects = connects;
}
public String getReceivers() {
return receivers;
}
public void setReceivers(String receivers) {
this.receivers = receivers;
}
public String getReceiversCc() {
return receiversCc;
}
public void setReceiversCc(String receiversCc) {
this.receiversCc = receiversCc;
}
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
public String getResourceIds() {
return resourceIds;
}
public void setResourceIds(String resourceIds) {
this.resourceIds = resourceIds;
}
@Override
public String toString() {
return "ProcessDefinitionVersion{"
+ "id=" + id
+ ", processDefinitionId=" + processDefinitionId
+ ", version=" + version
+ ", processDefinitionJson='" + processDefinitionJson + '\''
+ ", description='" + description + '\''
+ ", globalParams='" + globalParams + '\''
+ ", createTime=" + createTime
+ ", locations='" + locations + '\''
+ ", connects='" + connects + '\''
+ ", receivers='" + receivers + '\''
+ ", receiversCc='" + receiversCc + '\''
+ ", timeout=" + timeout
+ ", resourceIds='" + resourceIds + '\''
+ '}';
}
public static Builder newBuilder() {
return new Builder();
}
public static final class Builder {
private int id;
private int processDefinitionId;
private long version;
private String processDefinitionJson;
private String description;
private String globalParams;
private Date createTime;
private String locations;
private String connects;
private String receivers;
private String receiversCc;
private int timeout;
private String resourceIds;
private Builder() {
}
public Builder id(int id) {
this.id = id;
return this;
}
public Builder processDefinitionId(int processDefinitionId) {
this.processDefinitionId = processDefinitionId;
return this;
}
public Builder version(long version) {
this.version = version;
return this;
}
public Builder processDefinitionJson(String processDefinitionJson) {
this.processDefinitionJson = processDefinitionJson;
return this;
}
public Builder description(String description) {
this.description = description;
return this;
}
public Builder globalParams(String globalParams) {
this.globalParams = globalParams;
return this;
}
public Builder createTime(Date createTime) {
this.createTime = createTime;
return this;
}
public Builder locations(String locations) {
this.locations = locations;
return this;
}
public Builder connects(String connects) {
this.connects = connects;
return this;
}
public Builder receivers(String receivers) {
this.receivers = receivers;
return this;
}
public Builder receiversCc(String receiversCc) {
this.receiversCc = receiversCc;
return this;
}
public Builder timeout(int timeout) {
this.timeout = timeout;
return this;
}
public Builder resourceIds(String resourceIds) {
this.resourceIds = resourceIds;
return this;
}
public ProcessDefinitionVersion build() {
ProcessDefinitionVersion processDefinitionVersion = new ProcessDefinitionVersion();
processDefinitionVersion.setId(id);
processDefinitionVersion.setProcessDefinitionId(processDefinitionId);
processDefinitionVersion.setVersion(version);
processDefinitionVersion.setProcessDefinitionJson(processDefinitionJson);
processDefinitionVersion.setDescription(description);
processDefinitionVersion.setGlobalParams(globalParams);
processDefinitionVersion.setCreateTime(createTime);
processDefinitionVersion.setLocations(locations);
processDefinitionVersion.setConnects(connects);
processDefinitionVersion.setReceivers(receivers);
processDefinitionVersion.setReceiversCc(receiversCc);
processDefinitionVersion.setTimeout(timeout);
processDefinitionVersion.setResourceIds(resourceIds);
return processDefinitionVersion;
}
}
}
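The builder above exists so that a ProcessDefinition can be copied field-by-field into an immutable version row, which is exactly how the test helpers later in this diff use it. A minimal, self-contained sketch of that usage; the helper class name and the create-time stamp below are illustrative assumptions, not part of this commit:

import java.util.Date;

import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;

public final class VersionSnapshotExample {

    private VersionSnapshotExample() {
    }

    /** Copy a definition's editable fields into a version snapshot stamped with the given version number. */
    public static ProcessDefinitionVersion toVersion(ProcessDefinition definition, long version) {
        return ProcessDefinitionVersion.newBuilder()
                .processDefinitionId(definition.getId())
                .version(version)
                .processDefinitionJson(definition.getProcessDefinitionJson())
                .description(definition.getDescription())
                .globalParams(definition.getGlobalParams())
                .locations(definition.getLocations())
                .connects(definition.getConnects())
                .receivers(definition.getReceivers())
                .receiversCc(definition.getReceiversCc())
                .timeout(definition.getTimeout())
                .resourceIds(definition.getResourceIds())
                .createTime(new Date())   // assumption: stamp the snapshot with "now"
                .build();
    }
}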

24
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java

@ -14,18 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.MapKey;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
/**
* process definition mapper interface
*/
@ -34,6 +37,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* query process definition by name
*
* @param projectId projectId
* @param name name
* @return process definition
@ -43,6 +47,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* query process definition by id
*
* @param processDefineId processDefineId
* @return process definition
*/
@ -50,6 +55,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* process definition page
*
* @param page page
* @param searchVal searchVal
* @param userId userId
@ -65,6 +71,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* query all process definition list
*
* @param projectId projectId
* @return process definition list
*/
@ -72,6 +79,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* query process definition by ids
*
* @param ids ids
* @return process definition list
*/
@ -79,6 +87,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* query process definition by tenant
*
* @param tenantId tenantId
* @return process definition list
*/
@ -86,6 +95,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* count process definition group by user
*
* @param userId userId
* @param projectIds projectIds
* @param isAdmin isAdmin
@ -98,6 +108,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* list all resource ids
*
* @return resource ids list
*/
@MapKey("id")
@ -105,8 +116,17 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
/**
* list all resource ids by user id
*
* @return resource ids list
*/
@MapKey("id")
List<Map<String, Object>> listResourcesByUser(@Param("userId") Integer userId);
/**
* update the process definition version by process definition id
*
* @param processDefinitionId process definition id
* @param version version
*/
void updateVersionByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version);
}

69
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.java

@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.ibatis.annotations.Param;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process definition version mapper interface
*/
public interface ProcessDefinitionVersionMapper extends BaseMapper<ProcessDefinitionVersion> {
/**
* query max version by process definition id
*
* @param processDefinitionId process definition id
* @return the max version of this process definition id
*/
Long queryMaxVersionByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId);
/**
* query the paging process definition version list by pagination info
*
* @param page pagination info
* @param processDefinitionId process definition id
* @return the paging process definition version list
*/
IPage<ProcessDefinitionVersion> queryProcessDefinitionVersionsPaging(Page<ProcessDefinitionVersion> page,
@Param("processDefinitionId") int processDefinitionId);
/**
* query a specific process definition version by process definition id and version number
*
* @param processDefinitionId process definition id
* @param version version number
* @return the process definition version info
*/
ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version);
/**
* delete a specific process definition version by process definition id and version number
*
* @param processDefinitionId process definition id
* @param version version number
* @return delete result
*/
int deleteByProcessDefinitionIdAndVersion(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version);
}
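The max-version query above is what the version service tests build on: a new snapshot is expected to receive max version + 1. A minimal sketch of that calculation; the class name and the null handling for a definition with no stored versions are assumptions, not something this diff shows:

import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;

public class NextVersionSketch {

    private final ProcessDefinitionVersionMapper versionMapper;

    public NextVersionSketch(ProcessDefinitionVersionMapper versionMapper) {
        this.versionMapper = versionMapper;
    }

    /** Next version number for a definition: max stored version + 1, or 1 for the first snapshot. */
    public long nextVersion(int processDefinitionId) {
        Long maxVersion = versionMapper.queryMaxVersionByProcessDefinitionId(processDefinitionId);
        // max(version) comes back null when no snapshot exists yet (assumption), so start at 1
        return maxVersion == null ? 1L : maxVersion + 1;
    }
}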

6
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml

@ -102,4 +102,10 @@
FROM t_ds_process_definition
WHERE user_id = #{userId} and release_state = 1 and resource_ids is not null and resource_ids != ''
</select>
<update id="updateVersionByProcessDefinitionId">
update t_ds_process_definition
set version = #{version}
where id = #{processDefinitionId}
</update>
</mapper>
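The update statement above keeps t_ds_process_definition pointing at the currently active version number. As a rough sketch of how a version switch could combine it with the version mapper; this is an assumption-laden reconstruction, the committed switchProcessDefinitionVersion implementation lives in the service classes of this change and may differ (only a few fields are copied here):

import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;

public class SwitchVersionSketch {

    private final ProcessDefinitionMapper definitionMapper;
    private final ProcessDefinitionVersionMapper versionMapper;

    public SwitchVersionSketch(ProcessDefinitionMapper definitionMapper,
                               ProcessDefinitionVersionMapper versionMapper) {
        this.definitionMapper = definitionMapper;
        this.versionMapper = versionMapper;
    }

    /** Roll a definition back to one of its stored snapshots. */
    public void switchTo(int processDefinitionId, long version) {
        ProcessDefinitionVersion snapshot =
                versionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);

        // copy the snapshot content back onto the live definition row ...
        ProcessDefinition definition = definitionMapper.selectById(processDefinitionId);
        definition.setProcessDefinitionJson(snapshot.getProcessDefinitionJson());
        definition.setLocations(snapshot.getLocations());
        definition.setConnects(snapshot.getConnects());
        definitionMapper.updateById(definition);

        // ... and record which version number is now active
        definitionMapper.updateVersionByProcessDefinitionId(processDefinitionId, version);
    }
}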

47
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper">
<select id="queryMaxVersionByProcessDefinitionId" resultType="java.lang.Long">
select max(version)
from t_ds_process_definition_version
where process_definition_id = #{processDefinitionId}
</select>
<select id="queryProcessDefinitionVersionsPaging" resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion">
select *
from t_ds_process_definition_version
where process_definition_id = #{processDefinitionId}
order by version desc
</select>
<select id="queryByProcessDefinitionIdAndVersion" resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion">
select *
from t_ds_process_definition_version
where process_definition_id = #{processDefinitionId}
and version = #{version}
</select>
<delete id="deleteByProcessDefinitionIdAndVersion">
delete
from t_ds_process_definition_version
where process_definition_id = #{processDefinitionId}
and version = #{version}
</delete>
</mapper>
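For orientation, the paging select above is consumed through MyBatis-Plus and then adapted to the api module's PageInfo, mirroring what the ProcessDefinitionVersionServiceTest earlier in this diff asserts on. A hedged sketch; the wrapper class name is illustrative and the PageInfo setters are assumed from the api module's usual paging pattern:

import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

public class VersionPagingSketch {

    private final ProcessDefinitionVersionMapper versionMapper;

    public VersionPagingSketch(ProcessDefinitionVersionMapper versionMapper) {
        this.versionMapper = versionMapper;
    }

    /** Query one page of version snapshots and adapt it to the API-layer PageInfo. */
    public PageInfo<ProcessDefinitionVersion> page(int processDefinitionId, int pageNo, int pageSize) {
        Page<ProcessDefinitionVersion> page = new Page<>(pageNo, pageSize);
        IPage<ProcessDefinitionVersion> result =
                versionMapper.queryProcessDefinitionVersionsPaging(page, processDefinitionId);

        PageInfo<ProcessDefinitionVersion> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setLists(result.getRecords());          // rows for this page
        pageInfo.setTotalCount((int) result.getTotal()); // total row count for the pager
        return pageInfo;
    }
}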

54
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java

@ -14,14 +14,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -31,9 +39,8 @@ import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(SpringRunner.class)
@SpringBootTest
@ -59,9 +66,10 @@ public class ProcessDefinitionMapperTest {
/**
* insert
*
* @return ProcessDefinition
*/
private ProcessDefinition insertOne(){
private ProcessDefinition insertOne() {
//insertOne
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setName("def 1");
@ -77,9 +85,10 @@ public class ProcessDefinitionMapperTest {
/**
* insert
*
* @return ProcessDefinition
*/
private ProcessDefinition insertTwo(){
private ProcessDefinition insertTwo() {
//insertOne
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setName("def 2");
@ -95,7 +104,7 @@ public class ProcessDefinitionMapperTest {
* test update
*/
@Test
public void testUpdate(){
public void testUpdate() {
//insertOne
ProcessDefinition processDefinition = insertOne();
//update
@ -108,7 +117,7 @@ public class ProcessDefinitionMapperTest {
* test delete
*/
@Test
public void testDelete(){
public void testDelete() {
ProcessDefinition processDefinition = insertOne();
int delete = processDefinitionMapper.deleteById(processDefinition.getId());
Assert.assertEquals(1, delete);
@ -175,8 +184,8 @@ public class ProcessDefinitionMapperTest {
@Test
public void testQueryDefineListPaging() {
ProcessDefinition processDefinition = insertOne();
Page<ProcessDefinition> page = new Page(1,3);
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010,true);
Page<ProcessDefinition> page = new Page(1, 3);
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010, true);
Assert.assertNotEquals(processDefinitionIPage.getTotal(), 0);
}
@ -186,7 +195,7 @@ public class ProcessDefinitionMapperTest {
@Test
public void testQueryAllDefinitionList() {
ProcessDefinition processDefinition = insertOne();
List<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010);
Assert.assertNotEquals(processDefinitionIPage.size(), 0);
}
@ -214,7 +223,7 @@ public class ProcessDefinitionMapperTest {
@Test
public void testCountDefinitionGroupByUser() {
User user= new User();
User user = new User();
user.setUserName("user1");
user.setUserPassword("1");
user.setEmail("xx@123.com");
@ -239,7 +248,7 @@ public class ProcessDefinitionMapperTest {
}
@Test
public void listResourcesTest(){
public void listResourcesTest() {
ProcessDefinition processDefinition = insertOne();
processDefinition.setResourceIds("3,5");
processDefinition.setReleaseState(ReleaseState.ONLINE);
@ -248,11 +257,22 @@ public class ProcessDefinitionMapperTest {
}
@Test
public void listResourcesByUserTest(){
public void listResourcesByUserTest() {
ProcessDefinition processDefinition = insertOne();
processDefinition.setResourceIds("3,5");
processDefinition.setReleaseState(ReleaseState.ONLINE);
List<Map<String, Object>> maps = processDefinitionMapper.listResourcesByUser(processDefinition.getUserId());
Assert.assertNotNull(maps);
}
@Test
public void testUpdateVersionByProcessDefinitionId() {
long expectedVersion = 10;
ProcessDefinition processDefinition = insertOne();
processDefinition.setVersion(expectedVersion);
processDefinitionMapper.updateVersionByProcessDefinitionId(
processDefinition.getId(), processDefinition.getVersion());
ProcessDefinition processDefinition1 = processDefinitionMapper.selectById(processDefinition.getId());
Assert.assertEquals(expectedVersion, processDefinition1.getVersion());
}
}

172
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapperTest.java

@ -0,0 +1,172 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import java.util.Date;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(SpringRunner.class)
@SpringBootTest
@Transactional
@Rollback(true)
public class ProcessDefinitionVersionMapperTest {
@Autowired
ProcessDefinitionMapper processDefinitionMapper;
@Autowired
ProcessDefinitionVersionMapper processDefinitionVersionMapper;
@Autowired
UserMapper userMapper;
@Autowired
QueueMapper queueMapper;
@Autowired
TenantMapper tenantMapper;
@Autowired
ProjectMapper projectMapper;
/**
* insert
*
* @return ProcessDefinitionVersion
*/
private ProcessDefinitionVersion insertOne() {
// insertOne
ProcessDefinitionVersion processDefinitionVersion
= new ProcessDefinitionVersion();
processDefinitionVersion.setProcessDefinitionId(66);
processDefinitionVersion.setVersion(10);
processDefinitionVersion.setProcessDefinitionJson(StringUtils.EMPTY);
processDefinitionVersion.setDescription(StringUtils.EMPTY);
processDefinitionVersion.setGlobalParams(StringUtils.EMPTY);
processDefinitionVersion.setCreateTime(new Date());
processDefinitionVersion.setLocations(StringUtils.EMPTY);
processDefinitionVersion.setConnects(StringUtils.EMPTY);
processDefinitionVersion.setReceivers(StringUtils.EMPTY);
processDefinitionVersion.setReceiversCc(StringUtils.EMPTY);
processDefinitionVersion.setTimeout(10);
processDefinitionVersion.setResourceIds("1,2");
processDefinitionVersionMapper.insert(processDefinitionVersion);
return processDefinitionVersion;
}
/**
* insert
*
* @return ProcessDefinitionVersion
*/
private ProcessDefinitionVersion insertTwo() {
// insertTwo
ProcessDefinitionVersion processDefinitionVersion
= new ProcessDefinitionVersion();
processDefinitionVersion.setProcessDefinitionId(67);
processDefinitionVersion.setVersion(11);
processDefinitionVersion.setProcessDefinitionJson(StringUtils.EMPTY);
processDefinitionVersion.setDescription(StringUtils.EMPTY);
processDefinitionVersion.setGlobalParams(StringUtils.EMPTY);
processDefinitionVersion.setCreateTime(new Date());
processDefinitionVersion.setLocations(StringUtils.EMPTY);
processDefinitionVersion.setConnects(StringUtils.EMPTY);
processDefinitionVersion.setReceivers(StringUtils.EMPTY);
processDefinitionVersion.setReceiversCc(StringUtils.EMPTY);
processDefinitionVersion.setTimeout(10);
processDefinitionVersion.setResourceIds("1,2");
processDefinitionVersionMapper.insert(processDefinitionVersion);
return processDefinitionVersion;
}
/**
* test insert
*/
@Test
public void testInsert() {
ProcessDefinitionVersion processDefinitionVersion = insertOne();
Assert.assertTrue(processDefinitionVersion.getId() > 0);
}
/**
* test query
*/
@Test
public void testQueryMaxVersionByProcessDefinitionId() {
ProcessDefinitionVersion processDefinitionVersion = insertOne();
Long version = processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId(
processDefinitionVersion.getProcessDefinitionId());
// query
Assert.assertEquals(10, (long) version);
}
@Test
public void testQueryProcessDefinitionVersionsPaging() {
insertOne();
insertTwo();
Page<ProcessDefinitionVersion> page = new Page<>(1, 3);
IPage<ProcessDefinitionVersion> processDefinitionVersionIPage =
processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, 10);
Assert.assertTrue(processDefinitionVersionIPage.getSize() >= 2);
}
@Test
public void testDeleteByProcessDefinitionIdAndVersion() {
ProcessDefinitionVersion processDefinitionVersion = insertOne();
int i = processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(
processDefinitionVersion.getProcessDefinitionId(), processDefinitionVersion.getVersion());
Assert.assertEquals(1, i);
}
@Test
public void testQueryByProcessDefinitionIdAndVersion() {
ProcessDefinitionVersion processDefinitionVersion1 = insertOne();
ProcessDefinitionVersion processDefinitionVersion3 = processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(
processDefinitionVersion1.getProcessDefinitionId(), 10);
ProcessDefinitionVersion processDefinitionVersion2 = insertTwo();
ProcessDefinitionVersion processDefinitionVersion4 = processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(
processDefinitionVersion2.getProcessDefinitionId(), 11);
Assert.assertEquals(processDefinitionVersion1.getProcessDefinitionId(),
processDefinitionVersion3.getProcessDefinitionId());
Assert.assertEquals(processDefinitionVersion2.getProcessDefinitionId(),
processDefinitionVersion4.getProcessDefinitionId());
}
}

155
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue

@ -103,7 +103,7 @@
</x-button>
<x-button
type="primary"
v-tooltip.light="$t('Close')"
icon="ans-icon-off"
size="xsmall"
data-container="body"
@ -122,6 +122,17 @@
>
{{spinnerLoading ? 'Loading...' : $t('Save')}}
</x-button>
<x-button
style="vertical-align: middle;"
type="primary"
size="xsmall"
v-if="this.type !== 'instance' && this.urlParam.id !== null"
:loading="spinnerLoading"
@click="_version"
icon="ans-icon-dependence"
>
{{spinnerLoading ? 'Loading...' : $t('Version Info')}}
</x-button>
</div>
</div>
<div class="scrollbar dag-container">
@ -147,6 +158,7 @@
import { findComponentDownward } from '@/module/util/'
import disabledState from '@/module/mixin/disabledState'
import { mapActions, mapState, mapMutations } from 'vuex'
import mVersions from '../../projects/pages/definition/pages/list/_source/versions'
let eventModel
@ -176,7 +188,7 @@
releaseState: String
},
methods: {
...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState']),
...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState', 'switchProcessDefinitionVersion', 'getProcessDefinitionVersionsPage', 'deleteProcessDefinitionVersion']),
...mapMutations('dag', ['addTasks', 'cacheTasks', 'resetParams', 'setIsEditDag', 'setName', 'addConnects']),
// DAG automatic layout
@ -196,7 +208,7 @@
],
Connector: 'Bezier',
PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3},
ConnectionOverlays: [
[
'Arrow',
@ -370,6 +382,12 @@
this[this.type === 'instance' ? 'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => {
this.$message.success(res.msg)
this.spinnerLoading = false
// Jump to the process instance or definition list after saving
if (this.type === 'instance') {
this.$router.push({ path: `/projects/instance/list/${this.urlParam.id}?_t=${new Date().getTime()}` })
} else {
this.$router.push({ path: `/projects/definition/list/${this.urlParam.id}?_t=${new Date().getTime()}` })
}
resolve()
}).catch(e => {
this.$message.error(e.msg || '')
@ -657,6 +675,135 @@
if(eventModel && this.taskId == $id){
eventModel.remove()
}
},
/**
* query the process definition pagination version
*/
_version (item) {
let self = this
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: this.urlParam.id
}).then(res => {
let processDefinitionVersions = res.data.lists
let total = res.data.totalCount
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
if (this.versionsModel) {
this.versionsModel.remove()
}
this.versionsModel = this.$drawer({
direction: 'right',
closable: true,
showMask: true,
escClose: true,
render (h) {
return h(mVersions, {
on: {
/**
* switch version in process definition version list
*
* @param version the version the user wants to switch to
* @param processDefinitionId the process definition id
* @param fromThis the versions component instance
*/
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
self.$store.state.dag.isSwitchVersion = true
self.switchProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
self.$message.success(self.$t('Switch Version Successfully'))
setTimeout(() => {
fromThis.$destroy()
self.versionsModel.remove()
}, 0)
self.$router.push({ path: `/projects/definition/list/${processDefinitionId}?_t=${new Date().getTime()}` })
}).catch(e => {
self.$store.state.dag.isSwitchVersion = false
self.$message.error(e.msg || '')
})
},
/**
* Paging event of process definition versions
*
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId the process definition id whose versions are paged
* @param fromThis the versions component instance
*/
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
self.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
processDefinitionId: processDefinitionId
}).then(res => {
fromThis.processDefinitionVersions = res.data.lists
fromThis.total = res.data.totalCount
fromThis.pageSize = res.data.pageSize
fromThis.pageNo = res.data.currentPage
}).catch(e => {
self.$message.error(e.msg || '')
})
},
/**
* delete one version of process definition
*
* @param version the version to delete
* @param processDefinitionId the process definition id the version belongs to
* @param fromThis the versions component instance
*/
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
self.deleteProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
self.$message.success(res.msg || '')
fromThis.$emit('mVersionGetProcessDefinitionVersionsPage', {
pageNo: 1,
pageSize: 10,
processDefinitionId: processDefinitionId,
fromThis: fromThis
})
}).catch(e => {
self.$message.error(e.msg || '')
})
},
/**
* remove this drawer
*
* @param fromThis the versions component instance
*/
close ({ fromThis }) {
setTimeout(() => {
fromThis.$destroy()
self.versionsModel.remove()
}, 0)
}
},
props: {
processDefinition: {
id: self.urlParam.id,
version: self.$store.state.dag.version
},
processDefinitionVersions: processDefinitionVersions,
total: total,
pageNo: pageNo,
pageSize: pageSize
}
})
}
})
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
watch: {
@ -685,7 +832,7 @@
],
Connector: 'Bezier',
PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3},
ConnectionOverlays: [
[
'Arrow',

2
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js

@ -72,7 +72,7 @@ Affirm.paramVerification = (name) => {
}
} else {
// View history direct jump
flag = name === 'projects-instance-details' ? true : !dagStore.isEditDag
flag = name === 'projects-instance-details' ? true : (dagStore.isSwitchVersion || !dagStore.isEditDag)
}
return flag
}
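The guard above is the only behavioural change in this file: a redirect triggered by a version switch must be allowed through even though the DAG is still flagged as edited. A minimal standalone sketch of the same decision, assuming a dagStore object exposing the isEditDag and isSwitchVersion flags used in the diff (router wiring omitted):

// Sketch only: mirrors the paramVerification branch above.
// A jump is allowed when viewing instance details, right after a version
// switch (isSwitchVersion), or when the DAG has not been edited.
function canJump (routeName, dagStore) {
  if (routeName === 'projects-instance-details') {
    return true
  }
  return dagStore.isSwitchVersion || !dagStore.isEditDag
}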

125
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue

@ -117,6 +117,7 @@
</x-poptip>
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('TreeView')" @click="_treeView(item)" icon="ans-icon-node"><!--{{$t('树形图')}}--></x-button>
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Export')" @click="_export(item)" icon="ans-icon-download"><!--{{$t('导出')}}--></x-button>
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Version Info')" @click="_version(item)" :disabled="item.releaseState === 'ONLINE'" icon="ans-icon-dependence"><!--{{$t('版本信息')}}--></x-button>
</td>
</tr>
@ -148,6 +149,7 @@
import mTiming from './timing'
import { mapActions } from 'vuex'
import { publishStatus } from '@/conf/home/pages/dag/_source/config'
import mVersions from './versions'
export default {
name: 'definition-list',
@ -164,7 +166,7 @@
pageSize: Number
},
methods: {
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition','exportDefinition','copyProcess']),
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion']),
...mapActions('security', ['getWorkerGroupsAll']),
_rtPublishStatus (code) {
return _.filter(publishStatus, v => v.code === code)[0].desc
@ -334,6 +336,125 @@
})
},
_version (item) {
let self = this
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: item.id
}).then(res => {
let processDefinitionVersions = res.data.lists
let total = res.data.totalCount
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
if (this.versionsModel) {
this.versionsModel.remove()
}
this.versionsModel = this.$drawer({
direction: 'right',
closable: true,
showMask: true,
escClose: true,
render (h) {
return h(mVersions, {
on: {
/**
* switch version in process definition version list
*
* @param version the version the user wants to switch to
* @param processDefinitionId the process definition id
* @param fromThis the versions component instance
*/
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
self.switchProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
self.$message.success(self.$t('Switch Version Successfully'))
setTimeout(() => {
fromThis.$destroy()
self.versionsModel.remove()
}, 0)
self.$router.push({ path: `/projects/definition/list/${processDefinitionId}` })
}).catch(e => {
self.$message.error(e.msg || '')
})
},
/**
* Paging event of process definition versions
*
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId the process definition id whose versions are paged
* @param fromThis the versions component instance
*/
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
self.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
processDefinitionId: processDefinitionId
}).then(res => {
fromThis.processDefinitionVersions = res.data.lists
fromThis.total = res.data.totalCount
fromThis.pageSize = res.data.pageSize
fromThis.pageNo = res.data.currentPage
}).catch(e => {
self.$message.error(e.msg || '')
})
},
/**
* delete one version of process definition
*
* @param version the version to delete
* @param processDefinitionId the process definition id the version belongs to
* @param fromThis the versions component instance
*/
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
self.deleteProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
self.$message.success(res.msg || '')
fromThis.$emit('mVersionGetProcessDefinitionVersionsPage', {
pageNo: 1,
pageSize: 10,
processDefinitionId: processDefinitionId,
fromThis: fromThis
})
}).catch(e => {
self.$message.error(e.msg || '')
})
},
/**
* remove this drawer
*
* @param fromThis the versions component instance
*/
close ({ fromThis }) {
setTimeout(() => {
fromThis.$destroy()
self.versionsModel.remove()
}, 0)
}
},
props: {
processDefinition: item,
processDefinitionVersions: processDefinitionVersions,
total: total,
pageNo: pageNo,
pageSize: pageSize
}
})
}
})
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_batchExport () {
this.exportDefinition({
processDefinitionIds: this.strSelectIds,
@ -423,6 +544,6 @@
},
mounted () {
},
components: { }
components: { mVersions }
}
</script>
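Both the dag.vue and list.vue _version handlers above read the same four fields from the paging response before opening the drawer. A sketch of that shape with hypothetical values, limited to the fields this PR actually touches (the row fields shown are the ones rendered by versions.vue; the real payload may carry more):

// res.data as consumed by _version (field names taken from the diff)
const res = {
  data: {
    lists: [ // one page of process definition versions
      { id: 1, processDefinitionId: 4, version: 10, description: '', createTime: '2020-08-01 00:00:00' }
    ],
    totalCount: 1,   // total number of versions for this definition
    pageSize: 10,    // requested page size
    currentPage: 1   // page number that was returned
  }
}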

255
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/versions.vue

@ -0,0 +1,255 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="container">
<div class="title-box">
<span class="name">{{$t('Version Info')}}</span>
</div>
<div class="table-box" v-if="processDefinitionVersions.length > 0">
<table class="fixed">
<caption><!-- placeHolder --></caption>
<tr>
<th scope="col">
<span>#</span>
</th>
<th scope="col" style="min-width: 40px">
<span>Version</span>
</th>
<th scope="col" style="min-width: 200px;max-width: 300px;">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" style="min-width: 50px">
<span>{{$t('Create Time')}}</span>
</th>
<th scope="col" style="min-width: 300px">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in processDefinitionVersions" :key="item.id">
<td>
<span>-</span>
</td>
<td>
<span v-if="item.version">
<span v-if="item.version === processDefinition.version" style="color: green"><strong>{{item.version}} {{$t('Current Version')}}</strong></span>
<span v-else>{{item.version}}</span>
</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.description">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.createTime">{{item.createTime}}</span>
<span v-else>-</span>
</td>
<td>
<x-poptip
:ref="'poptip-switch-version-' + $index"
placement="top-end"
width="90">
<p>{{$t('Confirm Switch To This Version?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeSwitchVersion($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_mVersionSwitchProcessDefinitionVersion(item)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
icon="ans-icon-dependence"
type="primary"
shape="circle"
size="xsmall"
:disabled="item.version === processDefinition.version"
data-toggle="tooltip"
:title="$t('Switch To This Version')">
</x-button>
</template>
</x-poptip>
<x-poptip
:ref="'poptip-delete-' + $index"
placement="top-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_mVersionDeleteProcessDefinitionVersion(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
icon="ans-icon-trash"
type="error"
shape="circle"
size="xsmall"
:disabled="item.version === processDefinition.version"
data-toggle="tooltip"
:title="$t('delete')">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
<div v-if="processDefinitionVersions.length === 0">
<m-no-data><!----></m-no-data>
</div>
<div v-if="processDefinitionVersions.length > 0">
<div class="bottom-box">
<x-button type="text" @click="_close()"> {{$t('Cancel')}} </x-button>
<x-page :current="pageNo" :total="total" @on-change="_mVersionGetProcessDefinitionVersionsPage" small><!----></x-page>
</div>
</div>
</div>
</template>
<script>
import mNoData from '@/module/components/noData/noData'
export default {
name: 'versions',
data () {
return {
tableHeaders: [
{
label: 'version',
prop: 'version'
},
{
label: 'createTime',
prop: 'createTime'
}
]
}
},
props: {
processDefinition: Object,
processDefinitionVersions: Array,
total: Number,
pageNo: Number,
pageSize: Number
},
methods: {
/**
* switch version in process definition version list
*/
_mVersionSwitchProcessDefinitionVersion (item) {
this.$emit('mVersionSwitchProcessDefinitionVersion', {
version: item.version,
processDefinitionId: this.processDefinition.id,
fromThis: this
})
},
/**
* delete one version of process definition
*/
_mVersionDeleteProcessDefinitionVersion (item) {
this.$emit('mVersionDeleteProcessDefinitionVersion', {
version: item.version,
processDefinitionId: this.processDefinition.id,
fromThis: this
})
},
/**
* Paging event of process definition versions
*/
_mVersionGetProcessDefinitionVersionsPage (val) {
this.$emit('mVersionGetProcessDefinitionVersionsPage', {
pageNo: val,
pageSize: this.pageSize,
processDefinitionId: this.processDefinition.id,
fromThis: this
})
},
/**
* Close the switch version layer
*/
_closeSwitchVersion (i) {
if (i >= 0) {
this.$refs[`poptip-switch-version-${i}`][0].doClose()
}
},
/**
* Close the delete layer
*/
_closeDelete (i) {
if (i >= 0) {
this.$refs[`poptip-delete-${i}`][0].doClose()
}
},
/**
* Close the drawer and destroy this component and its internal events
*/
_close () {
// ask the parent to remove the drawer; the parent destroys this component via the 'close' event
this.$emit('close', {
fromThis: this
})
}
},
created () {},
mounted () {},
components: { mNoData }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.container {
width: 500px;
position: relative;
.title-box {
height: 61px;
border-bottom: 1px solid #DCDEDC;
position: relative;
.name {
position: absolute;
left: 24px;
top: 18px;
font-size: 16px;
}
}
.bottom-box {
position: absolute;
bottom: 0;
left: 0;
width: 100%;
text-align: right;
height: 60px;
line-height: 60px;
border-top: 1px solid #DCDEDC;
background: #fff;
.ans-page {
display: inline-block;
}
}
.table-box {
overflow-y: scroll;
height: calc(100vh - 61px);
padding-bottom: 60px;
}
}
</style>
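To summarise the contract this component implements, here is a compact sketch of the props it expects and the events it emits back to whichever parent opens it in a drawer (names are taken from the code above; the values are hypothetical):

// Props passed in by dag.vue / list.vue
const props = {
  processDefinition: { id: 4, version: 10 }, // hypothetical current definition
  processDefinitionVersions: [],             // current page of version rows
  total: 0,
  pageNo: 1,
  pageSize: 10
}
// Events emitted to the parent, each carrying fromThis (this component):
// 'mVersionSwitchProcessDefinitionVersion'   -> { version, processDefinitionId, fromThis }
// 'mVersionDeleteProcessDefinitionVersion'   -> { version, processDefinitionId, fromThis }
// 'mVersionGetProcessDefinitionVersionsPage' -> { pageNo, pageSize, processDefinitionId, fromThis }
// 'close'                                    -> { fromThis }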

42
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js

@ -79,6 +79,46 @@ export default {
})
})
},
/**
* get process definition versions pagination info
*/
getProcessDefinitionVersionsPage ({ state }, payload) {
return new Promise((resolve, reject) => {
io.get(`projects/${state.projectName}/process/versions`, payload, res => {
resolve(res)
}).catch(e => {
reject(e)
})
})
},
/**
* switch process definition version
*/
switchProcessDefinitionVersion ({ state }, payload) {
return new Promise((resolve, reject) => {
io.get(`projects/${state.projectName}/process/version/switch`, payload, res => {
resolve(res)
}).catch(e => {
reject(e)
})
})
},
/**
* delete process definition version
*/
deleteProcessDefinitionVersion ({ state }, payload) {
return new Promise((resolve, reject) => {
io.get(`projects/${state.projectName}/process/version/delete`, payload, res => {
resolve(res)
}).catch(e => {
reject(e)
})
})
},
/**
* Update process instance status
*/
@ -126,6 +166,8 @@ export default {
state.connects = JSON.parse(res.data.connects)
// locations
state.locations = JSON.parse(res.data.locations)
// version
state.version = res.data.version
// Process definition
const processDefinitionJson = JSON.parse(res.data.processDefinitionJson)
// tasks info
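A short usage sketch for the switch and delete actions added above, as dispatched from a component that maps them with mapActions('dag', ...) the way dag.vue and list.vue do (the id and version values are hypothetical; both actions issue GET requests under projects/${projectName}/process/...):

// switch the definition back to an earlier version
this.switchProcessDefinitionVersion({
  processDefinitionId: 4, // hypothetical definition id
  version: 10             // hypothetical target version
}).then(res => {
  this.$message.success(res.msg || '')
}).catch(e => {
  this.$message.error(e.msg || '')
})

// drop a version that is no longer needed
this.deleteProcessDefinitionVersion({
  processDefinitionId: 4,
  version: 9
}).then(res => {
  this.$message.success(res.msg || '')
}).catch(e => {
  this.$message.error(e.msg || '')
})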

5
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@ -135,6 +135,7 @@ export default {
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Switch To This Version': 'Switch To This Version',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
@ -159,8 +160,11 @@ export default {
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Current Version': 'Current Version',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
'Switch Version Successfully': 'Switch Version Successfully',
'Confirm Switch To This Version?': 'Confirm Switch To This Version?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
@ -288,6 +292,7 @@ export default {
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
'Version Info': 'Version Info',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',

5
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js

@ -138,6 +138,7 @@ export default {
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Switch To This Version': '切换到该版本',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
@ -162,8 +163,11 @@ export default {
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Current Version': '当前版本',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
'Switch Version Successfully': '切换版本成功',
'Confirm Switch To This Version?': '确定切换到该版本吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
@ -289,6 +293,7 @@ export default {
Rename: '重命名',
Download: '下载',
Export: '导出',
'Version Info': '版本信息',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',

2
pom.xml

@ -730,6 +730,7 @@
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessDefinitionVersionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
@ -853,6 +854,7 @@
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>

27
sql/dolphinscheduler-postgre.sql

@ -315,6 +315,30 @@ CREATE TABLE t_ds_process_definition (
create index process_definition_index on t_ds_process_definition (project_id,id);
--
-- Table structure for table t_ds_process_definition_version
--
DROP TABLE IF EXISTS t_ds_process_definition_version;
CREATE TABLE t_ds_process_definition_version (
id int NOT NULL ,
process_definition_id int NOT NULL ,
version int DEFAULT NULL ,
process_definition_json text ,
description text ,
global_params text ,
locations text ,
connects text ,
receivers text ,
receivers_cc text ,
create_time timestamp DEFAULT NULL ,
timeout int DEFAULT '0' ,
resource_ids varchar(64),
PRIMARY KEY (id)
) ;
create index process_definition_id_and_version on t_ds_process_definition_version (process_definition_id,version);
--
-- Table structure for table t_ds_process_instance
--
@ -693,6 +717,9 @@ ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource
DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence;
CREATE SEQUENCE t_ds_process_definition_id_sequence;
ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_definition_version_id_sequence;
CREATE SEQUENCE t_ds_process_definition_version_id_sequence;
ALTER TABLE t_ds_process_definition_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_version_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence;
CREATE SEQUENCE t_ds_process_instance_id_sequence;
ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence');

27
sql/dolphinscheduler_mysql.sql

@ -423,6 +423,33 @@ CREATE TABLE `t_ds_process_definition` (
-- Records of t_ds_process_definition
-- ----------------------------
-- ----------------------------
-- Table structure for t_ds_process_definition_version
-- ----------------------------
DROP TABLE IF EXISTS `t_ds_process_definition_version`;
CREATE TABLE `t_ds_process_definition_version` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key',
`process_definition_id` int(11) NOT NULL COMMENT 'process definition id',
`version` int(11) DEFAULT NULL COMMENT 'process definition version',
`process_definition_json` longtext COMMENT 'process definition json content',
`description` text,
`global_params` text COMMENT 'global parameters',
`locations` text COMMENT 'Node location information',
`connects` text COMMENT 'Node connection information',
`receivers` text COMMENT 'receivers',
`receivers_cc` text COMMENT 'cc',
`create_time` datetime DEFAULT NULL COMMENT 'create time',
`timeout` int(11) DEFAULT '0' COMMENT 'time out',
`resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids',
PRIMARY KEY (`id`),
UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE,
KEY `process_definition_index` (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of t_ds_process_definition
-- ----------------------------
-- ----------------------------
-- Table structure for t_ds_process_instance
-- ----------------------------

31
sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql

@ -56,3 +56,34 @@ delimiter ;
CALL uc_dolphin_T_t_ds_task_instance_A_delay_time();
DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time;
-- ct_dolphin_T_t_ds_process_definition_version
drop PROCEDURE if EXISTS ct_dolphin_T_t_ds_process_definition_version;
delimiter d//
CREATE PROCEDURE ct_dolphin_T_t_ds_process_definition_version()
BEGIN
CREATE TABLE `t_ds_process_definition_version` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key',
`process_definition_id` int(11) NOT NULL COMMENT 'process definition id',
`version` int(11) DEFAULT NULL COMMENT 'process definition version',
`process_definition_json` longtext COMMENT 'process definition json content',
`description` text,
`global_params` text COMMENT 'global parameters',
`locations` text COMMENT 'Node location information',
`connects` text COMMENT 'Node connection information',
`receivers` text COMMENT 'receivers',
`receivers_cc` text COMMENT 'cc',
`create_time` datetime DEFAULT NULL COMMENT 'create time',
`timeout` int(11) DEFAULT '0' COMMENT 'time out',
`resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids',
PRIMARY KEY (`id`),
UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE,
KEY `process_definition_index` (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8;
END;
d//
delimiter ;
CALL ct_dolphin_T_t_ds_process_definition_version;
DROP PROCEDURE ct_dolphin_T_t_ds_process_definition_version;

32
sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql

@ -49,4 +49,34 @@ d//
delimiter ;
SELECT uc_dolphin_T_t_ds_task_instance_A_delay_time();
DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time();
-- ct_dolphin_T_t_ds_process_definition_version
delimiter d//
CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_process_definition_version() RETURNS void AS $$
BEGIN
CREATE TABLE t_ds_process_definition_version (
id int NOT NULL ,
process_definition_id int NOT NULL ,
version int DEFAULT NULL ,
process_definition_json text ,
description text ,
global_params text ,
locations text ,
connects text ,
receivers text ,
receivers_cc text ,
create_time timestamp DEFAULT NULL ,
timeout int DEFAULT '0' ,
resource_ids varchar(64),
PRIMARY KEY (id)
) ;
create index process_definition_id_and_version on t_ds_process_definition_version (process_definition_id,version);
END;
$$ LANGUAGE plpgsql;
d//
delimiter ;
SELECT ct_dolphin_T_t_ds_process_definition_version();
DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_process_definition_version();