
Merge remote-tracking branch 'upstream/dev-20190415' into dev-20190415

# Conflicts:
#	escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java

Branch: pull/2/head
Author: dailidong (6 years ago)
Commit: 1e43309661

Changed files:
1. .gitignore (2 changed lines)
2. escheduler-alert/pom.xml (2 changed lines)
3. escheduler-api/pom.xml (2 changed lines)
4. escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java (169 changed lines)
5. escheduler-api/src/main/java/cn/escheduler/api/controller/DataAnalysisController.java (58 changed lines)
6. escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java (8 changed lines)
7. escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java (105 changed lines)
8. escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java (17 changed lines)
9. escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java (34 changed lines)
10. escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java (14 changed lines)
11. escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java (144 changed lines)
12. escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java (60 changed lines)
13. escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java (22 changed lines)
14. escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java (58 changed lines)
15. escheduler-api/src/main/java/cn/escheduler/api/quartz/ProcessScheduleJob.java (3 changed lines)
16. escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java (185 changed lines)
17. escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java (199 changed lines)
18. escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java (41 changed lines)
19. escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java (7 changed lines)
20. escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java (7 changed lines)
21. escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java (238 changed lines)
22. escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java (7 changed lines)
23. escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java (9 changed lines)
24. escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java (4 changed lines)
25. escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java (13 changed lines)
26. escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java (155 changed lines)
27. escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java (9 changed lines)
28. escheduler-api/src/test/java/cn/escheduler/api/HttpClientTest.java (162 changed lines)
29. escheduler-api/src/test/java/cn/escheduler/api/controller/QueueControllerTest.java (78 changed lines)
30. escheduler-common/pom.xml (28 changed lines)
31. escheduler-common/src/main/java/cn/escheduler/common/Constants.java (57 changed lines)
32. escheduler-common/src/main/java/cn/escheduler/common/enums/DbType.java (5 changed lines)
33. escheduler-common/src/main/java/cn/escheduler/common/job/db/ClickHouseDataSource.java (75 changed lines)
34. escheduler-common/src/main/java/cn/escheduler/common/job/db/DataSourceFactory.java (6 changed lines)
35. escheduler-common/src/main/java/cn/escheduler/common/job/db/OracleDataSource.java (75 changed lines)
36. escheduler-common/src/main/java/cn/escheduler/common/job/db/SQLServerDataSource.java (71 changed lines)
37. escheduler-common/src/main/java/cn/escheduler/common/model/TaskNode.java (16 changed lines)
38. escheduler-common/src/main/java/cn/escheduler/common/queue/ITaskQueue.java (9 changed lines)
39. escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueZkImpl.java (33 changed lines)
40. escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java (7 changed lines)
41. escheduler-common/src/main/java/cn/escheduler/common/utils/DateUtils.java (2 changed lines)
42. escheduler-common/src/main/java/cn/escheduler/common/utils/FileUtils.java (56 changed lines)
43. escheduler-common/src/main/java/cn/escheduler/common/utils/MysqlUtil.java (104 changed lines)
44. escheduler-common/src/main/java/cn/escheduler/common/utils/SchemaUtils.java (150 changed lines)
45. escheduler-common/src/main/java/cn/escheduler/common/utils/ScriptRunner.java (317 changed lines)
46. escheduler-common/src/main/resources/common/common.properties (1 changed line)
47. escheduler-common/src/test/java/cn/escheduler/common/queue/TaskQueueImplTest.java (6 changed lines)
48. escheduler-dao/pom.xml (2 changed lines)
49. escheduler-dao/readme.txt (54 changed lines)
50. escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java (119 changed lines)
51. escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java (15 changed lines)
52. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapper.java (90 changed lines)
53. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapperProvider.java (136 changed lines)
54. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapper.java (18 changed lines)
55. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapperProvider.java (31 changed lines)
56. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapper.java (59 changed lines)
57. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapperProvider.java (71 changed lines)
58. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java (11 changed lines)
59. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java (2 changed lines)
60. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapper.java (35 changed lines)
61. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapperProvider.java (80 changed lines)
62. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java (4 changed lines)
63. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapperProvider.java (2 changed lines)
64. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapper.java (19 changed lines)
65. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapperProvider.java (42 changed lines)
66. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java (29 changed lines)
67. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java (34 changed lines)
68. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapper.java (131 changed lines)
69. escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapperProvider.java (160 changed lines)
70. escheduler-dao/src/main/java/cn/escheduler/dao/model/AccessToken.java (126 changed lines)
71. escheduler-dao/src/main/java/cn/escheduler/dao/model/Command.java (16 changed lines)
72. escheduler-dao/src/main/java/cn/escheduler/dao/model/ErrorCommand.java (290 changed lines)
73. escheduler-dao/src/main/java/cn/escheduler/dao/model/ExecuteStatusCount.java (8 changed lines)
74. escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessInstance.java (13 changed lines)
75. escheduler-dao/src/main/java/cn/escheduler/dao/model/Queue.java (29 changed lines)
76. escheduler-dao/src/main/java/cn/escheduler/dao/model/Schedule.java (16 changed lines)
77. escheduler-dao/src/main/java/cn/escheduler/dao/model/TaskInstance.java (16 changed lines)
78. escheduler-dao/src/main/java/cn/escheduler/dao/model/TaskRecord.java (24 changed lines)
79. escheduler-dao/src/main/java/cn/escheduler/dao/model/User.java (49 changed lines)
80. escheduler-dao/src/main/java/cn/escheduler/dao/model/WorkerGroup.java (88 changed lines)
81. escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/EschedulerManager.java (82 changed lines)
82. escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/UpgradeDao.java (299 changed lines)
83. escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/CreateEscheduler.java (44 changed lines)
84. escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/InitEscheduler.java (38 changed lines)
85. escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/UpgradeEscheduler.java (47 changed lines)
86. escheduler-dao/src/main/resources/dao/data_source.properties (6 changed lines)
87. escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AccessTokenMapperTest.java (62 changed lines)
88. escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserMapperTest.java (12 changed lines)
89. escheduler-dao/src/test/java/cn/escheduler/dao/mapper/WorkerGroupMapperTest.java (69 changed lines)
90. escheduler-rpc/pom.xml (2 changed lines)
91. escheduler-server/pom.xml (2 changed lines)
92. escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java (3 changed lines)
93. escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java (56 changed lines)
94. escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java (11 changed lines)
95. escheduler-server/src/main/java/cn/escheduler/server/worker/task/PythonCommandExecutor.java (73 changed lines)
96. escheduler-server/src/main/java/cn/escheduler/server/worker/task/processdure/ProcedureTask.java (14 changed lines)
97. escheduler-server/src/main/java/cn/escheduler/server/worker/task/python/PythonTask.java (2 changed lines)
98. escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java (9 changed lines)
99. escheduler-server/src/test/java/cn/escheduler/server/worker/EnvFileTest.java (65 changed lines)
100. escheduler-server/src/test/java/cn/escheduler/server/worker/sql/SqlExecutorTest.java (52 changed lines)

Some files were not shown because too many files have changed in this diff.

.gitignore (2 changed lines)

@@ -33,3 +33,5 @@ yarn.lock
 package-lock.json
 config.gypi
 test/coverage
+/docs/zh_CN/介绍
+/docs/zh_CN/贡献代码.md

escheduler-alert/pom.xml (2 changed lines)

@@ -4,7 +4,7 @@
 <parent>
 <groupId>cn.analysys</groupId>
 <artifactId>escheduler</artifactId>
-<version>1.0.0-SNAPSHOT</version>
+<version>1.0.1-SNAPSHOT</version>
 </parent>
 <artifactId>escheduler-alert</artifactId>
 <packaging>jar</packaging>

escheduler-api/pom.xml (2 changed lines)

@@ -3,7 +3,7 @@
 <parent>
 <groupId>cn.analysys</groupId>
 <artifactId>escheduler</artifactId>
-<version>1.0.0-SNAPSHOT</version>
+<version>1.0.1-SNAPSHOT</version>
 </parent>
 <artifactId>escheduler-api</artifactId>
 <packaging>jar</packaging>

escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java (169 changed lines, new file)

@@ -0,0 +1,169 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api.controller;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.AccessTokenService;
import cn.escheduler.api.service.UsersService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.model.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
import static cn.escheduler.api.enums.Status.*;
/**
* user controller
*/
@RestController
@RequestMapping("/access-token")
public class AccessTokenController extends BaseController{
private static final Logger logger = LoggerFactory.getLogger(AccessTokenController.class);
@Autowired
private AccessTokenService accessTokenService;
/**
* create token
* @param loginUser
* @return
*/
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
public Result createToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime,
@RequestParam(value = "token") String token){
logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(),
userId,expireTime,token);
try {
Map<String, Object> result = accessTokenService.createToken(userId, expireTime, token);
return returnDataList(result);
}catch (Exception e){
logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e);
return error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg());
}
}
/**
* create token
* @param loginUser
* @return
*/
@PostMapping(value = "/generate")
@ResponseStatus(HttpStatus.CREATED)
public Result generateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime){
logger.info("login user {}, generate token , userId : {} , token expire time : {}",loginUser,userId,expireTime);
try {
Map<String, Object> result = accessTokenService.generateToken(userId, expireTime);
return returnDataList(result);
}catch (Exception e){
logger.error(GENERATE_TOKEN_ERROR.getMsg(),e);
return error(GENERATE_TOKEN_ERROR.getCode(), GENERATE_TOKEN_ERROR.getMsg());
}
}
/**
* query access token list paging
*
* @param loginUser
* @param pageNo
* @param searchVal
* @param pageSize
* @return
*/
@GetMapping(value="/list-paging")
@ResponseStatus(HttpStatus.OK)
public Result queryAccessTokenList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
logger.info("login user {}, list access token paging, pageNo: {}, searchVal: {}, pageSize: {}",
loginUser.getUserName(),pageNo,searchVal,pageSize);
try{
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg(),e);
return error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getCode(),QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg());
}
}
/**
* delete access token by id
* @param loginUser
* @param id
* @return
*/
@PostMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
public Result delAccessTokenById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id) {
logger.info("login user {}, delete access token, id: {},", loginUser.getUserName(), id);
try {
Map<String, Object> result = accessTokenService.delAccessTokenById(loginUser, id);
return returnDataList(result);
}catch (Exception e){
logger.error(DELETE_USER_BY_ID_ERROR.getMsg(),e);
return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg());
}
}
/**
* update token
* @param loginUser
* @return
*/
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.CREATED)
public Result updateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime,
@RequestParam(value = "token") String token){
logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(),
userId,expireTime,token);
try {
Map<String, Object> result = accessTokenService.updateToken(id,userId, expireTime, token);
return returnDataList(result);
}catch (Exception e){
logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e);
return error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg());
}
}
}
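
The two token endpoints above are plain form-parameter REST calls once a normal session exists. The sketch below is illustrative only: the host, port, context path and session cookie name are assumptions about the deployment, not values taken from this commit; the endpoint paths and form parameter names are the ones declared in the controller.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class AccessTokenApiSketch {

    // Assumed deployment values -- adjust for your own environment.
    private static final String BASE_URL = "http://localhost:12345/escheduler";
    private static final String SESSION_COOKIE = "sessionId=<session id issued at login>";

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // POST /access-token/generate: the server builds a token string for userId=1.
        String expireTime = URLEncoder.encode("2019-12-31 00:00:00", StandardCharsets.UTF_8);
        post(client, "/access-token/generate", "userId=1&expireTime=" + expireTime);

        // POST /access-token/create: persist a token value (taken from the generate response).
        post(client, "/access-token/create",
                "userId=1&expireTime=" + expireTime + "&token=<token from the generate response>");
    }

    private static void post(HttpClient client, String path, String formBody) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(BASE_URL + path))
                .header("Content-Type", "application/x-www-form-urlencoded")
                .header("Cookie", SESSION_COOKIE) // cookie name is an assumption
                .POST(HttpRequest.BodyPublishers.ofString(formBody))
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(path + " -> " + response.body());
    }
}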

escheduler-api/src/main/java/cn/escheduler/api/controller/DataAnalysisController.java (58 changed lines)

@@ -57,8 +57,7 @@ public class DataAnalysisController extends BaseController{
 public Result countTaskState(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
 @RequestParam(value="startDate", required=false) String startDate,
 @RequestParam(value="endDate", required=false) String endDate,
-@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId
-){
+@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
 try{
 logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}",
 loginUser.getUserName(), startDate, endDate, projectId);
@@ -82,12 +81,11 @@ public class DataAnalysisController extends BaseController{
 public Result countProcessInstanceState(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
 @RequestParam(value="startDate", required=false) String startDate,
 @RequestParam(value="endDate", required=false) String endDate,
-@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId
-){
+@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
 try{
 logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id",
 loginUser.getUserName(), startDate, endDate, projectId);
-Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser,projectId, startDate, endDate);
+Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate);
 return returnDataList(result);
 }catch (Exception e){
 logger.error(COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg(),e);
@@ -105,8 +103,7 @@ public class DataAnalysisController extends BaseController{
 @GetMapping(value="/define-user-count")
 @ResponseStatus(HttpStatus.OK)
 public Result countDefinitionByUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
-@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId
-){
+@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
 try{
 logger.info("count process definition , user:{}, project id",
 loginUser.getUserName(), projectId);
@@ -119,4 +116,51 @@ public class DataAnalysisController extends BaseController{
}
/**
* statistical command status data
*
* @param loginUser
* @param projectId
* @return
*/
@GetMapping(value="/command-state-count")
@ResponseStatus(HttpStatus.OK)
public Result countCommandState(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="startDate", required=false) String startDate,
@RequestParam(value="endDate", required=false) String endDate,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate);
return returnDataList(result);
}catch (Exception e){
logger.error(COMMAND_STATE_COUNT_ERROR.getMsg(),e);
return error(COMMAND_STATE_COUNT_ERROR.getCode(), COMMAND_STATE_COUNT_ERROR.getMsg());
}
}
/**
* queue count
*
* @param loginUser
* @param projectId
* @return
*/
@GetMapping(value="/queue-count")
@ResponseStatus(HttpStatus.OK)
public Result countQueueState(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}",
loginUser.getUserName(), projectId);
Map<String, Object> result = dataAnalysisService.countQueueState(loginUser, projectId);
return returnDataList(result);
}catch (Exception e){
logger.error(QUEUE_COUNT_ERROR.getMsg(),e);
return error(QUEUE_COUNT_ERROR.getCode(), QUEUE_COUNT_ERROR.getMsg());
}
}
}

escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java (8 changed lines)

@@ -66,13 +66,15 @@ public class ExecutorController extends BaseController {
 @RequestParam(value = "receiversCc", required = false) String receiversCc,
 @RequestParam(value = "runMode", required = false) RunMode runMode,
 @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
+@RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId,
 @RequestParam(value = "timeout", required = false) Integer timeout) {
 try {
 logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, "
 + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, "
-+ "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, timeout: {}",
++ "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroupId: {}, timeout: {}",
 loginUser.getUserName(), projectName, processDefinitionId, scheduleTime, failureStrategy,
-taskDependType, warningType, warningGroupId,receivers,receiversCc,runMode,processInstancePriority,timeout);
+taskDependType, warningType, warningGroupId,receivers,receiversCc,runMode,processInstancePriority,
+workerGroupId, timeout);
 if (timeout == null) {
 timeout = cn.escheduler.common.Constants.MAX_TASK_TIMEOUT;
@@ -80,7 +82,7 @@ public class ExecutorController extends BaseController {
 Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy,
 startNodeList, taskDependType, warningType,
-warningGroupId,receivers,receiversCc, runMode,processInstancePriority,timeout);
+warningGroupId,receivers,receiversCc, runMode,processInstancePriority, workerGroupId, timeout);
 return returnDataList(result);
 } catch (Exception e) {
 logger.error(START_PROCESS_INSTANCE_ERROR.getMsg(),e);

escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java (105 changed lines)

@@ -17,6 +17,7 @@
 package cn.escheduler.api.controller;
+import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.service.QueueService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
@@ -29,7 +30,7 @@ import org.springframework.web.bind.annotation.*;
 import java.util.Map;
-import static cn.escheduler.api.enums.Status.QUERY_QUEUE_LIST_ERROR;
+import static cn.escheduler.api.enums.Status.*;
 /**
@@ -63,5 +64,107 @@ public class QueueController extends BaseController{
}
}
/**
* query queue list paging
* @param loginUser
* @return
*/
@GetMapping(value="/list-paging")
@ResponseStatus(HttpStatus.OK)
public Result queryQueueListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
try{
logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(),searchVal);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
result = queueService.queryList(loginUser,searchVal,pageNo,pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(QUERY_QUEUE_LIST_ERROR.getMsg(),e);
return error(QUERY_QUEUE_LIST_ERROR.getCode(), QUERY_QUEUE_LIST_ERROR.getMsg());
}
}
/**
* create queue
*
* @param loginUser
* @param queue
* @param queueName
* @return
*/
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
public Result createQueue(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "queue") String queue,
@RequestParam(value = "queueName") String queueName) {
logger.info("login user {}, create queue, queue: {}, queueName: {}",
loginUser.getUserName(), queue, queueName);
try {
Map<String, Object> result = queueService.createQueue(loginUser,queue,queueName);
return returnDataList(result);
}catch (Exception e){
logger.error(CREATE_QUEUE_ERROR.getMsg(),e);
return error(CREATE_QUEUE_ERROR.getCode(), CREATE_QUEUE_ERROR.getMsg());
}
}
/**
* update queue
*
* @param loginUser
* @param queue
* @param queueName
* @return
*/
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.CREATED)
public Result updateQueue(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "queue") String queue,
@RequestParam(value = "queueName") String queueName) {
logger.info("login user {}, update queue, id: {}, queue: {}, queueName: {}",
loginUser.getUserName(), id,queue, queueName);
try {
Map<String, Object> result = queueService.updateQueue(loginUser,id,queue,queueName);
return returnDataList(result);
}catch (Exception e){
logger.error(UPDATE_QUEUE_ERROR.getMsg(),e);
return error(UPDATE_QUEUE_ERROR.getCode(), UPDATE_QUEUE_ERROR.getMsg());
}
}
/**
* verify queue and queue name
*
* @param loginUser
* @param queue
* @param queueName
* @return
*/
@PostMapping(value = "/verify-queue")
@ResponseStatus(HttpStatus.OK)
public Result verifyQueue(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value ="queue") String queue,
@RequestParam(value ="queueName") String queueName
) {
try{
logger.info("login user {}, verfiy queue: {} queue name: {}",
loginUser.getUserName(),queue,queueName);
return queueService.verifyQueue(queue,queueName);
}catch (Exception e){
logger.error(VERIFY_QUEUE_ERROR.getMsg(),e);
return error(Status.VERIFY_QUEUE_ERROR.getCode(), Status.VERIFY_QUEUE_ERROR.getMsg());
}
}
}

escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java (17 changed lines)

@@ -46,7 +46,6 @@ public class SchedulerController extends BaseController{
 private static final Logger logger = LoggerFactory.getLogger(SchedulerController.class);
 public static final String DEFAULT_WARNING_TYPE = "NONE";
 public static final String DEFAULT_NOTIFY_GROUP_ID = "1";
-public static final String DEFAULT_MAX_TRY_TIMES = "0";
 public static final String DEFAULT_FAILURE_POLICY = "CONTINUE";
@@ -77,13 +76,15 @@ public class SchedulerController extends BaseController{
 @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy,
 @RequestParam(value = "receivers", required = false) String receivers,
 @RequestParam(value = "receiversCc", required = false) String receiversCc,
+@RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId,
 @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
 logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," +
-"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}",
-loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId, failureStrategy,receivers,receiversCc,processInstancePriority);
+"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}, workGroupId:{}",
+loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId,
+failureStrategy,receivers,receiversCc,processInstancePriority,workerGroupId);
 try {
 Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule,
-warningType, warningGroupId, failureStrategy, receivers,receiversCc,processInstancePriority);
+warningType, warningGroupId, failureStrategy, receivers,receiversCc,processInstancePriority,workerGroupId);
 return returnDataList(result);
 }catch (Exception e){
@@ -114,14 +115,16 @@ public class SchedulerController extends BaseController{
 @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
 @RequestParam(value = "receivers", required = false) String receivers,
 @RequestParam(value = "receiversCc", required = false) String receiversCc,
+@RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId,
 @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
 logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " +
-"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}",
-loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy,receivers,receiversCc,processInstancePriority);
+"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}",
+loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy,
+receivers,receiversCc,processInstancePriority,workerGroupId);
 try {
 Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule,
-warningType, warningGroupId, failureStrategy, receivers,receiversCc,null,processInstancePriority);
+warningType, warningGroupId, failureStrategy, receivers,receiversCc,null,processInstancePriority, workerGroupId);
 return returnDataList(result);
 }catch (Exception e){

escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java (34 changed lines)

@@ -68,7 +68,7 @@ public class TaskRecordController extends BaseController{
 try{
 logger.info("query task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}",
 taskName, state, taskDate, startTime, endTime);
-Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize);
+Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize);
 return returnDataListPaging(result);
 }catch (Exception e){
 logger.error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e);
@@ -77,4 +77,36 @@ public class TaskRecordController extends BaseController{
}
/**
* query history task record list paging
*
* @param loginUser
* @return
*/
@GetMapping("/history-list-paging")
@ResponseStatus(HttpStatus.OK)
public Result queryHistoryTaskRecordListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskName", required = false) String taskName,
@RequestParam(value = "state", required = false) String state,
@RequestParam(value = "sourceTable", required = false) String sourceTable,
@RequestParam(value = "destTable", required = false) String destTable,
@RequestParam(value = "taskDate", required = false) String taskDate,
@RequestParam(value = "startDate", required = false) String startTime,
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize
){
try{
logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}",
taskName, state, taskDate, startTime, endTime);
Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e);
return error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg());
}
}
}

escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java (14 changed lines)

@@ -64,13 +64,14 @@ public class UsersController extends BaseController{
 @RequestParam(value = "userName") String userName,
 @RequestParam(value = "userPassword") String userPassword,
 @RequestParam(value = "tenantId") int tenantId,
+@RequestParam(value = "queue") String queue,
 @RequestParam(value = "email") String email,
 @RequestParam(value = "phone", required = false) String phone) {
-logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, proxyUsers: {}",
-loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone);
+logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}",
+loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue);
 try {
-Map<String, Object> result = usersService.createUser(loginUser, userName, userPassword, email, tenantId, phone);
+Map<String, Object> result = usersService.createUser(loginUser, userName, userPassword,email,tenantId, phone,queue);
 return returnDataList(result);
 }catch (Exception e){
 logger.error(CREATE_USER_ERROR.getMsg(),e);
@@ -127,13 +128,14 @@ public class UsersController extends BaseController{
 @RequestParam(value = "id") int id,
 @RequestParam(value = "userName") String userName,
 @RequestParam(value = "userPassword") String userPassword,
+@RequestParam(value = "queue") String queue,
 @RequestParam(value = "email") String email,
 @RequestParam(value = "tenantId") int tenantId,
 @RequestParam(value = "phone", required = false) String phone) {
-logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, proxyUsers: {}",
-loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone);
+logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}",
+loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue);
 try {
-Map<String, Object> result = usersService.updateUser(id,userName,userPassword,email,tenantId,phone);
+Map<String, Object> result = usersService.updateUser(id,userName,userPassword,email,tenantId,phone,queue);
 return returnDataList(result);
 }catch (Exception e){
 logger.error(UPDATE_USER_ERROR.getMsg(),e);

escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java (144 changed lines, new file)

@@ -0,0 +1,144 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api.controller;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.WorkerGroupService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.model.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
/**
* worker group controller
*/
@RestController
@RequestMapping("/worker-group")
public class WorkerGroupController extends BaseController{
private static final Logger logger = LoggerFactory.getLogger(WorkerGroupController.class);
@Autowired
WorkerGroupService workerGroupService;
/**
* create or update a worker group
* @param loginUser
* @param id
* @param name
* @param ipList
* @return
*/
@PostMapping(value = "/save")
@ResponseStatus(HttpStatus.OK)
public Result saveWorkerGroup(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id", required = false, defaultValue = "0") int id,
@RequestParam(value = "name") String name,
@RequestParam(value = "ipList") String ipList
) {
logger.info("save worker group: login user {}, id:{}, name: {}, ipList: {} ",
loginUser.getUserName(), id, name, ipList);
try {
Map<String, Object> result = workerGroupService.saveWorkerGroup(id, name, ipList);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.SAVE_ERROR.getMsg(),e);
return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
}
}
/**
* query worker groups paging
* @param loginUser
* @param pageNo
* @param searchVal
* @param pageSize
* @return
*/
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
public Result queryAllWorkerGroupsPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize
) {
logger.info("query all worker group paging: login user {}, pageNo:{}, pageSize:{}, searchVal:{}",
loginUser.getUserName() , pageNo, pageSize, searchVal);
try {
Map<String, Object> result = workerGroupService.queryAllGroupPaging(pageNo, pageSize, searchVal);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.SAVE_ERROR.getMsg(),e);
return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
}
}
/**
* query all worker groups
* @param loginUser
* @return
*/
@GetMapping(value = "/all-groups")
@ResponseStatus(HttpStatus.OK)
public Result queryAllWorkerGroups(@RequestAttribute(value = Constants.SESSION_USER) User loginUser
) {
logger.info("query all worker group: login user {}",
loginUser.getUserName() );
try {
Map<String, Object> result = workerGroupService.queryAllGroup();
return returnDataList(result);
}catch (Exception e){
logger.error(Status.SAVE_ERROR.getMsg(),e);
return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
}
}
/**
* delete worker group by id
* @param loginUser
* @param id
* @return
*/
@GetMapping(value = "/delete-by-id")
@ResponseStatus(HttpStatus.OK)
public Result deleteById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") Integer id
) {
logger.info("delete worker group: login user {}, id:{} ",
loginUser.getUserName() , id);
try {
Map<String, Object> result = workerGroupService.deleteWorkerGroupById(id);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.SAVE_ERROR.getMsg(),e);
return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
}
}
}
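
Like the token endpoints, the new worker-group endpoints are plain form-parameter REST calls. A minimal sketch follows, reusing the deployment assumptions from the earlier example; the ipList format is not shown in this diff, so the comma-separated value below is only an assumption.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class WorkerGroupApiSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // POST /worker-group/save: id=0 (the default) presumably creates a new group; an existing id updates it.
        HttpRequest save = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:12345/escheduler/worker-group/save")) // base URL is an assumption
                .header("Content-Type", "application/x-www-form-urlencoded")
                .header("Cookie", "sessionId=<session id issued at login>") // cookie name is an assumption
                .POST(HttpRequest.BodyPublishers.ofString(
                        "id=0&name=etl-workers&ipList=192.168.1.10,192.168.1.11")) // ipList format assumed
                .build();
        System.out.println(client.send(save, HttpResponse.BodyHandlers.ofString()).body());
    }
}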

escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java (60 changed lines, new file)

@@ -0,0 +1,60 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api.dto;
import cn.escheduler.common.enums.ExecutionStatus;
/**
* command state count
*/
public class CommandStateCount {
private int errorCount;
private int normalCount;
private ExecutionStatus commandState;
public CommandStateCount(){}
public CommandStateCount(int errorCount, int normalCount, ExecutionStatus commandState) {
this.errorCount = errorCount;
this.normalCount = normalCount;
this.commandState = commandState;
}
public int getErrorCount() {
return errorCount;
}
public void setErrorCount(int errorCount) {
this.errorCount = errorCount;
}
public int getNormalCount() {
return normalCount;
}
public void setNormalCount(int normalCount) {
this.normalCount = normalCount;
}
public ExecutionStatus getCommandState() {
return commandState;
}
public void setCommandState(ExecutionStatus commandState) {
this.commandState = commandState;
}
}

escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java (22 changed lines)

@@ -149,6 +149,17 @@ public enum Status {
 TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists"),
 IP_IS_EMPTY(10125,"ip is empty"),
 SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}"),
+CREATE_QUEUE_ERROR(10127, "create queue error"),
+QUEUE_NOT_EXIST(10128, "queue {0} not exists"),
+QUEUE_VALUE_EXIST(10129, "queue value {0} already exists"),
+QUEUE_NAME_EXIST(10130, "queue name {0} already exists"),
+UPDATE_QUEUE_ERROR(10131, "update queue error"),
+NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required"),
+VERIFY_QUEUE_ERROR(10133,"verify queue error"),
+NAME_NULL(10134,"name must be not null"),
+NAME_EXIST(10135, "name {0} already exists"),
+SAVE_ERROR(10136, "save error"),
 UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found"),
@@ -198,6 +209,17 @@ public enum Status {
 QUERY_DATABASE_STATE_ERROR(70001,"query database state error"),
 QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error"),
+CREATE_ACCESS_TOKEN_ERROR(70001,"create access token error"),
+GENERATE_TOKEN_ERROR(70002,"generate token error"),
+QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70003,"query access token list paging error"),
+COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error"),
+QUEUE_COUNT_ERROR(90001,"queue count error"),
 ;
 private int code;

escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java (58 changed lines)

@@ -22,6 +22,7 @@ import cn.escheduler.dao.mapper.UserMapper;
 import cn.escheduler.dao.model.Session;
 import cn.escheduler.dao.model.User;
 import org.apache.commons.httpclient.HttpStatus;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -64,37 +65,36 @@ public class LoginHandlerInterceptor implements HandlerInterceptor {
 @Override
 public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
-Session session = sessionService.getSession(request);
-if(logger.isDebugEnabled()){
-logger.debug("session info : " + session);
-}
-if (session == null) {
-response.setStatus(HttpStatus.SC_UNAUTHORIZED);
-logger.info("session info is null ");
-return false;
-}
-if(logger.isDebugEnabled()){
-logger.debug("session id: {}", session.getId());
-}
-//get user object from session
-User user = userMapper.queryById(session.getUserId());
-if(logger.isDebugEnabled()){
-logger.info("user info : " + user);
-}
-if (user == null) {
-response.setStatus(HttpStatus.SC_UNAUTHORIZED);
-return false;
-}
+// get token
+String token = request.getHeader("token");
+User user = null;
+if (StringUtils.isEmpty(token)){
+Session session = sessionService.getSession(request);
+if (session == null) {
+response.setStatus(HttpStatus.SC_UNAUTHORIZED);
+logger.info("session info is null ");
+return false;
+}
+//get user object from session
+user = userMapper.queryById(session.getUserId());
+// if user is null
+if (user == null) {
+response.setStatus(HttpStatus.SC_UNAUTHORIZED);
+logger.info("user does not exist");
+return false;
+}
+}else {
+user = userMapper.queryUserByToken(token);
+if (user == null) {
+response.setStatus(HttpStatus.SC_UNAUTHORIZED);
+logger.info("user token has expired");
+return false;
+}
+}
 request.setAttribute(Constants.SESSION_USER, user);
 return true;
 }
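
Given the interceptor change above, a request can now authenticate with a "token" header instead of a session cookie. A minimal sketch follows, again assuming the deployment values used in the earlier examples; the target endpoint is just one of the new routes introduced in this commit.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class TokenHeaderAuthSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // No session cookie: the interceptor sees a non-empty "token" header and resolves the
        // user via userMapper.queryUserByToken(token) instead of the session table.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:12345/escheduler/worker-group/all-groups")) // base URL is an assumption
                .header("token", "<token created via /access-token/create>")
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}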

escheduler-api/src/main/java/cn/escheduler/api/quartz/ProcessScheduleJob.java (3 changed lines)

@@ -125,13 +125,14 @@ public class ProcessScheduleJob implements Job {
 }
 Command command = new Command();
-command.setCommandType(CommandType.START_PROCESS);
+command.setCommandType(CommandType.SCHEDULER);
 command.setExecutorId(schedule.getUserId());
 command.setFailureStrategy(schedule.getFailureStrategy());
 command.setProcessDefinitionId(schedule.getProcessDefinitionId());
 command.setScheduleTime(scheduledFireTime);
 command.setStartTime(fireTime);
 command.setWarningGroupId(schedule.getWarningGroupId());
+command.setWorkerGroupId(schedule.getWorkerGroupId());
 command.setWarningType(schedule.getWarningType());
 command.setProcessInstancePriority(schedule.getProcessInstancePriority());

escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java (185 changed lines, new file)

@@ -0,0 +1,185 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api.service;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.CheckUtils;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.common.utils.*;
import cn.escheduler.dao.mapper.*;
import cn.escheduler.dao.model.*;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
/**
* user service
*/
@Service
public class AccessTokenService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenService.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
/**
* query access token list
*
* @param loginUser
* @param searchVal
* @param pageNo
* @param pageSize
* @return
*/
public Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
Integer count;
List<AccessToken> accessTokenList;
if (loginUser.getUserType() == UserType.ADMIN_USER){
count = accessTokenMapper.countAccessTokenPaging(0,searchVal);
accessTokenList = accessTokenMapper.queryAccessTokenPaging(0,searchVal, pageInfo.getStart(), pageSize);
}else {
count = accessTokenMapper.countAccessTokenPaging(loginUser.getId(),searchVal);
accessTokenList = accessTokenMapper.queryAccessTokenPaging(loginUser.getId(),searchVal, pageInfo.getStart(), pageSize);
}
pageInfo.setTotalCount(count);
pageInfo.setLists(accessTokenList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check
*
* @param result
* @param bool
* @param userNoOperationPerm
* @param status
* @return
*/
private boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm, String status) {
//only admin can operate
if (bool) {
result.put(Constants.STATUS, userNoOperationPerm);
result.put(status, userNoOperationPerm.getMsg());
return true;
}
return false;
}
/**
* create token
*
* @param userId
* @param expireTime
* @param token
* @return
*/
public Map<String, Object> createToken(int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = new AccessToken();
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setCreateTime(new Date());
accessToken.setUpdateTime(new Date());
// insert
int insert = accessTokenMapper.insert(accessToken);
if (insert > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
}
return result;
}
/**
* generate token
* @param userId
* @param expireTime
* @return
*/
public Map<String, Object> generateToken(int userId, String expireTime) {
Map<String, Object> result = new HashMap<>(5);
String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis()));
result.put(Constants.DATA_LIST, token);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete access token
* @param loginUser
* @param id
* @return
*/
public Map<String, Object> delAccessTokenById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NOT_EXIST, id);
return result;
}
accessTokenMapper.delete(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update token by id
* @param id
* @param userId
* @param expireTime
* @param token
* @return
*/
public Map<String, Object> updateToken(int id,int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = new AccessToken();
accessToken.setId(id);
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setUpdateTime(new Date());
accessTokenMapper.update(accessToken);
putMsg(result, Status.SUCCESS);
return result;
}
}
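
For reference, generateToken above derives the token from the user id, the expire-time string and the current timestamp via EncryptionUtils.getMd5. The standalone sketch below reproduces that scheme with java.security.MessageDigest, assuming getMd5 returns a plain hex-encoded MD5 digest (the utility class itself is not part of this diff).

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class TokenGenerationSketch {
    public static void main(String[] args) throws Exception {
        int userId = 1;
        String expireTime = "2019-12-31 00:00:00";

        // Same input layout as generateToken: userId + expireTime + current millis.
        String seed = userId + expireTime + String.valueOf(System.currentTimeMillis());

        // Assumption: EncryptionUtils.getMd5 is a lower-case hex MD5 of its argument.
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] digest = md5.digest(seed.getBytes(StandardCharsets.UTF_8));
        StringBuilder token = new StringBuilder();
        for (byte b : digest) {
            token.append(String.format("%02x", b));
        }
        System.out.println("generated token: " + token);
    }
}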

199
escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java

@@ -17,29 +17,29 @@
package cn.escheduler.api.service;
import cn.escheduler.api.dto.CommandStateCount;
import cn.escheduler.api.dto.DefineUserDto;
import cn.escheduler.api.dto.TaskCountDto;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.ExecutionStatus;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.common.queue.ITaskQueue;
import cn.escheduler.common.queue.TaskQueueFactory;
import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.dao.mapper.*;
import cn.escheduler.dao.model.DefinitionGroupByUser;
import cn.escheduler.dao.model.ExecuteStatusCount;
import cn.escheduler.dao.model.Project;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.text.MessageFormat;
import java.util.*;
/**
* data analysis service
@@ -55,15 +55,21 @@ public class DataAnalysisService {
@Autowired
ProjectService projectService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefinitionMapper;
@Autowired
CommandMapper commandMapper;
@Autowired
ErrorCommandMapper errorCommandMapper;
@Autowired
TaskInstanceMapper taskInstanceMapper;
/**
* statistical task instance status data
*
@@ -206,4 +212,175 @@
}
return false;
}
/**
* statistical command status data
*
* @param loginUser
* @param projectId
* @param startDate
* @param endDate
* @return
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
if(projectId != 0){
Project project = projectMapper.queryById(projectId);
result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));
if (getResultStatus(result)){
return result;
}
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
// count command state
List<ExecuteStatusCount> commandStateCounts =
commandMapper.countCommandState(loginUser.getId(),
loginUser.getUserType(), start, end, projectId);
// count error command state
List<ExecuteStatusCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(loginUser.getId(),
loginUser.getUserType(), start, end, projectId);
//
Map<ExecutionStatus,Map<String,Integer>> dataMap = new HashMap<>();
Map<String,Integer> commonCommand = new HashMap<>();
commonCommand.put("commandState",0);
commonCommand.put("errorCommandState",0);
// init data map
dataMap.put(ExecutionStatus.SUBMITTED_SUCCESS,commonCommand);
dataMap.put(ExecutionStatus.RUNNING_EXEUTION,commonCommand);
dataMap.put(ExecutionStatus.READY_PAUSE,commonCommand);
dataMap.put(ExecutionStatus.PAUSE,commonCommand);
dataMap.put(ExecutionStatus.READY_STOP,commonCommand);
dataMap.put(ExecutionStatus.STOP,commonCommand);
dataMap.put(ExecutionStatus.FAILURE,commonCommand);
dataMap.put(ExecutionStatus.SUCCESS,commonCommand);
dataMap.put(ExecutionStatus.NEED_FAULT_TOLERANCE,commonCommand);
dataMap.put(ExecutionStatus.KILL,commonCommand);
dataMap.put(ExecutionStatus.WAITTING_THREAD,commonCommand);
dataMap.put(ExecutionStatus.WAITTING_DEPEND,commonCommand);
// put command state
for (ExecuteStatusCount executeStatusCount : commandStateCounts){
Map<String,Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getExecutionStatus()));
commandStateCountsMap.put("commandState", executeStatusCount.getCount());
dataMap.put(executeStatusCount.getExecutionStatus(),commandStateCountsMap);
}
// put error command state
for (ExecuteStatusCount errorExecutionStatus : errorCommandStateCounts){
Map<String,Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getExecutionStatus()));
errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getExecutionStatus(),errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
Iterator<Map.Entry<ExecutionStatus, Map<String, Integer>>> iterator = dataMap.entrySet().iterator();
while (iterator.hasNext()){
Map.Entry<ExecutionStatus, Map<String, Integer>> next = iterator.next();
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"),
next.getValue().get("commandState"),next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* count queue state
* @param loginUser
* @param projectId
* @return
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5);
if(projectId != 0){
Project project = projectMapper.queryById(projectId);
result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));
if (getResultStatus(result)){
return result;
}
}
ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance();
List<String> tasksQueueList = tasksQueue.getAllTasks(cn.escheduler.common.Constants.SCHEDULER_TASKS_QUEUE);
List<String> tasksKillList = tasksQueue.getAllTasks(cn.escheduler.common.Constants.SCHEDULER_TASKS_KILL);
Map<String,Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER){
dataMap.put("taskQueue",tasksQueueList.size());
dataMap.put("taskKill",tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i =0;
for (String taskQueueStr : tasksQueueList){
if (StringUtils.isNotEmpty(taskQueueStr)){
String[] splits = taskQueueStr.split("_");
if (splits.length == 4){
tasksQueueIds[i++]=Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList){
if (StringUtils.isNotEmpty(taskKillStr)){
String[] splits = taskKillStr.split("_");
if (splits.length == 2){
tasksKillIds[i++]=Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
if (tasksQueueIds.length != 0){
taskQueueCount = taskInstanceMapper.countTask(loginUser.getId(),loginUser.getUserType(),projectId, tasksQueueIds);
}
if (tasksKillIds.length != 0){
taskKillCount = taskInstanceMapper.countTask(loginUser.getId(),loginUser.getUserType(),projectId, tasksKillIds);
}
dataMap.put("taskQueue",taskQueueCount);
dataMap.put("taskKill",taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
}
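
countQueueState above depends on the layout of the ZooKeeper queue entries: a queued task entry has four underscore-separated fields ending in the task id, while a kill entry has two. A standalone sketch of that parsing with made-up sample entries (the field meanings in the comments are assumptions based on the key format documented in TaskQueueZkImpl further below):

import java.util.Arrays;
import java.util.List;

public class TaskKeyParseSketch {
    public static void main(String[] args) {
        // hypothetical sample entries; real values come from the ZooKeeper task queues
        List<String> tasksQueueList = Arrays.asList("1_201_2_3001", "0_202_1_3002");
        List<String> tasksKillList = Arrays.asList("203_3003");

        int[] tasksQueueIds = new int[tasksQueueList.size()];
        int[] tasksKillIds = new int[tasksKillList.size()];

        int i = 0;
        for (String taskQueueStr : tasksQueueList) {
            String[] splits = taskQueueStr.split("_");
            if (splits.length == 4) {          // processInstancePriority_processInstanceId_taskPriority_taskId
                tasksQueueIds[i++] = Integer.parseInt(splits[3]);
            }
        }
        i = 0;
        for (String taskKillStr : tasksKillList) {
            String[] splits = taskKillStr.split("_");
            if (splits.length == 2) {          // assumed processInstanceId_taskId; only the last field is used
                tasksKillIds[i++] = Integer.parseInt(splits[1]);
            }
        }
        System.out.println(Arrays.toString(tasksQueueIds)); // [3001, 3002]
        System.out.println(Arrays.toString(tasksKillIds));  // [3003]
    }
}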

41
escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java

@@ -38,6 +38,7 @@ import org.springframework.transaction.annotation.Transactional;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.*;
/**
@@ -209,12 +210,13 @@ public class DataSourceService extends BaseService{
switch (dataSource.getType()) {
case HIVE:
case SQLSERVER:
separator = ";";
break;
case MYSQL:
separator = "&";
break;
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
separator = "&";
break;
default:
@@ -367,6 +369,18 @@ public class DataSourceService extends BaseService{
datasource = JSONObject.parseObject(parameter, SparkDataSource.class);
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case CLICKHOUSE:
datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class);
Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
break;
case ORACLE:
datasource = JSONObject.parseObject(parameter, OracleDataSource.class);
Class.forName(Constants.COM_ORACLE_JDBC_DRIVER);
break;
case SQLSERVER:
datasource = JSONObject.parseObject(parameter, SQLServerDataSource.class);
Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER);
break;
default:
break;
}
@@ -392,6 +406,11 @@ public class DataSourceService extends BaseService{
Connection con = getConnection(type, parameter);
if (con != null) {
isConnection = true;
try {
con.close();
} catch (SQLException e) {
logger.error("close connection fail at DataSourceService::checkConnection()", e);
}
}
return isConnection;
}
@@ -428,9 +447,14 @@ public class DataSourceService extends BaseService{
String address = buildAddress(type, host, port);
String jdbcUrl = address + "/" + database;
String separator = "";
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
|| Constants.SQLSERVER.equals(type.name())) {
separator = ";";
}
@@ -479,6 +503,15 @@ public class DataSourceService extends BaseService{
}
sb.deleteCharAt(sb.length() - 1);
}
} else if (Constants.CLICKHOUSE.equals(type.name())) {
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
} else if (Constants.ORACLE.equals(type.name())) {
sb.append(Constants.JDBC_ORACLE);
sb.append(host).append(":").append(port);
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
}
return sb.toString();
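
With the constants added above, buildAddress produces prefixes of the form jdbc:clickhouse://host:port, jdbc:oracle:thin:@//host:port and jdbc:sqlserver://host:port, while '&' joins extra parameters for ClickHouse and Oracle and ';' for SQL Server. A minimal sketch of the same branching, detached from the service class (the port numbers are just sample values):

public class JdbcAddressSketch {
    // mirrors the JDBC url prefixes added to cn.escheduler.api.utils.Constants
    static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
    static final String JDBC_ORACLE = "jdbc:oracle:thin:@//";
    static final String JDBC_SQLSERVER = "jdbc:sqlserver://";

    static String buildAddress(String type, String host, String port) {
        StringBuilder sb = new StringBuilder();
        if ("CLICKHOUSE".equals(type)) {
            sb.append(JDBC_CLICKHOUSE);
        } else if ("ORACLE".equals(type)) {
            sb.append(JDBC_ORACLE);
        } else if ("SQLSERVER".equals(type)) {
            sb.append(JDBC_SQLSERVER);
        }
        return sb.append(host).append(":").append(port).toString();
    }

    public static void main(String[] args) {
        System.out.println(buildAddress("CLICKHOUSE", "ch-host", "8123"));   // jdbc:clickhouse://ch-host:8123
        System.out.println(buildAddress("ORACLE", "ora-host", "1521"));      // jdbc:oracle:thin:@//ora-host:1521
        System.out.println(buildAddress("SQLSERVER", "mssql-host", "1433")); // jdbc:sqlserver://mssql-host:1433
    }
}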

7
escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java

@@ -90,7 +90,7 @@ public class ExecutorService extends BaseService{
FailureStrategy failureStrategy, String startNodeList,
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
String receivers, String receiversCc, RunMode runMode,
Priority processInstancePriority, int workerGroupId, Integer timeout) throws ParseException {
Map<String, Object> result = new HashMap<>(5);
// timeout is valid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
@@ -115,7 +115,7 @@ public class ExecutorService extends BaseService{
*/
int create = this.createCommand(commandType, processDefinitionId,
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(),
warningGroupId, runMode,processInstancePriority, workerGroupId);
if(create > 0 ){
/**
* according to the process definition ID updateProcessInstance and CC recipient
@@ -405,7 +405,7 @@ public class ExecutorService extends BaseService{
TaskDependType nodeDep, FailureStrategy failureStrategy,
String startNodeList, String schedule, WarningType warningType,
int excutorId, int warningGroupId,
RunMode runMode,Priority processInstancePriority, int workerGroupId) throws ParseException {
/**
* instantiate command schedule instance
@@ -436,6 +436,7 @@ public class ExecutorService extends BaseService{
command.setExecutorId(excutorId);
command.setWarningGroupId(warningGroupId);
command.setProcessInstancePriority(processInstancePriority);
command.setWorkerGroupId(workerGroupId);
Date start = null;
Date end = null;

7
escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java

@@ -509,7 +509,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
// local params
Map<String, Map<String,Object>> localUserDefParams = new HashMap<>();
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
Map<String, String> map = JSONUtils.toMap(parameter);
@@ -517,8 +517,11 @@ public class ProcessInstanceService extends BaseDAGService {
if (localParams != null && !localParams.isEmpty()) {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSON.parseArray(localParams, Property.class);
Map<String,Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType",taskNode.getType());
localParamsMap.put("localParamsList",localParamsList);
if (localParamsList.size() > 0) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}
}
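
The shape of localUserDefParams changes here: each task name now maps to a small map holding both the task type and its local parameter list, instead of the bare list. A minimal sketch of building one entry (the Property class below is a placeholder for the project's Property model, and the sample values are made up):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class LocalParamsSketch {
    // placeholder for the project's Property model: a simple name/value pair
    static class Property {
        final String prop;
        final String value;
        Property(String prop, String value) { this.prop = prop; this.value = value; }
        @Override
        public String toString() { return prop + "=" + value; }
    }

    public static void main(String[] args) {
        Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();

        List<Property> localParamsList = new ArrayList<>();
        localParamsList.add(new Property("bizdate", "20190415"));

        // each task name now maps to {taskType, localParamsList} rather than the list alone
        Map<String, Object> localParamsMap = new HashMap<>();
        localParamsMap.put("taskType", "SHELL");
        localParamsMap.put("localParamsList", localParamsList);
        localUserDefParams.put("task-node-1", localParamsMap);

        // e.g. {task-node-1={taskType=SHELL, localParamsList=[bizdate=20190415]}} (map order may vary)
        System.out.println(localUserDefParams);
    }
}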

238
escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java

@@ -18,12 +18,18 @@ package cn.escheduler.api.service;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.mapper.QueueMapper;
import cn.escheduler.dao.model.Queue;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -32,29 +38,219 @@ import java.util.Map;
* queue service
*/
@Service
public class QueueService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(QueueService.class);
@Autowired
private QueueMapper queueMapper;
/**
* query queue list
*
* @param loginUser
* @return
*/
public Map<String, Object> queryList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
List<Queue> queueList = queueMapper.queryAllQueue();
result.put(Constants.DATA_LIST, queueList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query queue list paging
*
* @param loginUser
* @param searchVal
* @param pageNo
* @param pageSize
* @return
*/
public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Integer count = queueMapper.countQueuePaging(searchVal);
PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
List<Queue> queueList = queueMapper.queryQueuePaging(searchVal, pageInfo.getStart(), pageSize);
pageInfo.setTotalCount(count);
pageInfo.setLists(queueList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create queue
*
* @param loginUser
* @param queue
* @param queueName
* @return
*/
public Map<String, Object> createQueue(User loginUser, String queue, String queueName) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
if(StringUtils.isEmpty(queue)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queue);
return result;
}
if(StringUtils.isEmpty(queueName)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queueName);
return result;
}
if (checkQueueNameExist(queueName)) {
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
if (checkQueueExist(queue)) {
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
Queue queueObj = new Queue();
Date now = new Date();
queueObj.setQueue(queue);
queueObj.setQueueName(queueName);
queueObj.setCreateTime(now);
queueObj.setUpdateTime(now);
queueMapper.insert(queueObj);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update queue
*
* @param loginUser
* @param id
* @param queue
* @param queueName
* @return
*/
public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Queue queueObj = queueMapper.queryById(id);
if (queueObj == null) {
putMsg(result, Status.QUEUE_NOT_EXIST, id);
return result;
}
// whether queue value or queueName is changed
if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) {
putMsg(result, Status.NEED_NOT_UPDATE_QUEUE);
return result;
}
// check queue name is exist
if (!queueName.equals(queueObj.getQueueName())) {
if(checkQueueNameExist(queueName)){
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
}
// check queue value is exist
if (!queue.equals(queueObj.getQueue())) {
if(checkQueueExist(queue)){
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
}
// update queue
Date now = new Date();
queueObj.setQueue(queue);
queueObj.setQueueName(queueName);
queueObj.setUpdateTime(now);
queueMapper.update(queueObj);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify queue and queueName
*
* @param queue
* @param queueName
* @return
*/
public Result verifyQueue(String queue, String queueName) {
Result result=new Result();
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queue);
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queueName);
return result;
}
if(checkQueueNameExist(queueName)){
logger.error("queue name {} has exist, can't create again.", queueName);
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
if(checkQueueExist(queue)){
logger.error("queue value {} has exist, can't create again.", queue);
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check queue exist
*
* @param queue
* @return
*/
private boolean checkQueueExist(String queue) {
return queueMapper.queryByQueue(queue) == null ? false : true;
}
/**
* check queue name exist
*
* @param queueName
* @return
*/
private boolean checkQueueNameExist(String queueName) {
return queueMapper.queryByQueueName(queueName) == null ? false : true;
}
}

7
escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java

@@ -88,7 +88,7 @@ public class SchedulerService extends BaseService {
@Transactional(value = "TransactionManager", rollbackFor = Exception.class)
public Map<String, Object> insertSchedule(User loginUser, String projectName, Integer processDefineId, String schedule, WarningType warningType,
int warningGroupId, FailureStrategy failureStrategy,
String receivers, String receiversCc,Priority processInstancePriority, int workerGroupId) throws IOException {
Map<String, Object> result = new HashMap<String, Object>(5);
@@ -133,6 +133,7 @@ public class SchedulerService extends BaseService {
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(processInstancePriority);
scheduleObj.setWorkerGroupId(workerGroupId);
scheduleMapper.insert(scheduleObj);
/**
@@ -156,13 +157,14 @@ public class SchedulerService extends BaseService {
* @param warningGroupId
* @param failureStrategy
* @param scheduleStatus
* @param workerGroupId
* @return
*/
@Transactional(value = "TransactionManager", rollbackFor = Exception.class)
public Map<String, Object> updateSchedule(User loginUser, String projectName, Integer id, String scheduleExpression, WarningType warningType,
int warningGroupId, FailureStrategy failureStrategy,
String receivers, String receiversCc, ReleaseState scheduleStatus,
Priority processInstancePriority, int workerGroupId) throws IOException {
Map<String, Object> result = new HashMap<String, Object>(5);
Project project = projectMapper.queryByName(projectName);
@@ -221,6 +223,7 @@ public class SchedulerService extends BaseService {
if (scheduleStatus != null) {
schedule.setReleaseState(scheduleStatus);
}
schedule.setWorkerGroupId(workerGroupId);
schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(processInstancePriority);
scheduleMapper.update(schedule);

9
escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java

@@ -29,6 +29,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static cn.escheduler.common.Constants.*;
/**
* task record service
*/
@@ -51,7 +53,7 @@ public class TaskRecordService extends BaseService{
* @param pageSize
* @return
*/
public Map<String,Object> queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate,
String taskDate, String sourceTable,
String destTable, String endDate,
String state, Integer pageNo, Integer pageSize) {
@@ -69,8 +71,9 @@ public class TaskRecordService extends BaseService{
map.put("offset", pageInfo.getStart().toString());
map.put("pageSize", pageInfo.getPageSize().toString());
String table = isHistory ? TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG;
int count = TaskRecordDao.countTaskRecord(map, table);
List<TaskRecord> recordList = TaskRecordDao.queryAllTaskRecord(map, table);
pageInfo.setTotalCount(count);
pageInfo.setLists(recordList);
result.put(Constants.DATA_LIST, pageInfo);

4
escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java

@@ -80,6 +80,10 @@ public class TenantService extends BaseService{
Tenant tenant = new Tenant();
Date now = new Date();
if (!tenantCode.matches("^[0-9a-zA-Z_.]{1,}$") || tenantCode.startsWith("-")){
putMsg(result, Status.VERIFY_TENANT_CODE_ERROR);
return result;
}
tenant.setTenantCode(tenantCode);
tenant.setTenantName(tenantName);
tenant.setQueueId(queueId);
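
The new guard only accepts tenant codes made of digits, letters, underscores and dots. A quick standalone check of the same expression against a few sample codes:

public class TenantCodeCheckSketch {
    // same validation as the guard added above
    static boolean isValid(String tenantCode) {
        return tenantCode.matches("^[0-9a-zA-Z_.]{1,}$") && !tenantCode.startsWith("-");
    }

    public static void main(String[] args) {
        System.out.println(isValid("hdfs_tenant01")); // true
        System.out.println(isValid("etl.user"));      // true
        System.out.println(isValid("bad tenant"));    // false: spaces are not allowed
        System.out.println(isValid("tenant-01"));     // false: '-' is not in the character class
    }
}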

13
escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java

@@ -87,7 +87,8 @@ public class UsersService extends BaseService {
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result = CheckUtils.checkUserParams(userName, userPassword, email, phone);
@@ -114,6 +115,7 @@ public class UsersService extends BaseService {
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(now);
user.setUpdateTime(now);
user.setQueue(queue);
// save user
userMapper.insert(user);
@@ -194,7 +196,13 @@ public class UsersService extends BaseService {
* @param phone
* @return
*/
public Map<String, Object> updateUser(int userId,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
@@ -218,6 +226,7 @@ public class UsersService extends BaseService {
if (StringUtils.isNotEmpty(email)) {
user.setEmail(email);
}
user.setQueue(queue);
user.setPhone(phone);
user.setUpdateTime(now);

155
escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java

@@ -0,0 +1,155 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api.service;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.dao.mapper.WorkerGroupMapper;
import cn.escheduler.dao.model.User;
import cn.escheduler.dao.model.WorkerGroup;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* work group service
*/
@Service
public class WorkerGroupService extends BaseService {
@Autowired
WorkerGroupMapper workerGroupMapper;
/**
* create or update a worker group
* @param id
* @param name
* @param ipList
* @return
*/
public Map<String, Object> saveWorkerGroup(int id, String name, String ipList){
Map<String, Object> result = new HashMap<>(5);
if(StringUtils.isEmpty(name)){
putMsg(result, Status.NAME_NULL);
return result;
}
Date now = new Date();
WorkerGroup workerGroup = null;
if(id != 0){
workerGroup = workerGroupMapper.queryById(id);
}else{
workerGroup = new WorkerGroup();
workerGroup.setCreateTime(now);
}
workerGroup.setName(name);
workerGroup.setIpList(ipList);
workerGroup.setUpdateTime(now);
if(checkWorkerGroupNameExists(workerGroup)){
putMsg(result, Status.NAME_EXIST, workerGroup.getName());
return result;
}
if(workerGroup.getId() != 0 ){
workerGroupMapper.update(workerGroup);
}else{
workerGroupMapper.insert(workerGroup);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check worker group name exists
* @param workerGroup
* @return
*/
private boolean checkWorkerGroupNameExists(WorkerGroup workerGroup) {
List<WorkerGroup> workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName());
if(workerGroupList.size() > 0 ){
// new group has same name..
if(workerGroup.getId() == 0){
return true;
}
// update group...
for(WorkerGroup group : workerGroupList){
if(group.getId() != workerGroup.getId()){
return true;
}
}
}
return false;
}
/**
* query worker group paging
* @param pageNo
* @param pageSize
* @param searchVal
* @return
*/
public Map<String,Object> queryAllGroupPaging(Integer pageNo, Integer pageSize, String searchVal) {
Map<String, Object> result = new HashMap<>(5);
int count = workerGroupMapper.countPaging(searchVal);
PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
List<WorkerGroup> workerGroupList = workerGroupMapper.queryListPaging(pageInfo.getStart(), pageSize, searchVal);
pageInfo.setTotalCount(count);
pageInfo.setLists(workerGroupList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete worker group by id
* @param id
* @return
*/
public Map<String,Object> deleteWorkerGroupById(Integer id) {
Map<String, Object> result = new HashMap<>(5);
int delete = workerGroupMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query all worker group
* @return
*/
public Map<String,Object> queryAllGroup() {
Map<String, Object> result = new HashMap<>(5);
List<WorkerGroup> workerGroupList = workerGroupMapper.queryAllWorkerGroup();
result.put(Constants.DATA_LIST, workerGroupList);
putMsg(result, Status.SUCCESS);
return result;
}
}
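
checkWorkerGroupNameExists treats id == 0 as a create and anything else as an update: on create, any existing group with the same name is a conflict; on update, only a group with the same name but a different id is. A compact sketch of that rule with plain values (the nested WorkerGroup type is a stand-in for the project's model):

import java.util.Arrays;
import java.util.List;

public class WorkerGroupNameCheckSketch {
    // minimal stand-in for the project's WorkerGroup model
    static class WorkerGroup {
        final int id;
        final String name;
        WorkerGroup(int id, String name) { this.id = id; this.name = name; }
    }

    // same duplicate rule as checkWorkerGroupNameExists
    static boolean nameExists(WorkerGroup candidate, List<WorkerGroup> sameNameGroups) {
        if (sameNameGroups.isEmpty()) {
            return false;
        }
        if (candidate.id == 0) {                 // creating: any group with this name is a duplicate
            return true;
        }
        for (WorkerGroup group : sameNameGroups) {
            if (group.id != candidate.id) {      // updating: another group already owns the name
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        List<WorkerGroup> existing = Arrays.asList(new WorkerGroup(3, "default"));
        System.out.println(nameExists(new WorkerGroup(0, "default"), existing)); // true: create collides
        System.out.println(nameExists(new WorkerGroup(3, "default"), existing)); // false: updating itself
        System.out.println(nameExists(new WorkerGroup(7, "default"), existing)); // true: another group owns the name
    }
}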

9
escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java

@@ -82,6 +82,9 @@ public class Constants {
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
/**
* database type
@@ -90,6 +93,9 @@ public class Constants {
public static final String POSTGRESQL = "POSTGRESQL";
public static final String HIVE = "HIVE";
public static final String SPARK = "SPARK";
public static final String CLICKHOUSE = "CLICKHOUSE";
public static final String ORACLE = "ORACLE";
public static final String SQLSERVER = "SQLSERVER";
/**
* jdbc url
@@ -97,6 +103,9 @@ public class Constants {
public static final String JDBC_MYSQL = "jdbc:mysql://";
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String JDBC_ORACLE = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String ADDRESS = "address";

162
escheduler-api/src/test/java/cn/escheduler/api/HttpClientTest.java

@@ -0,0 +1,162 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api;
import java.io.File;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import cn.escheduler.common.utils.EncryptionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class HttpClientTest {
private static final Logger logger = LoggerFactory.getLogger(HttpClientTest.class);
public static void main(String[] args) throws Exception {
// doGETParamPathVariableAndChinese();
// doGETParam();
// doPOSTParam();
String md5 = EncryptionUtils.getMd5(String.valueOf(System.currentTimeMillis()) + "张三");
System.out.println(md5);
System.out.println(md5.length());
}
public static void doPOSTParam()throws Exception{
// create Httpclient
CloseableHttpClient httpclient = HttpClients.createDefault();
// create http POST request
HttpPost httpPost = new HttpPost("http://127.0.0.1:12345/escheduler/projects/create");
httpPost.setHeader("token", "123");
// set parameters
List<NameValuePair> parameters = new ArrayList<NameValuePair>();
parameters.add(new BasicNameValuePair("projectName", "qzw"));
parameters.add(new BasicNameValuePair("desc", "qzw"));
UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(parameters);
httpPost.setEntity(formEntity);
CloseableHttpResponse response = null;
try {
// execute
response = httpclient.execute(httpPost);
// response status code 200
if (response.getStatusLine().getStatusCode() == 200) {
String content = EntityUtils.toString(response.getEntity(), "UTF-8");
System.out.println(content);
}
} finally {
if (response != null) {
response.close();
}
httpclient.close();
}
}
/**
*
* @throws Exception
*/
public static void doGETParamPathVariableAndChinese()throws Exception{
// create HttpClient
CloseableHttpClient httpclient = HttpClients.createDefault();
List<NameValuePair> parameters = new ArrayList<NameValuePair>();
// parameters.add(new BasicNameValuePair("pageSize", "10"));
// define the parameters of the request
URI uri = new URIBuilder("http://127.0.0.1:12345/escheduler/projects/%E5%85%A8%E9%83%A8%E6%B5%81%E7%A8%8B%E6%B5%8B%E8%AF%95/process/list")
.build();
// create http GET request
HttpGet httpGet = new HttpGet(uri);
httpGet.setHeader("token","123");
//response object
CloseableHttpResponse response = null;
try {
// execute http get request
response = httpclient.execute(httpGet);
// response status code 200
if (response.getStatusLine().getStatusCode() == 200) {
String content = EntityUtils.toString(response.getEntity(), "UTF-8");
logger.info("start--------------->");
logger.info(content);
logger.info("end----------------->");
}
} finally {
if (response != null) {
response.close();
}
httpclient.close();
}
}
/**
*
* @throws Exception
*/
public static void doGETParam()throws Exception{
// create HttpClient
CloseableHttpClient httpclient = HttpClients.createDefault();
List<NameValuePair> parameters = new ArrayList<NameValuePair>();
parameters.add(new BasicNameValuePair("processInstanceId", "41415"));
// define the parameters of the request
URI uri = new URIBuilder("http://127.0.0.1:12345/escheduler/projects/%E5%85%A8%E9%83%A8%E6%B5%81%E7%A8%8B%E6%B5%8B%E8%AF%95/instance/view-variables")
.setParameters(parameters)
.build();
// create http GET request
HttpGet httpGet = new HttpGet(uri);
httpGet.setHeader("token","123");
//response object
CloseableHttpResponse response = null;
try {
// execute http get request
response = httpclient.execute(httpGet);
// response status code 200
if (response.getStatusLine().getStatusCode() == 200) {
String content = EntityUtils.toString(response.getEntity(), "UTF-8");
logger.info("start--------------->");
logger.info(content);
logger.info("end----------------->");
}
} finally {
if (response != null) {
response.close();
}
httpclient.close();
}
}
}

78
escheduler-api/src/test/java/cn/escheduler/api/controller/QueueControllerTest.java

@@ -32,9 +32,12 @@ import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.context.WebApplicationContext;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -65,4 +68,79 @@ public class QueueControllerTest {
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void queryPagingList() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
//paramsMap.add("processInstanceId","1380");
paramsMap.add("searchVal","");
paramsMap.add("pageNo","1");
paramsMap.add("pageSize","20");
MvcResult mvcResult = mockMvc.perform(get("/queue/list-paging")
.header("sessionId", "d4541e0d-0349-4f05-9c68-300176cd3c91")
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void createQueue() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("queue","ait111134");
paramsMap.add("queueName","aitName1");
MvcResult mvcResult = mockMvc.perform(post("/queue/create")
.header("sessionId", "d4541e0d-0349-4f05-9c68-300176cd3c91")
.params(paramsMap))
.andExpect(status().isCreated())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
//Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void updateQueue() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("id","2");
paramsMap.add("queue","ait12");
paramsMap.add("queueName","aitName");
MvcResult mvcResult = mockMvc.perform(post("/queue/update")
.header("sessionId", "d4541e0d-0349-4f05-9c68-300176cd3c91")
.params(paramsMap))
.andExpect(status().isCreated())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
//Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void verifyQueue() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("queue","ait123");
paramsMap.add("queueName","aitName");
MvcResult mvcResult = mockMvc.perform(post("/queue/verify-queue")
.header("sessionId", "d4541e0d-0349-4f05-9c68-300176cd3c91")
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
//Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
}

28
escheduler-common/pom.xml

@@ -4,7 +4,7 @@
<parent>
<artifactId>escheduler</artifactId>
<groupId>cn.analysys</groupId>
<version>1.0.1-SNAPSHOT</version>
</parent>
<artifactId>escheduler-common</artifactId>
<name>escheduler-common</name>
@@ -371,6 +371,32 @@
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<exclusions>
<exclusion>
<groupId>com.microsoft.azure</groupId>
<artifactId>azure-keyvault</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>

57
escheduler-common/src/main/java/cn/escheduler/common/Constants.java

@@ -70,25 +70,6 @@ public final class Constants {
*/
public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address";
/**
* spring.redis.maxIdle
*/
public static final String SPRING_REDIS_MAXIDLE = "spring.redis.maxIdle";
/**
* spring.redis.maxTotal
*/
public static final String SPRING_REDIS_MAXTOTAL = "spring.redis.maxTotal";
/**
* spring.redis.host
*/
public static final String SPRING_REDIS_HOST = "spring.redis.host";
/**
* spring.redis.port
*/
public static final String SPRING_REDIS_PORT = "spring.redis.port";
/**
* hdfs configuration
@@ -117,9 +98,14 @@ public final class Constants {
public static final String ESCHEDULER_ENV_PATH = "escheduler.env.path";
/**
* escheduler.env.sh
*/
public static final String ESCHEDULER_ENV_SH = ".escheduler_env.sh";
/**
* python home
*/
public static final String PYTHON_HOME="PYTHON_HOME";
/**
* resource.view.suffixs
@@ -255,8 +241,6 @@ public final class Constants {
public static final String SCHEDULER_QUEUE_IMPL = "escheduler.queue.impl";
public static final String SCHEDULER_QUEUE_REDIS_IMPL = "redis";
/**
* date format of yyyy-MM-dd HH:mm:ss
@@ -463,6 +447,10 @@ public final class Constants {
public static final String TASK_RECORD_PWD = "task.record.datasource.password";
public static String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd";
public static String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd";
public static final String STATUS = "status";
@@ -602,15 +590,29 @@ public final class Constants {
public static final String JDBC_POSTGRESQL_CLASS_NAME = "org.postgresql.Driver";
/**
* hive
*/
public static final String JDBC_HIVE_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";
/**
* spark
*/
public static final String JDBC_SPARK_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";
/**
* ClickHouse
*/
public static final String JDBC_CLICKHOUSE_CLASS_NAME = "ru.yandex.clickhouse.ClickHouseDriver";
/**
* Oracle
*/
public static final String JDBC_ORACLE_CLASS_NAME = "oracle.jdbc.driver.OracleDriver";
/**
* SQL Server
*/
public static final String JDBC_SQLSERVER_CLASS_NAME = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
/**
* spark params constant
@@ -812,4 +814,9 @@ public final class Constants {
public static final String CONTENT = "content";
public static final String DEPENDENT_SPLIT = ":||";
public static final String DEPENDENT_ALL = "ALL";
/**
*
*/
}

5
escheduler-common/src/main/java/cn/escheduler/common/enums/DbType.java

@@ -25,6 +25,9 @@ public enum DbType {
* 1 postgresql
* 2 hive
* 3 spark
* 4 clickhouse
* 5 oracle
* 6 sqlserver
*/
MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE, ORACLE, SQLSERVER
}

75
escheduler-common/src/main/java/cn/escheduler/common/job/db/ClickHouseDataSource.java

@@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* data source of ClickHouse
*/
public class ClickHouseDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(ClickHouseDataSource.class);
/**
* gets the JDBC url for the data source connection
* @return
*/
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) {
jdbcUrl += "/";
}
jdbcUrl += getDatabase();
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += "?" + getOther();
}
return jdbcUrl;
}
/**
* test whether the data source can be connected successfully
* @throws Exception
*/
@Override
public void isConnectable() throws Exception {
Connection con = null;
try {
Class.forName("ru.yandex.clickhouse.ClickHouseDriver");
con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword());
} finally {
if (con != null) {
try {
con.close();
} catch (SQLException e) {
logger.error("ClickHouse datasource try conn close conn error", e);
throw e;
}
}
}
}
}

6
escheduler-common/src/main/java/cn/escheduler/common/job/db/DataSourceFactory.java

@@ -39,6 +39,12 @@ public class DataSourceFactory {
return JSONUtils.parseObject(parameter, HiveDataSource.class);
case SPARK:
return JSONUtils.parseObject(parameter, SparkDataSource.class);
case CLICKHOUSE:
return JSONUtils.parseObject(parameter, ClickHouseDataSource.class);
case ORACLE:
return JSONUtils.parseObject(parameter, OracleDataSource.class);
case SQLSERVER:
return JSONUtils.parseObject(parameter, SQLServerDataSource.class);
default:
return null;
}

75
escheduler-common/src/main/java/cn/escheduler/common/job/db/OracleDataSource.java

@@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* data source of Oracle
*/
public class OracleDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(OracleDataSource.class);
/**
* gets the JDBC url for the data source connection
* @return
*/
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) {
jdbcUrl += "/";
}
jdbcUrl += getDatabase();
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += "?" + getOther();
}
return jdbcUrl;
}
/**
* test whether the data source can be connected successfully
* @throws Exception
*/
@Override
public void isConnectable() throws Exception {
Connection con = null;
try {
Class.forName("oracle.jdbc.driver.OracleDriver");
con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword());
} finally {
if (con != null) {
try {
con.close();
} catch (SQLException e) {
logger.error("Oracle datasource try conn close conn error", e);
throw e;
}
}
}
}
}

71
escheduler-common/src/main/java/cn/escheduler/common/job/db/SQLServerDataSource.java

@@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* data source of SQL Server
*/
public class SQLServerDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(SQLServerDataSource.class);
/**
* gets the JDBC url for the data source connection
* @return
*/
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
jdbcUrl += ";databaseName=" + getDatabase();
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += ";" + getOther();
}
return jdbcUrl;
}
/**
* test whether the data source can be connected successfully
* @throws Exception
*/
@Override
public void isConnectable() throws Exception {
Connection con = null;
try {
Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword());
} finally {
if (con != null) {
try {
con.close();
} catch (SQLException e) {
logger.error("SQL Server datasource try conn close conn error", e);
throw e;
}
}
}
}
}

16
escheduler-common/src/main/java/cn/escheduler/common/model/TaskNode.java

@@ -113,6 +113,12 @@ public class TaskNode {
*/
private Priority taskInstancePriority;
/**
* worker group id
*/
private int workerGroupId;
/**
* task time out
*/
@@ -224,6 +230,7 @@ public class TaskNode {
Objects.equals(extras, taskNode.extras) &&
Objects.equals(runFlag, taskNode.runFlag) &&
Objects.equals(dependence, taskNode.dependence) &&
Objects.equals(workerGroupId, taskNode.workerGroupId) &&
CollectionUtils.equalLists(depList, taskNode.depList);
}
@@ -303,6 +310,15 @@ public class TaskNode {
", dependence='" + dependence + '\'' +
", taskInstancePriority=" + taskInstancePriority +
", timeout='" + timeout + '\'' +
", workerGroupId='" + workerGroupId + '\'' +
'}';
}
public int getWorkerGroupId() {
return workerGroupId;
}
public void setWorkerGroupId(int workerGroupId) {
this.workerGroupId = workerGroupId;
}
} }
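
A short sketch (illustrative fragment only) of how the new field is used on the model; because workerGroupId now participates in equals() and toString() above, two otherwise identical nodes bound to different worker groups no longer compare equal.

// Illustrative fragment, not part of the patch: pin a task node to a worker group.
TaskNode node = new TaskNode();
node.setName("shell-demo");      // hypothetical task name
node.setWorkerGroupId(2);        // hypothetical worker group id; -1 is the "default group" value used in the schema below
System.out.println(node.getWorkerGroupId());   // -> 2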

9
escheduler-common/src/main/java/cn/escheduler/common/queue/ITaskQueue.java

@ -54,10 +54,17 @@ public interface ITaskQueue {
* an element pops out of the queue
*
* @param key queue name
+ * @param remove whether to remove the element
* @return
*/
- String poll(String key);
+ String poll(String key, boolean remove);
+ /**
+ * remove an element from the queue
+ * @param key
+ * @param value
+ */
+ void removeNode(String key, String value);
/**
* add an element to the set
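
The new boolean distinguishes a non-destructive peek from a consuming poll. A small sketch (illustrative fragment; the queue name constant mirrors the test further down) of both modes:

// Illustrative fragment: peek vs consume with the new boolean flag.
ITaskQueue taskQueue = TaskQueueFactory.getTaskQueueInstance();

String peeked   = taskQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, false); // element stays in the queue
String consumed = taskQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, true);  // element is deleted via removeNode(...)

// removeNode can also be called directly when the caller still holds the exact queued value:
// taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, queuedValue);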

33
escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueZkImpl.java

@ -137,10 +137,11 @@ public class TaskQueueZkImpl extends AbstractZKClient implements ITaskQueue {
*
* key format: processInstancePriority_processInstanceId_taskPriority_taskId, high <- low
* @param key task queue name
+ * @param remove whether to remove the element
* @return the task id to be executed
*/
@Override
- public String poll(String key) {
+ public String poll(String key, boolean remove) {
try{
CuratorFramework zk = getZkClient();
String tasksQueuePath = getTasksPath(key) + Constants.SINGLE_SLASH;
@ -181,18 +182,11 @@ public class TaskQueueZkImpl extends AbstractZKClient implements ITaskQueue {
String[] vals = targetTaskKey.split(Constants.UNDERLINE);
- try{
- zk.delete().forPath(taskIdPath);
- // String path = conf.getString(Constants.ZOOKEEPER_SCHEDULER_ROOT) + Constants.SINGLE_SLASH + Constants.SCHEDULER_TASKS_QUEUE + "_remove" + Constants.SINGLE_SLASH + targetTaskKey;
- // getZkClient().create().creatingParentContainersIfNeeded().withMode(CreateMode.PERSISTENT).forPath(path,
- // Bytes.toBytes(targetTaskKey));
- }catch(Exception e){
- logger.error(String.format("delete task:%s from zookeeper fail, task detail: %s exception" ,targetTaskKey, vals[vals.length - 1]) ,e);
- }
- logger.info("consume task: {},there still have {} tasks need to be executed", targetTaskKey, size - 1);
- return targetTaskKey;
+ if(remove){
+ removeNode(key, targetTaskKey);
+ }
+ logger.info("consume task: {},there still have {} tasks need to be executed", vals[vals.length - 1], size - 1);
+ return vals[vals.length - 1];
}else{
logger.error("should not go here, task queue poll error, please check!");
}
@ -204,6 +198,21 @@ public class TaskQueueZkImpl extends AbstractZKClient implements ITaskQueue {
return null;
}
+ @Override
+ public void removeNode(String key, String nodeValue){
+ CuratorFramework zk = getZkClient();
+ String tasksQueuePath = getTasksPath(key) + Constants.SINGLE_SLASH;
+ String taskIdPath = tasksQueuePath + nodeValue;
+ logger.info("consume task {}", taskIdPath);
+ try{
+ zk.delete().forPath(taskIdPath);
+ }catch(Exception e){
+ logger.error(String.format("delete task:%s from zookeeper fail, exception:" ,nodeValue) ,e);
+ }
+ }
/**

7
escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java

@ -46,13 +46,6 @@ public class CommonUtils {
return envPath;
}
- /**
- * @return get the path of Python system environment variables
- */
- public static String getPythonSystemEnvPath() {
- return getString(ESCHEDULER_ENV_PY);
- }
/**
* @return get queue implementation name
*/

2
escheduler-common/src/main/java/cn/escheduler/common/utils/DateUtils.java

@ -72,7 +72,7 @@ public class DateUtils {
public static Date parse(String date,String format){
try {
return new SimpleDateFormat(format).parse(date);
- } catch (ParseException e) {
+ } catch (Exception e) {
logger.error("error while parse date:" + date, e);
}
return null;
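
Widening the catch means any malformed input or bad pattern is logged and swallowed; callers get null either way. A tiny illustrative fragment of that contract:

// Illustrative fragment: parse() returns null instead of propagating parse failures.
Date ok  = DateUtils.parse("2019-04-15 10:00:00", "yyyy-MM-dd HH:mm:ss"); // a Date instance
Date bad = DateUtils.parse("not-a-date", "yyyy-MM-dd HH:mm:ss");          // null, error is logged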

56
escheduler-common/src/main/java/cn/escheduler/common/utils/FileUtils.java

@ -368,5 +368,61 @@ public class FileUtils {
org.apache.commons.io.FileUtils.forceDelete(new File(filename));
}
/**
* Gets all sub-directories under the parentDir directory
* @param parentDir
* @return
*/
public static File[] getAllDir(String parentDir){
if(parentDir == null || "".equals(parentDir)) {
throw new RuntimeException("parentDir can not be empty");
}
File file = new File(parentDir);
if(!file.exists() || !file.isDirectory()) {
throw new RuntimeException("parentDir not exist, or is not a directory:"+parentDir);
}
File[] schemaDirs = file.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
if (pathname.isDirectory()) {
return true;
}
else {
return false;
}
}
});
return schemaDirs;
}
/**
* Get Content
* @param inputStream
* @return
* @throws IOException
*/
public static String readFile2Str(InputStream inputStream) throws IOException{
String all_content=null;
try {
all_content = new String();
InputStream ins = inputStream;
ByteArrayOutputStream outputstream = new ByteArrayOutputStream();
byte[] str_b = new byte[1024];
int i = -1;
while ((i=ins.read(str_b)) > 0) {
outputstream.write(str_b,0,i);
}
all_content = outputstream.toString();
return all_content;
} catch (Exception e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e);
}
}
}
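
A sketch (illustrative paths; not part of the patch) of how the two new helpers combine when scanning upgrade directories and reading a small text file; note that readFile2Str does not close the stream, so the caller manages it.

// Illustrative only: list sub-directories and read a small text file with the new helpers.
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import cn.escheduler.common.utils.FileUtils;

public class FileUtilsDemo {
    public static void main(String[] args) throws Exception {
        File[] dirs = FileUtils.getAllDir("sql/upgrade");   // throws RuntimeException if missing or not a directory
        for (File dir : dirs) {
            System.out.println(dir.getName());
        }

        // readFile2Str does not close the stream itself, so manage it here
        try (InputStream in = new FileInputStream(new File("sql/soft_version"))) {
            System.out.println(FileUtils.readFile2Str(in).trim());
        }
    }
}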

104
escheduler-common/src/main/java/cn/escheduler/common/utils/MysqlUtil.java

@ -0,0 +1,104 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
public class MysqlUtil {
public static final Logger logger = LoggerFactory.getLogger(MysqlUtil.class);
private static MysqlUtil instance;
MysqlUtil() {
}
public static MysqlUtil getInstance() {
if (null == instance) {
syncInit();
}
return instance;
}
private static synchronized void syncInit() {
if (instance == null) {
instance = new MysqlUtil();
}
}
public void release(ResultSet rs, Statement stmt, Connection conn) {
try {
if (rs != null) {
rs.close();
rs = null;
}
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e);
} finally {
try {
if (stmt != null) {
stmt.close();
stmt = null;
}
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e);
} finally {
try {
if (conn != null) {
conn.close();
conn = null;
}
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e);
}
}
}
}
public static void realeaseResource(ResultSet rs, PreparedStatement ps, Connection conn) {
MysqlUtil.getInstance().release(rs,ps,conn);
if (null != rs) {
try {
rs.close();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
}
}
if (null != ps) {
try {
ps.close();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
}
}
if (null != conn) {
try {
conn.close();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
}
}
}
}
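
The nested finally blocks above guarantee the rs -> stmt -> conn close order; for comparison only (not a replacement proposed by this patch), the same guarantee on Java 7+ can be written with try-with-resources. Connection settings below are placeholders.

// Illustrative only: the same rs -> stmt -> conn close order via try-with-resources.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class JdbcCleanupDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:mysql://127.0.0.1:3306/escheduler", "user", "pass");   // placeholder settings
             PreparedStatement ps = conn.prepareStatement("select 1");
             ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        } // resources are closed in reverse declaration order
    }
}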

150
escheduler-common/src/main/java/cn/escheduler/common/utils/SchemaUtils.java

@ -0,0 +1,150 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.utils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Metadata related common classes
*
*/
public class SchemaUtils {
private static final Logger logger = LoggerFactory.getLogger(SchemaUtils.class);
private static Pattern p = Pattern.compile("\\s*|\t|\r|\n");
/**
* 获取所有upgrade目录下的可升级的schema
* Gets upgradable schemas for all upgrade directories
* @return
*/
@SuppressWarnings("unchecked")
public static List<String> getAllSchemaList() {
List<String> schemaDirList = new ArrayList<>();
File[] schemaDirArr = FileUtils.getAllDir("sql/upgrade");
if(schemaDirArr == null || schemaDirArr.length == 0) {
return null;
}
for(File file : schemaDirArr) {
schemaDirList.add(file.getName());
}
Collections.sort(schemaDirList , new Comparator() {
@Override
public int compare(Object o1 , Object o2){
try {
String dir1 = String.valueOf(o1);
String dir2 = String.valueOf(o2);
String version1 = dir1.split("_")[0];
String version2 = dir2.split("_")[0];
if(version1.equals(version2)) {
return 0;
}
if(SchemaUtils.isAGreatVersion(version1, version2)) {
return 1;
}
return -1;
} catch (Exception e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e);
}
}
});
return schemaDirList;
}
/**
* 判断schemaVersion是否比version版本高
* Determine whether schemaVersion is higher than version
* @param schemaVersion
* @param version
* @return
*/
public static boolean isAGreatVersion(String schemaVersion, String version) {
if(StringUtils.isEmpty(schemaVersion) || StringUtils.isEmpty(version)) {
throw new RuntimeException("schemaVersion or version is empty");
}
String[] schemaVersionArr = schemaVersion.split("\\.");
String[] versionArr = version.split("\\.");
int arrLength = schemaVersionArr.length < versionArr.length ? schemaVersionArr.length : versionArr.length;
for(int i = 0 ; i < arrLength ; i++) {
if(Integer.valueOf(schemaVersionArr[i]) > Integer.valueOf(versionArr[i])) {
return true;
}else if(Integer.valueOf(schemaVersionArr[i]) < Integer.valueOf(versionArr[i])) {
return false;
}
}
// 说明直到第arrLength-1个元素,两个版本号都一样,此时谁的arrLength大,谁的版本号就大
// If the version and schema version is the same from 0 up to the arrlength-1 element,whoever has a larger arrLength has a larger version number
return schemaVersionArr.length > versionArr.length;
}
/**
* Gets the current software version number of the system
* @return
*/
public static String getSoftVersion() {
String soft_version;
try {
soft_version = FileUtils.readFile2Str(new FileInputStream(new File("sql/soft_version")));
soft_version = replaceBlank(soft_version);
} catch (FileNotFoundException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException("Failed to get the product version description file. The file could not be found", e);
} catch (IOException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException("Failed to get product version number description file, failed to read the file", e);
}
return soft_version;
}
/**
* 去掉字符串中的空格回车换行和制表符
* Strips the string of space carriage returns and tabs
* @param str
* @return
*/
public static String replaceBlank(String str) {
String dest = "";
if (str!=null) {
Matcher m = p.matcher(str);
dest = m.replaceAll("");
}
return dest;
}
}
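
A few concrete cases, worked by hand from the comparison above, showing how the dot-separated segment comparison and the length tie-break behave:

// Illustrative fragment: expected results of the comparison above.
SchemaUtils.isAGreatVersion("1.0.2", "1.0.1");   // true  (third segment 2 > 1)
SchemaUtils.isAGreatVersion("1.0.1", "1.1.0");   // false (second segment 0 < 1)
SchemaUtils.isAGreatVersion("1.0.1.1", "1.0.1"); // true  (equal prefix, longer version wins)
SchemaUtils.isAGreatVersion("1.0.1", "1.0.1");   // false (identical)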

317
escheduler-common/src/main/java/cn/escheduler/common/utils/ScriptRunner.java

@ -0,0 +1,317 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.Reader;
import java.sql.*;
/*
* Slightly modified version of the com.ibatis.common.jdbc.ScriptRunner class
* from the iBATIS Apache project. Only removed dependency on Resource class
* and a constructor
*/
/*
* Copyright 2004 Clinton Begin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tool to run database scripts
*/
public class ScriptRunner {
public static final Logger logger = LoggerFactory.getLogger(ScriptRunner.class);
private static final String DEFAULT_DELIMITER = ";";
private Connection connection;
private boolean stopOnError;
private boolean autoCommit;
private String delimiter = DEFAULT_DELIMITER;
private boolean fullLineDelimiter = false;
/**
* Default constructor
*/
public ScriptRunner(Connection connection, boolean autoCommit, boolean stopOnError) {
this.connection = connection;
this.autoCommit = autoCommit;
this.stopOnError = stopOnError;
}
public static void main(String[] args) {
String dbName = "db_mmu";
String appKey = dbName.substring(dbName.lastIndexOf("_")+1, dbName.length());
System.out.println(appKey);
}
public void setDelimiter(String delimiter, boolean fullLineDelimiter) {
this.delimiter = delimiter;
this.fullLineDelimiter = fullLineDelimiter;
}
/**
* Runs an SQL script (read in using the Reader parameter)
*
* @param reader
* - the source of the script
*/
public void runScript(Reader reader) throws IOException, SQLException {
try {
boolean originalAutoCommit = connection.getAutoCommit();
try {
if (originalAutoCommit != this.autoCommit) {
connection.setAutoCommit(this.autoCommit);
}
runScript(connection, reader);
} finally {
connection.setAutoCommit(originalAutoCommit);
}
} catch (IOException e) {
throw e;
} catch (SQLException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException("Error running script. Cause: " + e, e);
}
}
public void runScript(Reader reader, String dbName) throws IOException, SQLException {
try {
boolean originalAutoCommit = connection.getAutoCommit();
try {
if (originalAutoCommit != this.autoCommit) {
connection.setAutoCommit(this.autoCommit);
}
runScript(connection, reader, dbName);
} finally {
connection.setAutoCommit(originalAutoCommit);
}
} catch (IOException e) {
throw e;
} catch (SQLException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException("Error running script. Cause: " + e, e);
}
}
/**
* Runs an SQL script (read in using the Reader parameter) using the connection
* passed in
*
* @param conn
* - the connection to use for the script
* @param reader
* - the source of the script
* @throws SQLException
* if any SQL errors occur
* @throws IOException
* if there is an error reading from the Reader
*/
private void runScript(Connection conn, Reader reader) throws IOException, SQLException {
StringBuffer command = null;
try {
LineNumberReader lineReader = new LineNumberReader(reader);
String line = null;
while ((line = lineReader.readLine()) != null) {
if (command == null) {
command = new StringBuffer();
}
String trimmedLine = line.trim();
if (trimmedLine.startsWith("--")) {
logger.info(trimmedLine);
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) {
// Do nothing
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) {
// Do nothing
} else if (trimmedLine.startsWith("delimiter")) {
String newDelimiter = trimmedLine.split(" ")[1];
this.setDelimiter(newDelimiter, fullLineDelimiter);
} else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter())
|| fullLineDelimiter && trimmedLine.equals(getDelimiter())) {
command.append(line.substring(0, line.lastIndexOf(getDelimiter())));
command.append(" ");
Statement statement = conn.createStatement();
// logger.info(command.toString());
boolean hasResults = false;
logger.info("sql:"+command.toString());
if (stopOnError) {
hasResults = statement.execute(command.toString());
} else {
try {
statement.execute(command.toString());
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw e;
}
}
ResultSet rs = statement.getResultSet();
if (hasResults && rs != null) {
ResultSetMetaData md = rs.getMetaData();
int cols = md.getColumnCount();
for (int i = 0; i < cols; i++) {
String name = md.getColumnLabel(i);
logger.info(name + "\t");
}
logger.info("");
while (rs.next()) {
for (int i = 0; i < cols; i++) {
String value = rs.getString(i);
logger.info(value + "\t");
}
logger.info("");
}
}
command = null;
try {
statement.close();
} catch (Exception e) {
// Ignore to workaround a bug in Jakarta DBCP
}
Thread.yield();
} else {
command.append(line);
command.append(" ");
}
}
} catch (SQLException e) {
logger.error("Error executing: " + command.toString());
throw e;
} catch (IOException e) {
e.fillInStackTrace();
logger.error("Error executing: " + command.toString());
throw e;
}
}
private void runScript(Connection conn, Reader reader , String dbName) throws IOException, SQLException {
StringBuffer command = null;
String sql = "";
String appKey = dbName.substring(dbName.lastIndexOf("_")+1, dbName.length());
try {
LineNumberReader lineReader = new LineNumberReader(reader);
String line = null;
while ((line = lineReader.readLine()) != null) {
if (command == null) {
command = new StringBuffer();
}
String trimmedLine = line.trim();
if (trimmedLine.startsWith("--")) {
logger.info(trimmedLine);
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) {
// Do nothing
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) {
// Do nothing
} else if (trimmedLine.startsWith("delimiter")) {
String newDelimiter = trimmedLine.split(" ")[1];
this.setDelimiter(newDelimiter, fullLineDelimiter);
} else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter())
|| fullLineDelimiter && trimmedLine.equals(getDelimiter())) {
command.append(line.substring(0, line.lastIndexOf(getDelimiter())));
command.append(" ");
Statement statement = conn.createStatement();
// logger.info(command.toString());
sql = command.toString().replaceAll("\\{\\{APPDB\\}\\}", dbName);
boolean hasResults = false;
logger.info("sql:"+sql);
if (stopOnError) {
hasResults = statement.execute(sql);
} else {
try {
statement.execute(sql);
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw e;
}
}
ResultSet rs = statement.getResultSet();
if (hasResults && rs != null) {
ResultSetMetaData md = rs.getMetaData();
int cols = md.getColumnCount();
for (int i = 0; i < cols; i++) {
String name = md.getColumnLabel(i);
logger.info(name + "\t");
}
logger.info("");
while (rs.next()) {
for (int i = 0; i < cols; i++) {
String value = rs.getString(i);
logger.info(value + "\t");
}
logger.info("");
}
}
command = null;
try {
statement.close();
} catch (Exception e) {
// Ignore to workaround a bug in Jakarta DBCP
}
Thread.yield();
} else {
command.append(line);
command.append(" ");
}
}
} catch (SQLException e) {
logger.error("Error executing: " + sql);
throw e;
} catch (IOException e) {
e.fillInStackTrace();
logger.error("Error executing: " + sql);
throw e;
}
}
private String getDelimiter() {
return delimiter;
}
}
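
A sketch (illustrative JDBC settings and a hypothetical script path) of driving ScriptRunner against a MySQL connection; the two-argument runScript overload additionally substitutes {{APPDB}} with the supplied database name.

// Illustrative only: run an SQL script file statement by statement.
import java.io.FileReader;
import java.io.Reader;
import java.sql.Connection;
import java.sql.DriverManager;
import cn.escheduler.common.utils.ScriptRunner;

public class ScriptRunnerDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:mysql://127.0.0.1:3306/escheduler", "user", "pass");          // placeholder settings
             Reader reader = new FileReader("sql/upgrade/1.0.1_schema/mysql/escheduler_ddl.sql")) {  // hypothetical path
            ScriptRunner runner = new ScriptRunner(conn, true, true);  // autoCommit = true, stopOnError = true
            runner.runScript(reader);
            // runScript(reader, "some_db") would additionally replace {{APPDB}} with "some_db"
        }
    }
}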

1
escheduler-common/src/main/resources/common/common.properties

@ -18,7 +18,6 @@ hdfs.startup.state=true
# system env path. self configuration, please make sure the directory and file exists and have read write execute permissions
escheduler.env.path=/opt/.escheduler_env.sh
-escheduler.env.py=/opt/escheduler_env.py
#resource.view.suffixs
resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml

6
escheduler-common/src/test/java/cn/escheduler/common/queue/TaskQueueImplTest.java

@ -49,9 +49,9 @@ public class TaskQueueImplTest {
tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"4");
//pop
- String node1 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE);
+ String node1 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, false);
assertEquals(node1,"1");
- String node2 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE);
+ String node2 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, false);
assertEquals(node2,"2");
//sadd
@ -99,7 +99,7 @@ public class TaskQueueImplTest {
}
}
- String node1 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE);
+ String node1 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, false);
assertEquals(node1,"0");
//clear all data

2
escheduler-dao/pom.xml

@ -4,7 +4,7 @@
<parent>
<groupId>cn.analysys</groupId>
<artifactId>escheduler</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.0.1-SNAPSHOT</version>
</parent>
<artifactId>escheduler-dao</artifactId>
<name>escheduler-dao</name>

54
escheduler-dao/readme.txt

@ -0,0 +1,54 @@
-- user-specified queue
alter table t_escheduler_user add queue varchar(64);
-- access token
CREATE TABLE `t_escheduler_access_token` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`user_id` int(11) DEFAULT NULL COMMENT '用户id',
`token` varchar(64) DEFAULT NULL COMMENT 'token令牌',
`expire_time` datetime DEFAULT NULL COMMENT 'token有效结束时间',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
CREATE TABLE `t_escheduler_error_command` (
`id` int(11) NOT NULL COMMENT '主键',
`command_type` tinyint(4) NULL DEFAULT NULL COMMENT '命令类型:0 启动工作流,1 从当前节点开始执行,2 恢复被容错的工作流,3 恢复暂停流程,4 从失败节点开始执行,5 补数,6 调度,7 重跑,8 暂停,9 停止,10 恢复等待线程',
`executor_id` int(11) NULL DEFAULT NULL COMMENT '命令执行者',
`process_definition_id` int(11) NULL DEFAULT NULL COMMENT '流程定义id',
`command_param` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '命令的参数(json格式)',
`task_depend_type` tinyint(4) NULL DEFAULT NULL COMMENT '节点依赖类型',
`failure_strategy` tinyint(4) NULL DEFAULT 0 COMMENT '失败策略:0结束,1继续',
`warning_type` tinyint(4) NULL DEFAULT 0 COMMENT '告警类型',
`warning_group_id` int(11) NULL DEFAULT NULL COMMENT '告警组',
`schedule_time` datetime(0) NULL DEFAULT NULL COMMENT '预期运行时间',
`start_time` datetime(0) NULL DEFAULT NULL COMMENT '开始时间',
`update_time` datetime(0) NULL DEFAULT NULL COMMENT '更新时间',
`dependence` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '依赖字段',
`process_instance_priority` int(11) NULL DEFAULT NULL COMMENT '流程实例优先级:0 Highest,1 High,2 Medium,3 Low,4 Lowest',
`message` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '执行信息',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;
CREATE TABLE `t_escheduler_worker_group` (
`id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id',
`name` varchar(256) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL COMMENT '组名称',
`ip_list` varchar(256) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL COMMENT 'worker地址列表',
`create_time` datetime(0) NULL DEFAULT NULL COMMENT '创建时间',
`update_time` datetime(0) NULL DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;
ALTER TABLE `t_escheduler_task_instance`
ADD COLUMN `worker_group_id` int(11) NULL DEFAULT -1 COMMENT '任务指定运行的worker分组' AFTER `task_instance_priority`;
ALTER TABLE `t_escheduler_command`
ADD COLUMN `worker_group_id` int(11) NULL DEFAULT -1 COMMENT '任务指定运行的worker分组' NULL AFTER `process_instance_priority`;
ALTER TABLE `t_escheduler_error_command`
ADD COLUMN `worker_group_id` int(11) NULL DEFAULT -1 COMMENT '任务指定运行的worker分组' NULL AFTER `process_instance_priority`;
ALTER TABLE `t_escheduler_schedules`
ADD COLUMN `worker_group_id` int(11) NULL DEFAULT -1 COMMENT '任务指定运行的worker分组' NULL AFTER `process_instance_priority`;

119
escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java

@ -59,7 +59,7 @@ public class ProcessDao extends AbstractBaseDao {
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
- private ProjectMapper projectMapper;
+ private UserMapper userMapper;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@ -88,6 +88,12 @@ public class ProcessDao extends AbstractBaseDao {
@Autowired
private ResourceMapper resourceMapper;
+ @Autowired
+ private WorkerGroupMapper workerGroupMapper;
+ @Autowired
+ private ErrorCommandMapper errorCommandMapper;
/**
* task queue impl
*/
@ -102,7 +108,7 @@ public class ProcessDao extends AbstractBaseDao {
*/
@Override
protected void init() {
- projectMapper = getMapper(ProjectMapper.class);
+ userMapper=getMapper(UserMapper.class);
processDefineMapper = getMapper(ProcessDefinitionMapper.class);
processInstanceMapper = getMapper(ProcessInstanceMapper.class);
dataSourceMapper = getMapper(DataSourceMapper.class);
@ -112,6 +118,7 @@ public class ProcessDao extends AbstractBaseDao {
scheduleMapper = getMapper(ScheduleMapper.class);
udfFuncMapper = getMapper(UdfFuncMapper.class);
resourceMapper = getMapper(ResourceMapper.class);
+ workerGroupMapper = getMapper(WorkerGroupMapper.class);
taskQueue = TaskQueueFactory.getTaskQueueInstance();
}
@ -120,48 +127,72 @@ public class ProcessDao extends AbstractBaseDao {
* find one command from command queue, construct process instance
* @param logger
* @param host
- * @param vaildThreadNum
+ * @param validThreadNum
* @return
*/
@Transactional(value = "TransactionManager",rollbackFor = Exception.class)
- public ProcessInstance scanCommand(Logger logger, String host, int vaildThreadNum){
+ public ProcessInstance scanCommand(Logger logger, String host, int validThreadNum){
ProcessInstance processInstance = null;
Command command = findOneCommand();
if (command == null) {
return null;
}
logger.info(String.format("find one command: id: %d, type: %s", command.getId(),command.getCommandType().toString()));
- processInstance = constructProcessInstance(command, host);
- //cannot construct process instance, return null;
- if(processInstance == null){
- logger.error("scan command, command parameter is error: %s", command.toString());
- }else{
- // check thread number enough for this command, if not, change state to waiting thread.
- int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
- if(vaildThreadNum < commandThreadCount){
- logger.info("there is not enough thread for this command: {}",command.toString() );
- processInstance.setState(ExecutionStatus.WAITTING_THREAD);
- if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){
- processInstance.addHistoryCmd(command.getCommandType());
- }
- saveProcessInstance(processInstance);
- this.setSubProcessParam(processInstance);
- createRecoveryWaitingThreadCommand(command, processInstance);
- return null;
- }else{
- processInstance.setCommandType(command.getCommandType());
- processInstance.addHistoryCmd(command.getCommandType());
- saveProcessInstance(processInstance);
- this.setSubProcessParam(processInstance);
- }
- }
- // delete command
- delCommandByid(command.getId());
- return processInstance;
+ try{
+ processInstance = constructProcessInstance(command, host);
+ //cannot construct process instance, return null;
+ if(processInstance == null){
+ logger.error("scan command, command parameter is error: %s", command.toString());
+ delCommandByid(command.getId());
+ saveErrorCommand(command, "process instance is null");
+ return null;
+ }else if(!checkThreadNum(command, validThreadNum)){
+ logger.info("there is not enough thread for this command: {}",command.toString() );
+ return setWaitingThreadProcess(command, processInstance);
+ }else{
+ processInstance.setCommandType(command.getCommandType());
+ processInstance.addHistoryCmd(command.getCommandType());
+ saveProcessInstance(processInstance);
+ this.setSubProcessParam(processInstance);
+ delCommandByid(command.getId());
+ return processInstance;
+ }
+ }catch (Exception e){
+ logger.error("scan command error ", e);
+ saveErrorCommand(command, e.toString());
+ delCommandByid(command.getId());
+ }
+ return null;
}
+ private void saveErrorCommand(Command command, String message) {
+ ErrorCommand errorCommand = new ErrorCommand(command, message);
+ this.errorCommandMapper.insert(errorCommand);
+ }
+ /**
+ * set process waiting thread
+ * @param command
+ * @param processInstance
+ * @return
+ */
+ private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) {
+ processInstance.setState(ExecutionStatus.WAITTING_THREAD);
+ if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){
+ processInstance.addHistoryCmd(command.getCommandType());
+ }
+ saveProcessInstance(processInstance);
+ this.setSubProcessParam(processInstance);
+ createRecoveryWaitingThreadCommand(command, processInstance);
+ return null;
+ }
+ private boolean checkThreadNum(Command command, int validThreadNum) {
+ int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
+ return validThreadNum >= commandThreadCount;
+ }
/**
@ -245,7 +276,7 @@ public class ProcessDao extends AbstractBaseDao {
public ProcessInstance findProcessInstanceByScheduleTime(int defineId, Date scheduleTime){
return processInstanceMapper.queryByScheduleTime(defineId,
- DateUtils.dateToString(scheduleTime), 0,null, null);
+ DateUtils.dateToString(scheduleTime), 0, null, null);
}
/**
@ -450,6 +481,7 @@ public class ProcessDao extends AbstractBaseDao {
processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson());
// set process instance priority
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
+ processInstance.setWorkerGroupId(command.getWorkerGroupId());
return processInstance;
}
@ -669,7 +701,7 @@ public class ProcessDao extends AbstractBaseDao {
paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(processInstance.getId()));
processInstance.setCommandParam(JSONUtils.toJson(paramMap));
processInstance.setIsSubProcess(Flag.YES);
- this.updateProcessInstance(processInstance);
+ this.saveProcessInstance(processInstance);
}
// copy parent instance user def params to sub process..
String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID);
@ -677,7 +709,7 @@ public class ProcessDao extends AbstractBaseDao {
ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId));
if(parentInstance != null){
processInstance.setGlobalParams(parentInstance.getGlobalParams());
- this.updateProcessInstance(processInstance);
+ this.saveProcessInstance(processInstance);
}else{
logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam);
}
@ -1194,7 +1226,7 @@ public class ProcessDao extends AbstractBaseDao {
public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag,
String locations, String connects){
- return processInstanceMapper.updateProcessInstance( processInstanceId, processJson,
+ return processInstanceMapper.updateProcessInstance(processInstanceId, processJson,
globalParams, scheduleTime, locations, connects, flag);
}
@ -1538,4 +1570,25 @@ public class ProcessDao extends AbstractBaseDao {
DateUtils.dateToString(dateInterval.getEndTime()),
stateArray);
}
/**
* query user queue by process instance id
* @param processInstanceId
* @return
*/
public String queryQueueByProcessInstanceId(int processInstanceId){
return userMapper.queryQueueByProcessInstanceId(processInstanceId);
}
/**
* query worker group by id
* @param workerGroupId
* @return
*/
public WorkerGroup queryWorkerGroupById(int workerGroupId){
return workerGroupMapper.queryById(workerGroupId);
}
}
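
The reworked scanCommand now funnels every failure path through saveErrorCommand and always deletes the consumed command, while the two new query helpers let callers resolve the queue and worker group recorded for an instance. A rough caller-side sketch (illustrative fragment; processDao, logger, host and availableThreads are assumed to be in scope, and getWorkerGroupId is assumed to be the matching getter on ProcessInstance):

// Illustrative fragment: one scheduling round built on the methods added above.
ProcessInstance instance = processDao.scanCommand(logger, host, availableThreads);
if (instance != null) {
    String queue = processDao.queryQueueByProcessInstanceId(instance.getId());
    WorkerGroup group = processDao.queryWorkerGroupById(instance.getWorkerGroupId()); // may be null for the default group
    // hand the instance over to the execution thread pool here (out of scope for this sketch)
}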

15
escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java

@ -17,6 +17,7 @@
package cn.escheduler.dao;
import cn.escheduler.common.Constants;
+import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.dao.model.TaskRecord;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
@ -39,6 +40,8 @@ public class TaskRecordDao {
private static Logger logger = LoggerFactory.getLogger(TaskRecordDao.class.getName());
/**
* load configuration file
*/
@ -133,7 +136,7 @@ public class TaskRecordDao {
* @param filterMap
* @return
*/
- public static int countTaskRecord(Map<String, String> filterMap){
+ public static int countTaskRecord(Map<String, String> filterMap, String table){
int count = 0;
Connection conn = null;
@ -142,7 +145,7 @@ public class TaskRecordDao {
if(conn == null){
return count;
}
- String sql = "select count(1) as count from eamp_hive_log_hd";
+ String sql = String.format("select count(1) as count from %s", table);
sql += getWhereString(filterMap);
PreparedStatement pstmt;
pstmt = conn.prepareStatement(sql);
@ -170,9 +173,9 @@ public class TaskRecordDao {
* @param filterMap
* @return
*/
- public static List<TaskRecord> queryAllTaskRecord(Map<String,String> filterMap ) {
+ public static List<TaskRecord> queryAllTaskRecord(Map<String,String> filterMap , String table) {
- String sql = "select * from eamp_hive_log_hd ";
+ String sql = String.format("select * from %s", table);
sql += getWhereString(filterMap);
int offset = Integer.parseInt(filterMap.get("offset"));
@ -201,8 +204,8 @@ public class TaskRecordDao {
taskRecord.setProcId(resultSet.getInt("PROC_ID"));
taskRecord.setProcName(resultSet.getString("PROC_NAME"));
taskRecord.setProcDate(resultSet.getString("PROC_DATE"));
- taskRecord.setStartDate(resultSet.getDate("STARTDATE"));
- taskRecord.setEndDate(resultSet.getDate("ENDDATE"));
+ taskRecord.setStartTime(DateUtils.stringToDate(resultSet.getString("STARTDATE")));
+ taskRecord.setEndTime(DateUtils.stringToDate(resultSet.getString("ENDDATE")));
taskRecord.setResult(resultSet.getString("RESULT"));
taskRecord.setDuration(resultSet.getInt("DURATION"));
taskRecord.setNote(resultSet.getString("NOTE"));
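
With the table name now a parameter, the same DAO can serve different task-record tables. A sketch (illustrative fragment; filter keys beyond offset/pageSize depend on getWhereString) of the adjusted call sites, using the previously hard-coded table name as the argument:

// Illustrative fragment: count and page task records from an explicitly named table.
Map<String, String> filter = new HashMap<>();
filter.put("offset", "0");
filter.put("pageSize", "10");
// any extra keys understood by getWhereString() (e.g. a task-name filter) would go here

int total = TaskRecordDao.countTaskRecord(filter, "eamp_hive_log_hd");
List<TaskRecord> page = TaskRecordDao.queryAllTaskRecord(filter, "eamp_hive_log_hd");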

90
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapper.java

@ -0,0 +1,90 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.AccessToken;
import cn.escheduler.dao.model.User;
import org.apache.ibatis.annotations.*;
import org.apache.ibatis.type.EnumOrdinalTypeHandler;
import org.apache.ibatis.type.JdbcType;
import java.sql.Timestamp;
import java.util.List;
public interface AccessTokenMapper {
/**
* insert accessToken
* @param accessToken
* @return
*/
@InsertProvider(type = AccessTokenMapperProvider.class, method = "insert")
@Options(useGeneratedKeys = true,keyProperty = "accessToken.id")
@SelectKey(statement = "SELECT LAST_INSERT_ID()", keyProperty = "accessToken.id", before = false, resultType = int.class)
int insert(@Param("accessToken") AccessToken accessToken);
/**
* delete accessToken
* @param accessTokenId
* @return
*/
@DeleteProvider(type = AccessTokenMapperProvider.class, method = "delete")
int delete(@Param("accessTokenId") int accessTokenId);
/**
* update accessToken
*
* @param accessToken
* @return
*/
@UpdateProvider(type = AccessTokenMapperProvider.class, method = "update")
int update(@Param("accessToken") AccessToken accessToken);
/**
* query access token list paging
* @param searchVal
* @param offset
* @param pageSize
* @return
*/
@Results(value = {@Result(property = "id", column = "id", id = true, javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "userId", column = "user_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "token", column = "token", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "expireTime", column = "expire_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE)
})
@SelectProvider(type = AccessTokenMapperProvider.class, method = "queryAccessTokenPaging")
List<AccessToken> queryAccessTokenPaging(@Param("userId") Integer userId,
@Param("searchVal") String searchVal,
@Param("offset") Integer offset,
@Param("pageSize") Integer pageSize);
/**
* count access token by search value
* @param searchVal
* @return
*/
@SelectProvider(type = AccessTokenMapperProvider.class, method = "countAccessTokenPaging")
Integer countAccessTokenPaging(@Param("userId") Integer userId
,@Param("searchVal") String searchVal);
}
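
A sketch (illustrative fragment; how the mapper instance is obtained from the MyBatis session or Spring context is deliberately left out, and the usual bean setters on AccessToken are assumed) of the insert-then-page flow this mapper supports:

// Illustrative fragment: create a token row, then page tokens for a user.
void demo(AccessTokenMapper accessTokenMapper) {
    AccessToken token = new AccessToken();
    token.setUserId(1);
    token.setToken("placeholder-token-string");
    token.setExpireTime(new Date());
    token.setCreateTime(new Date());
    token.setUpdateTime(new Date());
    accessTokenMapper.insert(token);          // id is filled in via LAST_INSERT_ID()

    List<AccessToken> page = accessTokenMapper.queryAccessTokenPaging(1, "", 0, 10);
    Integer total = accessTokenMapper.countAccessTokenPaging(1, "");
}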

136
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapperProvider.java

@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.jdbc.SQL;
import java.util.Map;
/**
* access token mapper provider
*
*/
public class AccessTokenMapperProvider {
private static final String TABLE_NAME = "t_escheduler_access_token";
/**
* insert accessToken
*
* @param parameter
* @return
*/
public String insert(Map<String, Object> parameter) {
return new SQL() {
{
INSERT_INTO(TABLE_NAME);
VALUES("`user_id`", "#{accessToken.userId}");
VALUES("`token`", "#{accessToken.token}");
VALUES("`expire_time`", "#{accessToken.expireTime}");;
VALUES("`create_time`", "#{accessToken.createTime}");
VALUES("`update_time`", "#{accessToken.updateTime}");
}
}.toString();
}
/**
* delete accessToken
*
* @param parameter
* @return
*/
public String delete(Map<String, Object> parameter) {
return new SQL() {
{
DELETE_FROM(TABLE_NAME);
WHERE("`id`=#{accessTokenId}");
}
}.toString();
}
/**
* update accessToken
*
* @param parameter
* @return
*/
public String update(Map<String, Object> parameter) {
return new SQL() {
{
UPDATE(TABLE_NAME);
SET("`user_id`=#{accessToken.userId}");
SET("`token`=#{accessToken.token}");
SET("`expire_time`=#{accessToken.expireTime}");
SET("`update_time`=#{accessToken.updateTime}");
WHERE("`id`=#{accessToken.id}");
}
}.toString();
}
/**
* count access tokens by search value
* @param parameter
* @return
*/
public String countAccessTokenPaging(Map<String, Object> parameter) {
return new SQL() {{
SELECT("count(0)");
FROM(TABLE_NAME + " t,t_escheduler_user u");
Object searchVal = parameter.get("searchVal");
WHERE("u.id = t.user_id");
if(parameter.get("userId") != null && (int)parameter.get("userId") != 0){
WHERE(" u.id = #{userId}");
}
if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
WHERE(" u.user_name like concat('%', #{searchVal}, '%')");
}
}}.toString();
}
/**
* query access token list paging
* @param parameter
* @return
*/
public String queryAccessTokenPaging(Map<String, Object> parameter) {
return new SQL() {
{
SELECT("t.*,u.user_name");
FROM(TABLE_NAME + " t,t_escheduler_user u");
Object searchVal = parameter.get("searchVal");
WHERE("u.id = t.user_id");
if(parameter.get("userId") != null && (int)parameter.get("userId") != 0){
WHERE(" u.id = #{userId}");
}
if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
WHERE(" u.user_name like concat('%', #{searchVal}, '%') ");
}
ORDER_BY(" t.update_time desc limit #{offset},#{pageSize} ");
}
}.toString();
}
}

18
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapper.java

@ -18,12 +18,15 @@ package cn.escheduler.dao.mapper;
import cn.escheduler.common.enums.*;
import cn.escheduler.dao.model.Command;
+import cn.escheduler.dao.model.ExecuteStatusCount;
import org.apache.ibatis.annotations.*;
import org.apache.ibatis.type.EnumOrdinalTypeHandler;
import org.apache.ibatis.type.JdbcType;
import java.sql.Timestamp;
+import java.util.Date;
import java.util.List;
+import java.util.Map;
/**
* command mapper
@ -76,6 +79,7 @@ public interface CommandMapper {
@Result(property = "scheduleTime", column = "schedule_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "startTime", column = "start_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
+ @Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = CommandMapperProvider.class, method = "queryOneCommand")
@ -98,9 +102,23 @@ public interface CommandMapper {
@Result(property = "scheduleTime", column = "schedule_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "startTime", column = "start_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
+ @Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = CommandMapperProvider.class, method = "queryAllCommand")
List<Command> queryAllCommand();
@Results(value = {
@Result(property = "state", column = "state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ExecutionStatus.class, jdbcType = JdbcType.TINYINT),
@Result(property = "count", column = "count", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
})
@SelectProvider(type = CommandMapperProvider.class, method = "countCommandState")
List<ExecuteStatusCount> countCommandState(
@Param("userId") int userId,
@Param("userType") UserType userType,
@Param("startTime") Date startTime,
@Param("endTime") Date endTime,
@Param("projectId") int projectId);
}

31
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapperProvider.java

@ -51,6 +51,7 @@ public class CommandMapperProvider {
VALUES("`warning_group_id`", "#{command.warningGroupId}");
VALUES("`schedule_time`", "#{command.scheduleTime}");
VALUES("`update_time`", "#{command.updateTime}");
+ VALUES("`worker_group_id`", "#{command.workerGroupId}");
VALUES("`start_time`", "#{command.startTime}");
}
@ -95,6 +96,7 @@ public class CommandMapperProvider {
SET("`warning_group_id`=#{command.warningGroupId}");
SET("`schedule_time`=#{command.scheduleTime}");
SET("`update_time`=#{command.updateTime}");
+ SET("`worker_group_id`=#{command.workerGroupId}");
SET("`start_time`=#{command.startTime}");
WHERE("`id`=#{command.id}");
@ -139,6 +141,31 @@ public class CommandMapperProvider {
}.toString();
}
/**
*
* count command type
* @param parameter
* @return
*/
public String countCommandState(Map<String, Object> parameter){
return new SQL(){
{
SELECT ("command_type as state,COUNT(*) AS count");
FROM(TABLE_NAME + " cmd,t_escheduler_process_definition process");
WHERE("cmd.process_definition_id = process.id");
if(parameter.get("projectId") != null && (int)parameter.get("projectId") != 0){
WHERE( "process.project_id = #{projectId} ");
}else{
if(parameter.get("userType") != null && String.valueOf(parameter.get("userType")) == "GENERAL_USER") {
AND();
WHERE("process.project_id in (select id as project_id from t_escheduler_project tp where tp.user_id= #{userId} " +
"union select project_id from t_escheduler_relation_project_user tr where tr.user_id= #{userId} )");
}
}
WHERE("cmd.start_time >= #{startTime} and cmd.update_time <= #{endTime}");
GROUP_BY("cmd.command_type");
}
}.toString();
}
}

59
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapper.java

@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import cn.escheduler.common.enums.*;
import cn.escheduler.dao.model.Command;
import cn.escheduler.dao.model.ErrorCommand;
import cn.escheduler.dao.model.ExecuteStatusCount;
import org.apache.ibatis.annotations.*;
import org.apache.ibatis.type.EnumOrdinalTypeHandler;
import org.apache.ibatis.type.JdbcType;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
/**
* error command mapper
*/
public interface ErrorCommandMapper {
/**
* insert error command
* @param errorCommand
* @return
*/
@InsertProvider(type = ErrorCommandMapperProvider.class, method = "insert")
@Options(useGeneratedKeys = true,keyProperty = "errorCommand.id")
@SelectKey(statement = "SELECT LAST_INSERT_ID()", keyProperty = "errorCommand.id", before = false, resultType = int.class)
int insert(@Param("errorCommand") ErrorCommand errorCommand);
@Results(value = {
@Result(property = "state", column = "state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ExecutionStatus.class, jdbcType = JdbcType.TINYINT),
@Result(property = "count", column = "count", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
})
@SelectProvider(type = ErrorCommandMapperProvider.class, method = "countCommandState")
List<ExecuteStatusCount> countCommandState(
@Param("userId") int userId,
@Param("userType") UserType userType,
@Param("startTime") Date startTime,
@Param("endTime") Date endTime,
@Param("projectId") int projectId);
}

71
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapperProvider.java

@ -0,0 +1,71 @@
package cn.escheduler.dao.mapper;
import cn.escheduler.common.enums.*;
import cn.escheduler.common.utils.EnumFieldUtil;
import org.apache.ibatis.jdbc.SQL;
import java.util.Map;
public class ErrorCommandMapperProvider {
private static final String TABLE_NAME = "t_escheduler_error_command";
/**
* insert error command
*
* @param parameter
* @return
*/
public String insert(Map<String, Object> parameter) {
return new SQL() {
{
INSERT_INTO(TABLE_NAME);
VALUES("`id`", "#{errorCommand.id}");
VALUES("`command_type`", EnumFieldUtil.genFieldStr("errorCommand.commandType", CommandType.class));
VALUES("`process_definition_id`", "#{errorCommand.processDefinitionId}");
VALUES("`executor_id`", "#{errorCommand.executorId}");
VALUES("`command_param`", "#{errorCommand.commandParam}");
VALUES("`task_depend_type`", EnumFieldUtil.genFieldStr("errorCommand.taskDependType", TaskDependType.class));
VALUES("`failure_strategy`", EnumFieldUtil.genFieldStr("errorCommand.failureStrategy", FailureStrategy.class));
VALUES("`warning_type`", EnumFieldUtil.genFieldStr("errorCommand.warningType", WarningType.class));
VALUES("`process_instance_priority`", EnumFieldUtil.genFieldStr("errorCommand.processInstancePriority", Priority.class));
VALUES("`warning_group_id`", "#{errorCommand.warningGroupId}");
VALUES("`schedule_time`", "#{errorCommand.scheduleTime}");
VALUES("`update_time`", "#{errorCommand.updateTime}");
VALUES("`start_time`", "#{errorCommand.startTime}");
VALUES("`worker_group_id`", "#{errorCommand.workerGroupId}");
VALUES("`message`", "#{errorCommand.message}");
}
}.toString();
}
/**
*
* count command type
* @param parameter
* @return
*/
public String countCommandState(Map<String, Object> parameter){
return new SQL(){
{
SELECT("command_type as state,COUNT(*) AS count");
FROM(TABLE_NAME + " cmd,t_escheduler_process_definition process");
WHERE("cmd.process_definition_id = process.id");
if(parameter.get("projectId") != null && (int)parameter.get("projectId") != 0){
WHERE( "process.project_id = #{projectId} ");
}else{
if(parameter.get("userType") != null && String.valueOf(parameter.get("userType")) == "GENERAL_USER") {
AND();
WHERE("process.project_id in (select id as project_id from t_escheduler_project tp where tp.user_id= #{userId} " +
"union select project_id from t_escheduler_relation_project_user tr where tr.user_id= #{userId} )");
}
}
WHERE("cmd.start_time >= #{startTime} and cmd.update_time <= #{endTime}");
GROUP_BY("cmd.command_type");
}
}.toString();
}
}

11
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java

@@ -94,6 +94,7 @@ public interface ProcessInstanceMapper {
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "tenantCode", column = "tenant_code", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryDetailById")
@@ -131,6 +132,7 @@ public interface ProcessInstanceMapper {
@Result(property = "connects", column = "connects", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "historyCmd", column = "history_cmd", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryById")
@@ -168,6 +170,7 @@ public interface ProcessInstanceMapper {
@Result(property = "historyCmd", column = "history_cmd", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -205,6 +208,7 @@ public interface ProcessInstanceMapper {
@Result(property = "historyCmd", column = "history_cmd", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -251,6 +255,7 @@ public interface ProcessInstanceMapper {
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -346,6 +351,7 @@ public interface ProcessInstanceMapper {
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -437,6 +443,7 @@ public interface ProcessInstanceMapper {
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -480,6 +487,7 @@ public interface ProcessInstanceMapper {
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -523,6 +531,7 @@ public interface ProcessInstanceMapper {
@Result(property = "historyCmd", column = "history_cmd", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -564,6 +573,7 @@ public interface ProcessInstanceMapper {
@Result(property = "historyCmd", column = "history_cmd", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryLastRunningProcess")
@@ -605,6 +615,7 @@ public interface ProcessInstanceMapper {
@Result(property = "historyCmd", column = "history_cmd", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryLastManualProcess")

2
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java

@@ -67,6 +67,7 @@ public class ProcessInstanceMapperProvider {
VALUES("`dependence_schedule_times`", "#{processInstance.dependenceScheduleTimes}");
VALUES("`is_sub_process`", EnumFieldUtil.genFieldStr("processInstance.isSubProcess", Flag.class));
VALUES("`executor_id`", "#{processInstance.executorId}");
VALUES("`worker_group_id`", "#{processInstance.workerGroupId}");
VALUES("`process_instance_priority`", EnumFieldUtil.genFieldStr("processInstance.processInstancePriority", Priority.class));
}
}.toString();
@@ -139,6 +140,7 @@ public class ProcessInstanceMapperProvider {
SET("`dependence_schedule_times`=#{processInstance.dependenceScheduleTimes}");
SET("`is_sub_process`="+EnumFieldUtil.genFieldStr("processInstance.isSubProcess", Flag.class));
SET("`executor_id`=#{processInstance.executorId}");
SET("`worker_group_id`=#{processInstance.workerGroupId}");
WHERE("`id`=#{processInstance.id}");

35
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapper.java

@@ -20,6 +20,7 @@ import cn.escheduler.dao.model.Queue;
import org.apache.ibatis.annotations.*;
import org.apache.ibatis.type.JdbcType;
import java.sql.Timestamp;
import java.util.List;
/**
@@ -64,7 +65,9 @@
*/
@Results(value = {@Result(property = "id", column = "id", id = true, javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "queueName", column = "queue_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "queue", column = "queue", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE)
})
@SelectProvider(type = QueueMapperProvider.class, method = "queryById")
Queue queryById(@Param("queueId") int queueId);
@@ -76,13 +79,41 @@
*/
@Results(value = {@Result(property = "id", column = "id", id = true, javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "queueName", column = "queue_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "queue", column = "queue", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE)
})
@SelectProvider(type = QueueMapperProvider.class, method = "queryAllQueue")
List<Queue> queryAllQueue();
/**
* query queue list paging
* @return
*/
@Results(value = {@Result(property = "id", column = "id", id = true, javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "queueName", column = "queue_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "queue", column = "queue", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE)
})
@SelectProvider(type = QueueMapperProvider.class, method = "queryQueuePaging")
List<Queue> queryQueuePaging(@Param("searchVal") String searchVal,
@Param("offset") Integer offset,
@Param("pageSize") Integer pageSize);
/**
* count queue by search value
* @param searchVal
* @return
*/
@SelectProvider(type = QueueMapperProvider.class, method = "countQueuePaging")
Integer countQueuePaging(@Param("searchVal") String searchVal);
@SelectProvider(type = QueueMapperProvider.class, method = "queryByQueue")
Queue queryByQueue(@Param("queue") String queue);
@SelectProvider(type = QueueMapperProvider.class, method = "queryByQueueName")
Queue queryByQueueName(@Param("queueName") String queueName);
}

80
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapperProvider.java

@@ -16,6 +16,7 @@
*/
package cn.escheduler.dao.mapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.jdbc.SQL;
import java.util.Map;
@@ -39,7 +40,8 @@
INSERT_INTO(TABLE_NAME);
VALUES("`queue_name`", "#{queue.queueName}");
VALUES("`queue`", "#{queue.queue}");
VALUES("`create_time`", "#{queue.createTime}");
VALUES("`update_time`", "#{queue.updateTime}");
}
}.toString();
}
@@ -73,6 +75,7 @@
SET("`queue_name`=#{queue.queueName}");
SET("`queue`=#{queue.queue}");
SET("`update_time`=#{queue.updateTime}");
WHERE("`id`=#{queue.id}");
}
@@ -80,10 +83,6 @@
}
/**
* query queue by id
*
@@ -117,5 +116,76 @@
}.toString();
}
/**
* count queue by search value
* @param parameter
* @return
*/
public String countQueuePaging(Map<String, Object> parameter) {
return new SQL() {{
SELECT("count(0)");
FROM(TABLE_NAME);
Object searchVal = parameter.get("searchVal");
if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
WHERE( " queue_name like concat('%', #{searchVal}, '%') ");
}
}}.toString();
}
/**
* query queue list paging
* @param parameter
* @return
*/
public String queryQueuePaging(Map<String, Object> parameter) {
return new SQL() {
{
SELECT("*");
FROM(TABLE_NAME);
Object searchVal = parameter.get("searchVal");
if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
WHERE( " queue_name like concat('%', #{searchVal}, '%') ");
}
ORDER_BY(" update_time desc limit #{offset},#{pageSize} ");
}
}.toString();
}
/**
* query by queue
*
* @param parameter
* @return
*/
public String queryByQueue(Map<String, Object> parameter) {
return new SQL() {
{
SELECT("*");
FROM(TABLE_NAME);
WHERE("`queue` = #{queue}");
}
}.toString();
}
/**
* query by queue name
*
* @param parameter
* @return
*/
public String queryByQueueName(Map<String, Object> parameter) {
return new SQL() {
{
SELECT("*");
FROM(TABLE_NAME);
WHERE("`queue_name` = #{queueName}");
}
}.toString();
}
}
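
A hedged sketch of how the new count/query pair might be combined for paging in calling code. The wrapper class, the 1-based pageNo convention and the offset arithmetic are assumptions of this sketch, not something the diff defines.

import cn.escheduler.dao.mapper.QueueMapper;
import cn.escheduler.dao.model.Queue;

import java.util.Collections;
import java.util.List;

public class QueuePagingSketch {

    private final QueueMapper queueMapper;

    public QueuePagingSketch(QueueMapper queueMapper) {
        this.queueMapper = queueMapper;
    }

    // Returns one page of queues whose queue_name matches searchVal (LIKE '%searchVal%').
    public List<Queue> listQueues(String searchVal, int pageNo, int pageSize) {
        Integer total = queueMapper.countQueuePaging(searchVal);
        if (total == null || total == 0) {
            return Collections.emptyList();
        }
        // queryQueuePaging appends "limit #{offset},#{pageSize}" after ORDER BY update_time desc,
        // so the offset here is derived from a 1-based page number (an assumption of this sketch).
        int offset = (pageNo - 1) * pageSize;
        return queueMapper.queryQueuePaging(searchVal, offset, pageSize);
    }
}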

4
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java

@@ -75,6 +75,7 @@ public interface ScheduleMapper {
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.INTEGER),
@Result(property = "releaseState", column = "release_state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ReleaseState.class, jdbcType = JdbcType.TINYINT),
@Result(property = "warningGroupId", column = "warning_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ScheduleMapperProvider.class, method = "queryByProcessDefineIdPaging")
@@ -117,6 +118,7 @@ public interface ScheduleMapper {
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.INTEGER),
@Result(property = "releaseState", column = "release_state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ReleaseState.class, jdbcType = JdbcType.TINYINT),
@Result(property = "warningGroupId", column = "warning_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ScheduleMapperProvider.class, method = "querySchedulerListByProjectName")
@@ -141,6 +143,7 @@ public interface ScheduleMapper {
@Result(property = "userId", column = "user_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "releaseState", column = "release_state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ReleaseState.class, jdbcType = JdbcType.TINYINT),
@Result(property = "warningGroupId", column = "warning_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ScheduleMapperProvider.class, method = "queryById")
@@ -164,6 +167,7 @@ public interface ScheduleMapper {
@Result(property = "userId", column = "user_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "releaseState", column = "release_state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ReleaseState.class, jdbcType = JdbcType.TINYINT),
@Result(property = "warningGroupId", column = "warning_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ScheduleMapperProvider.class, method = "selectAllByProcessDefineArray")

2
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapperProvider.java

@@ -48,6 +48,7 @@ public class ScheduleMapperProvider {
VALUES("`user_id`", "#{schedule.userId}");
VALUES("`release_state`", EnumFieldUtil.genFieldStr("schedule.releaseState", ReleaseState.class));
VALUES("`warning_group_id`", "#{schedule.warningGroupId}");
VALUES("`worker_group_id`", "#{schedule.workerGroupId}");
VALUES("`process_instance_priority`", EnumFieldUtil.genFieldStr("schedule.processInstancePriority", Priority.class));
}}.toString();
}
@@ -67,6 +68,7 @@ public class ScheduleMapperProvider {
SET("`user_id`=#{schedule.userId}");
SET("`release_state`=" + EnumFieldUtil.genFieldStr("schedule.releaseState", ReleaseState.class));
SET("`warning_group_id`=#{schedule.warningGroupId}");
SET("`worker_group_id`=#{schedule.workerGroupId}");
SET("`process_instance_priority`="+ EnumFieldUtil.genFieldStr("schedule.processInstancePriority", Priority.class));
WHERE("`id` = #{schedule.id}");

19
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapper.java

@@ -88,6 +88,7 @@ public interface TaskInstanceMapper {
@Result(property = "appLink", column = "app_link", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "flag", column = "flag", typeHandler = EnumOrdinalTypeHandler.class, javaType = Flag.class, jdbcType = JdbcType.TINYINT),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "taskInstancePriority", column = "task_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = TaskInstanceMapperProvider.class, method = "queryById")
@@ -131,6 +132,7 @@ public interface TaskInstanceMapper {
@Result(property = "appLink", column = "app_link", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "flag", column = "flag", typeHandler = EnumOrdinalTypeHandler.class, javaType = Flag.class, jdbcType = JdbcType.TINYINT),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "taskInstancePriority", column = "task_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = TaskInstanceMapperProvider.class, method = "findValidTaskListByProcessId")
@@ -164,6 +166,7 @@ public interface TaskInstanceMapper {
@Result(property = "appLink", column = "app_link", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "flag", column = "flag", typeHandler = EnumOrdinalTypeHandler.class, javaType = Flag.class, jdbcType = JdbcType.TINYINT),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "taskInstancePriority", column = "task_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = TaskInstanceMapperProvider.class, method = "queryByHostAndStatus")
@@ -255,6 +258,7 @@ public interface TaskInstanceMapper {
@Result(property = "appLink", column = "app_link", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "flag", column = "flag", typeHandler = EnumOrdinalTypeHandler.class, javaType = Flag.class, jdbcType = JdbcType.TINYINT),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "taskInstancePriority", column = "task_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = TaskInstanceMapperProvider.class, method = "queryTaskInstanceListPaging")
@@ -299,9 +303,24 @@ public interface TaskInstanceMapper {
@Result(property = "appLink", column = "app_link", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT),
@Result(property = "flag", column = "flag", typeHandler = EnumOrdinalTypeHandler.class, javaType = Flag.class, jdbcType = JdbcType.TINYINT),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "taskInstancePriority", column = "task_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = TaskInstanceMapperProvider.class, method = "queryByInstanceIdAndName")
TaskInstance queryByInstanceIdAndName(@Param("processInstanceId") int processInstanceId,
@Param("name") String name);
/**
* count task
* @param userId
* @param userType
* @param projectId
* @return
*/
@SelectProvider(type = TaskInstanceMapperProvider.class, method = "countTask")
Integer countTask(@Param("userId") int userId,
@Param("userType") UserType userType,
@Param("projectId") int projectId,
@Param("taskIds") int[] taskIds);
}

42
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapperProvider.java

@@ -62,6 +62,7 @@ public class TaskInstanceMapperProvider {
VALUES("`max_retry_times`", "#{taskInstance.maxRetryTimes}");
VALUES("`retry_interval`", "#{taskInstance.retryInterval}");
VALUES("`app_link`", "#{taskInstance.appLink}");
VALUES("`worker_group_id`", "#{taskInstance.workerGroupId}");
VALUES("`flag`", EnumFieldUtil.genFieldStr("taskInstance.flag", Flag.class));
VALUES("`task_instance_priority`", EnumFieldUtil.genFieldStr("taskInstance.taskInstancePriority", Priority.class));
@@ -114,6 +115,7 @@ public class TaskInstanceMapperProvider {
SET("`max_retry_times`=#{taskInstance.maxRetryTimes}");
SET("`retry_interval`=#{taskInstance.retryInterval}");
SET("`app_link`=#{taskInstance.appLink}");
SET("`worker_group_id`=#{taskInstance.workerGroupId}");
SET("`flag`="+ EnumFieldUtil.genFieldStr("taskInstance.flag", Flag.class));
SET("`task_instance_priority`="+ EnumFieldUtil.genFieldStr("taskInstance.taskInstancePriority", Priority.class));
@@ -185,7 +187,7 @@ public class TaskInstanceMapperProvider {
{
SELECT ("state, count(0) as count");
FROM(TABLE_NAME + " t");
LEFT_OUTER_JOIN(DEFINE_TABLE_NAME + " d on d.id=t.process_definition_id");
LEFT_OUTER_JOIN("t_escheduler_project p on p.id=d.project_id");
if(parameter.get("projectId") != null && (int)parameter.get("projectId") != 0){
WHERE( "p.id = #{projectId} ");
@@ -404,4 +406,42 @@ public class TaskInstanceMapperProvider {
}
/**
*
* count task
* @param parameter
* @return
*/
public String countTask(Map<String, Object> parameter){
StringBuffer taskIdsStr = new StringBuffer();
int[] stateArray = (int[]) parameter.get("taskIds");
for(int i=0;i<stateArray.length;i++){
taskIdsStr.append(stateArray[i]);
if(i<stateArray.length-1){
taskIdsStr.append(",");
}
}
return new SQL(){
{
SELECT("count(1) as count");
FROM(TABLE_NAME + " task,t_escheduler_process_definition process");
WHERE("task.process_definition_id=process.id");
if(parameter.get("projectId") != null && (int)parameter.get("projectId") != 0){
WHERE( "process.project_id = #{projectId} ");
}else{
if(parameter.get("userType") != null && String.valueOf(parameter.get("userType")) == "GENERAL_USER") {
AND();
WHERE("process.project_id in (select id as project_id from t_escheduler_project tp where tp.user_id= #{userId} " +
"union select project_id from t_escheduler_relation_project_user tr where tr.user_id= #{userId} )");
}
}
WHERE("task.id in (" + taskIdsStr.toString() + ")");
}
}.toString();
}
}
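
Because provider classes are plain Java, the dynamic SQL they build can be previewed without a database. A small sketch (parameter values are illustrative) that prints the statement countTask produces, including the comma-joined IN list derived from taskIds:

import cn.escheduler.dao.mapper.TaskInstanceMapperProvider;

import java.util.HashMap;
import java.util.Map;

public class CountTaskSqlSketch {

    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<>();
        params.put("projectId", 1);              // non-zero, so only the project filter branch is taken
        params.put("userType", "GENERAL_USER");  // not used in this branch, shown for completeness
        params.put("userId", 10);
        params.put("taskIds", new int[]{101, 102, 103});

        // Prints a parameterised SELECT count(1) ... WHERE ... task.id in (101,102,103)
        String sql = new TaskInstanceMapperProvider().countTask(params);
        System.out.println(sql);
    }
}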

29
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java

@@ -222,4 +222,33 @@
})
@SelectProvider(type = UserMapperProvider.class, method = "queryTenantCodeByUserId")
User queryTenantCodeByUserId(@Param("userId") int userId);
/**
* query user queue by process instance id
* @param processInstanceId
* @return
*/
@SelectProvider(type = UserMapperProvider.class, method = "queryQueueByProcessInstanceId")
String queryQueueByProcessInstanceId(@Param("processInstanceId") int processInstanceId);
/**
* query user by token
* @param token
* @return
*/
@Results(value = {
@Result(property = "id", column = "id", id = true, javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "userPassword", column = "user_password", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "email", column = "email", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "phone", column = "phone", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "userType", column = "user_type", typeHandler = EnumOrdinalTypeHandler.class, javaType = UserType.class, jdbcType = JdbcType.TINYINT),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE),
@Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE)
})
@SelectProvider(type = UserMapperProvider.class, method = "queryUserByToken")
User queryUserByToken(@Param("token") String token);
}

34
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java

@@ -47,6 +47,7 @@ public class UserMapperProvider {
VALUES("`phone`", "#{user.phone}");
VALUES("`user_type`", EnumFieldUtil.genFieldStr("user.userType", UserType.class));
VALUES("`tenant_id`", "#{user.tenantId}");
VALUES("`queue`", "#{user.queue}");
VALUES("`create_time`", "#{user.createTime}");
VALUES("`update_time`", "#{user.updateTime}");
}
@@ -86,6 +87,7 @@ public class UserMapperProvider {
SET("`phone`=#{user.phone}");
SET("`user_type`="+EnumFieldUtil.genFieldStr("user.userType", UserType.class));
SET("`tenant_id`=#{user.tenantId}");
SET("`queue`=#{user.queue}");
SET("`create_time`=#{user.createTime}");
SET("`update_time`=#{user.updateTime}");
@@ -247,4 +249,36 @@ public class UserMapperProvider {
}.toString();
}
/**
* query queue by process instance id
* @param parameter
* @return
*/
public String queryQueueByProcessInstanceId(Map<String, Object> parameter) {
return new SQL() {
{
SELECT("queue");
FROM(TABLE_NAME + " u,t_escheduler_process_instance p");
WHERE("u.id = p.executor_id and p.id=#{processInstanceId}");
}
}.toString();
}
/**
* query user by id
* @param parameter
* @return
*/
public String queryUserByToken(Map<String, Object> parameter) {
return new SQL() {
{
SELECT("u.*");
FROM(TABLE_NAME + " u ,t_escheduler_access_token t");
WHERE(" u.id = t.user_id and token=#{token} and t.expire_time > NOW()");
}
}.toString();
}
}
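
A hedged sketch of how queryUserByToken might be used for API-token authentication; the wrapper class and its null handling are assumptions of this sketch, only the mapper call itself comes from this change.

import cn.escheduler.dao.mapper.UserMapper;
import cn.escheduler.dao.model.User;

public class TokenAuthSketch {

    private final UserMapper userMapper;

    public TokenAuthSketch(UserMapper userMapper) {
        this.userMapper = userMapper;
    }

    // Returns the owning user, or null when the token is unknown or its expire_time has passed
    // (the SQL only joins rows in t_escheduler_access_token with expire_time > NOW()).
    public User resolveUser(String token) {
        if (token == null || token.isEmpty()) {
            return null;
        }
        return userMapper.queryUserByToken(token);
    }
}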

131
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapper.java

@@ -0,0 +1,131 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import cn.escheduler.dao.model.WorkerGroup;
import org.apache.ibatis.annotations.*;
import org.apache.ibatis.type.JdbcType;
import java.util.Date;
import java.util.List;
/**
* worker group mapper
*/
public interface WorkerGroupMapper {
/**
* query all worker group list
*
* @return
*/
@Results(value = {
@Result(property = "id", column = "id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "ipList", column = "ip_list", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "name", column = "name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
@Result(property = "updateTime", column = "update_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
})
@SelectProvider(type = WorkerGroupMapperProvider.class, method = "queryAllWorkerGroup")
List<WorkerGroup> queryAllWorkerGroup();
/**
* query worker group by name
*
* @return
*/
@Results(value = {
@Result(property = "id", column = "id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "ipList", column = "ip_list", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "name", column = "name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
@Result(property = "updateTime", column = "update_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
})
@SelectProvider(type = WorkerGroupMapperProvider.class, method = "queryWorkerGroupByName")
List<WorkerGroup> queryWorkerGroupByName(@Param("name") String name);
/**
* query worker group paging by search value
*
* @return
*/
@Results(value = {
@Result(property = "id", column = "id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "ipList", column = "ip_list", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "name", column = "name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
@Result(property = "updateTime", column = "update_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
})
@SelectProvider(type = WorkerGroupMapperProvider.class, method = "queryListPaging")
List<WorkerGroup> queryListPaging(@Param("offset") int offset,
@Param("pageSize") int pageSize,
@Param("searchVal") String searchVal);
/**
* count worker group by search value
* @param searchVal
* @return
*/
@SelectProvider(type = WorkerGroupMapperProvider.class, method = "countPaging")
int countPaging(@Param("searchVal") String searchVal);
/**
* insert worker group
*
* @param workerGroup
* @return
*/
@InsertProvider(type = WorkerGroupMapperProvider.class, method = "insert")
@Options(useGeneratedKeys = true,keyProperty = "workerGroup.id")
@SelectKey(statement = "SELECT LAST_INSERT_ID()", keyProperty = "workerGroup.id", before = false, resultType = int.class)
int insert(@Param("workerGroup") WorkerGroup workerGroup);
/**
* update worker
*
* @param workerGroup
* @return
*/
@UpdateProvider(type = WorkerGroupMapperProvider.class, method = "update")
int update(@Param("workerGroup") WorkerGroup workerGroup);
/**
* delete worker group by id
* @param id
* @return
*/
@DeleteProvider(type = WorkerGroupMapperProvider.class, method = "deleteById")
int deleteById(@Param("id") int id);
/**
* query worker group by id
* @param id
* @return
*/
@Results(value = {
@Result(property = "id", column = "id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "ipList", column = "ip_list", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "name", column = "name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "createTime", column = "create_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
@Result(property = "updateTime", column = "update_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
})
@SelectProvider(type = WorkerGroupMapperProvider.class, method = "queryById")
WorkerGroup queryById(@Param("id") int id);
}

160
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapperProvider.java

@@ -0,0 +1,160 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.jdbc.SQL;
import java.util.Map;
/**
* worker group mapper provider
*/
public class WorkerGroupMapperProvider {
private static final String TABLE_NAME = "t_escheduler_worker_group";
/**
* query worker list
* @return
*/
public String queryAllWorkerGroup() {
return new SQL() {{
SELECT("*");
FROM(TABLE_NAME);
ORDER_BY("update_time desc");
}}.toString();
}
/**
* insert worker group
* @param parameter
* @return
*/
public String insert(Map<String, Object> parameter) {
return new SQL() {{
INSERT_INTO(TABLE_NAME);
VALUES("id", "#{workerGroup.id}");
VALUES("name", "#{workerGroup.name}");
VALUES("ip_list", "#{workerGroup.ipList}");
VALUES("create_time", "#{workerGroup.createTime}");
VALUES("update_time", "#{workerGroup.updateTime}");
}}.toString();
}
/**
* update worker group
*
* @param parameter
* @return
*/
public String update(Map<String, Object> parameter) {
return new SQL() {{
UPDATE(TABLE_NAME);
SET("name = #{workerGroup.name}");
SET("ip_list = #{workerGroup.ipList}");
SET("create_time = #{workerGroup.createTime}");
SET("update_time = #{workerGroup.updateTime}");
WHERE("id = #{workerGroup.id}");
}}.toString();
}
/**
* delete worker group by id
* @param parameter
* @return
*/
public String deleteById(Map<String, Object> parameter) {
return new SQL() {{
DELETE_FROM(TABLE_NAME);
WHERE("id = #{id}");
}}.toString();
}
/**
* query worker group by name
* @param parameter
* @return
*/
public String queryWorkerGroupByName(Map<String, Object> parameter) {
return new SQL() {{
SELECT("*");
FROM(TABLE_NAME);
WHERE("name = #{name}");
}}.toString();
}
/**
* query worker group by id
* @param parameter
* @return
*/
public String queryById(Map<String, Object> parameter) {
return new SQL() {{
SELECT("*");
FROM(TABLE_NAME);
WHERE("id = #{id}");
}}.toString();
}
/**
* query worker group list paging
* @param parameter
* @return
*/
public String queryListPaging(Map<String, Object> parameter) {
return new SQL() {{
SELECT("*");
FROM(TABLE_NAME);
Object searchVal = parameter.get("searchVal");
if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
WHERE( " name like concat('%', #{searchVal}, '%') ");
}
ORDER_BY(" update_time desc limit #{offset},#{pageSize} ");
}}.toString();
}
/**
* count worker group number by search value
* @param parameter
* @return
*/
public String countPaging(Map<String, Object> parameter) {
return new SQL() {{
SELECT("count(0)");
FROM(TABLE_NAME);
Object searchVal = parameter.get("searchVal");
if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
WHERE( " name like concat('%', #{searchVal}, '%') ");
}
}}.toString();
}
}
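
As with the other providers, the generated SQL can be previewed by calling the provider directly; a minimal sketch with illustrative parameter values for the paging queries:

import cn.escheduler.dao.mapper.WorkerGroupMapperProvider;

import java.util.HashMap;
import java.util.Map;

public class WorkerGroupPagingSqlSketch {

    public static void main(String[] args) {
        WorkerGroupMapperProvider provider = new WorkerGroupMapperProvider();

        Map<String, Object> params = new HashMap<>();
        params.put("searchVal", "default");
        params.put("offset", 0);
        params.put("pageSize", 10);

        // LIKE filter on name plus "ORDER BY update_time desc limit #{offset},#{pageSize}"
        System.out.println(provider.queryListPaging(params));
        // Matching count(0) statement used for the pagination total
        System.out.println(provider.countPaging(params));
    }
}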

126
escheduler-dao/src/main/java/cn/escheduler/dao/model/AccessToken.java

@@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.model;
import java.util.Date;
public class AccessToken {
/**
* id
*/
private int id;
/**
* user id
*/
private int userId;
/**
* user name
*/
private String userName;
/**
* user token
*/
private String token;
/**
* token expire time
*/
private Date expireTime;
/**
* create time
*/
private Date createTime;
/**
* update time
*/
private Date updateTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public Date getExpireTime() {
return expireTime;
}
public void setExpireTime(Date expireTime) {
this.expireTime = expireTime;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
@Override
public String toString() {
return "AccessToken{" +
"id=" + id +
", userId=" + userId +
", userName='" + userName + '\'' +
", token='" + token + '\'' +
", expireTime=" + expireTime +
", createTime=" + createTime +
", updateTime=" + updateTime +
'}';
}
}
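
AccessToken is a plain data model; how tokens are actually generated and validated lives in AccessTokenService, which is not shown here. Purely as an illustration, a token record could be built and checked for expiry as below; the UUID-based token string and the 24-hour lifetime are assumptions, not the project's policy.

import cn.escheduler.dao.model.AccessToken;
import java.util.Calendar;
import java.util.Date;
import java.util.UUID;

// Illustrative only: building an AccessToken record and checking whether it is still valid.
// Token format and lifetime are assumptions, not taken from AccessTokenService.
public class AccessTokenSketch {

    static AccessToken newToken(int userId) {
        AccessToken accessToken = new AccessToken();
        accessToken.setUserId(userId);
        accessToken.setToken(UUID.randomUUID().toString().replace("-", ""));
        Date now = new Date();
        Calendar cal = Calendar.getInstance();
        cal.setTime(now);
        cal.add(Calendar.HOUR_OF_DAY, 24); // assumed lifetime
        accessToken.setExpireTime(cal.getTime());
        accessToken.setCreateTime(now);
        accessToken.setUpdateTime(now);
        return accessToken;
    }

    static boolean isValid(AccessToken token, Date at) {
        return token.getExpireTime() != null && token.getExpireTime().after(at);
    }
}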

16
escheduler-dao/src/main/java/cn/escheduler/dao/model/Command.java

@ -91,6 +91,12 @@ public class Command {
private Date updateTime;
/**
*
*/
private int workerGroupId;
public Command(){
this.taskDependType = TaskDependType.TASK_POST;
this.failureStrategy = FailureStrategy.CONTINUE;
@ -229,6 +235,15 @@ public class Command {
this.updateTime = updateTime;
}
public int getWorkerGroupId() {
return workerGroupId;
}
public void setWorkerGroupId(int workerGroupId) {
this.workerGroupId = workerGroupId;
}
@Override
public String toString() {
return "Command{" +
@ -245,6 +260,7 @@ public class Command {
", startTime=" + startTime +
", processInstancePriority=" + processInstancePriority +
", updateTime=" + updateTime +
", workerGroupId=" + workerGroupId +
'}';
}
}


290
escheduler-dao/src/main/java/cn/escheduler/dao/model/ErrorCommand.java

@ -0,0 +1,290 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.model;
import cn.escheduler.common.enums.*;
import java.util.Date;
/**
* command
*/
public class ErrorCommand {
/**
* id
*/
private int id;
/**
* command type
*/
private CommandType commandType;
/**
* process definition id
*/
private int processDefinitionId;
/**
* executor id
*/
private int executorId;
/**
* command parameter, format json
*/
private String commandParam;
/**
* task depend type
*/
private TaskDependType taskDependType;
/**
* failure strategy
*/
private FailureStrategy failureStrategy;
/**
* warning type
*/
private WarningType warningType;
/**
* warning group id
*/
private Integer warningGroupId;
/**
* schedule time
*/
private Date scheduleTime;
/**
* start time
*/
private Date startTime;
/**
* process instance priority
*/
private Priority processInstancePriority;
/**
* update time
*/
private Date updateTime;
/**
* execution message
*/
private String message;
/**
* worker group id
*/
private int workerGroupId;
public ErrorCommand(Command command, String message){
this.commandType = command.getCommandType();
this.executorId = command.getExecutorId();
this.processDefinitionId = command.getProcessDefinitionId();
this.commandParam = command.getCommandParam();
this.warningType = command.getWarningType();
this.warningGroupId = command.getWarningGroupId();
this.scheduleTime = command.getScheduleTime();
this.taskDependType = command.getTaskDependType();
this.failureStrategy = command.getFailureStrategy();
this.startTime = command.getStartTime();
this.updateTime = command.getUpdateTime();
this.processInstancePriority = command.getProcessInstancePriority();
this.message = message;
}
public ErrorCommand(
CommandType commandType,
TaskDependType taskDependType,
FailureStrategy failureStrategy,
int executorId,
int processDefinitionId,
String commandParam,
WarningType warningType,
int warningGroupId,
Date scheduleTime,
Priority processInstancePriority,
String message){
this.commandType = commandType;
this.executorId = executorId;
this.processDefinitionId = processDefinitionId;
this.commandParam = commandParam;
this.warningType = warningType;
this.warningGroupId = warningGroupId;
this.scheduleTime = scheduleTime;
this.taskDependType = taskDependType;
this.failureStrategy = failureStrategy;
this.startTime = new Date();
this.updateTime = new Date();
this.processInstancePriority = processInstancePriority;
this.message = message;
}
public TaskDependType getTaskDependType() {
return taskDependType;
}
public void setTaskDependType(TaskDependType taskDependType) {
this.taskDependType = taskDependType;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public CommandType getCommandType() {
return commandType;
}
public void setCommandType(CommandType commandType) {
this.commandType = commandType;
}
public int getProcessDefinitionId() {
return processDefinitionId;
}
public void setProcessDefinitionId(int processDefinitionId) {
this.processDefinitionId = processDefinitionId;
}
public FailureStrategy getFailureStrategy() {
return failureStrategy;
}
public void setFailureStrategy(FailureStrategy failureStrategy) {
this.failureStrategy = failureStrategy;
}
public void setCommandParam(String commandParam) {
this.commandParam = commandParam;
}
public String getCommandParam() {
return commandParam;
}
public WarningType getWarningType() {
return warningType;
}
public void setWarningType(WarningType warningType) {
this.warningType = warningType;
}
public Integer getWarningGroupId() {
return warningGroupId;
}
public void setWarningGroupId(Integer warningGroupId) {
this.warningGroupId = warningGroupId;
}
public Date getScheduleTime() {
return scheduleTime;
}
public void setScheduleTime(Date scheduleTime) {
this.scheduleTime = scheduleTime;
}
public Date getStartTime() {
return startTime;
}
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
public int getExecutorId() {
return executorId;
}
public void setExecutorId(int executorId) {
this.executorId = executorId;
}
public Priority getProcessInstancePriority() {
return processInstancePriority;
}
public void setProcessInstancePriority(Priority processInstancePriority) {
this.processInstancePriority = processInstancePriority;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
public int getWorkerGroupId() {
return workerGroupId;
}
public void setWorkerGroupId(int workerGroupId) {
this.workerGroupId = workerGroupId;
}
@Override
public String toString() {
return "Command{" +
"id=" + id +
", commandType=" + commandType +
", processDefinitionId=" + processDefinitionId +
", executorId=" + executorId +
", commandParam='" + commandParam + '\'' +
", taskDependType=" + taskDependType +
", failureStrategy=" + failureStrategy +
", warningType=" + warningType +
", warningGroupId=" + warningGroupId +
", scheduleTime=" + scheduleTime +
", startTime=" + startTime +
", processInstancePriority=" + processInstancePriority +
", updateTime=" + updateTime +
", message=" + message +
'}';
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
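
The ErrorCommand(Command, message) constructor copies a failed command's fields so the record can be archived together with the failure reason. A minimal usage sketch is shown below; ErrorCommandStore is a hypothetical persistence hook invented for the illustration, not part of this diff.

import cn.escheduler.dao.model.Command;
import cn.escheduler.dao.model.ErrorCommand;

// Illustrative only: archiving a command that could not be processed.
public class ErrorCommandRecorderSketch {

    public interface ErrorCommandStore {
        void insert(ErrorCommand errorCommand);
    }

    private final ErrorCommandStore store;

    public ErrorCommandRecorderSketch(ErrorCommandStore store) {
        this.store = store;
    }

    // copy the failed command's fields plus the failure reason into an ErrorCommand record
    public void record(Command command, String message) {
        store.insert(new ErrorCommand(command, message));
    }
}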

8
escheduler-dao/src/main/java/cn/escheduler/dao/model/ExecuteStatusCount.java

@ -50,4 +50,12 @@ public class ExecuteStatusCount {
public void setCount(int count) {
this.count = count;
}
@Override
public String toString() {
return "ExecuteStatusCount{" +
"state=" + state +
", count=" + count +
'}';
}
}

13
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessInstance.java

@ -177,6 +177,12 @@ public class ProcessInstance {
*/
private Priority processInstancePriority;
/**
* worker group id
*/
private int workerGroupId;
public ProcessInstance(){
}
@ -481,6 +487,13 @@ public class ProcessInstance {
this.duration = duration;
}
public int getWorkerGroupId() {
return workerGroupId;
}
public void setWorkerGroupId(int workerGroupId) {
this.workerGroupId = workerGroupId;
}
@Override
public String toString() {

29
escheduler-dao/src/main/java/cn/escheduler/dao/model/Queue.java

@ -16,6 +16,8 @@
*/
package cn.escheduler.dao.model;
import java.util.Date;
/**
* queue
*/
@ -34,6 +36,15 @@ public class Queue {
*/
private String queue;
/**
* create time
*/
private Date createTime;
/**
* update time
*/
private Date updateTime;
public int getId() {
return id;
}
@ -58,12 +69,30 @@ public class Queue {
this.queue = queue;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
@Override
public String toString() {
return "Queue{" +
"id=" + id +
", queueName='" + queueName + '\'' +
", queue='" + queue + '\'' +
", createTime=" + createTime +
", updateTime=" + updateTime +
'}';
}
}

16
escheduler-dao/src/main/java/cn/escheduler/dao/model/Schedule.java

@ -111,6 +111,11 @@ public class Schedule {
*/
private Priority processInstancePriority;
/**
* worker group id
*/
private int workerGroupId;
public int getWarningGroupId() {
return warningGroupId;
}
@ -256,6 +261,15 @@ public class Schedule {
this.processInstancePriority = processInstancePriority;
}
public int getWorkerGroupId() {
return workerGroupId;
}
public void setWorkerGroupId(int workerGroupId) {
this.workerGroupId = workerGroupId;
}
@Override
public String toString() {
return "Schedule{" +
@ -276,6 +290,8 @@ public class Schedule {
", releaseState=" + releaseState +
", warningGroupId=" + warningGroupId +
", processInstancePriority=" + processInstancePriority +
", workerGroupId=" + workerGroupId +
'}';
}
}

16
escheduler-dao/src/main/java/cn/escheduler/dao/model/TaskInstance.java

@ -182,6 +182,13 @@ public class TaskInstance {
private String dependentResult;
/**
* worker group id
* @return
*/
private int workerGroupId;
public ProcessInstance getProcessInstance() {
return processInstance;
}
@ -439,6 +446,14 @@ public class TaskInstance {
this.processInstancePriority = processInstancePriority;
}
public int getWorkerGroupId() {
return workerGroupId;
}
public void setWorkerGroupId(int workerGroupId) {
this.workerGroupId = workerGroupId;
}
@Override
public String toString() {
return "TaskInstance{" +
@ -470,6 +485,7 @@ public class TaskInstance {
", retryInterval=" + retryInterval +
", taskInstancePriority=" + taskInstancePriority +
", processInstancePriority=" + processInstancePriority +
", workGroupId=" + workerGroupId +
'}';
}

24
escheduler-dao/src/main/java/cn/escheduler/dao/model/TaskRecord.java

@ -46,12 +46,12 @@ public class TaskRecord {
/**
* start date
*/
- private Date startDate;
+ private Date startTime;
/**
* end date
*/
- private Date endDate;
+ private Date endTime;
/**
* result
@ -136,20 +136,20 @@ public class TaskRecord {
this.procDate = procDate;
}
- public Date getStartDate() {
+ public Date getStartTime() {
- return startDate;
+ return startTime;
}
- public void setStartDate(Date startDate) {
+ public void setStartTime(Date startTime) {
- this.startDate = startDate;
+ this.startTime = startTime;
}
- public Date getEndDate() {
+ public Date getEndTime() {
- return endDate;
+ return endTime;
}
- public void setEndDate(Date endDate) {
+ public void setEndTime(Date endTime) {
- this.endDate = endDate;
+ this.endTime = endTime;
}
public String getResult() {
@ -238,8 +238,8 @@ public class TaskRecord {
+" proc id:" + procId
+ " proc name:" + procName
+ " proc date: " + procDate
- + " start date:" + startDate
+ + " start date:" + startTime
- + " end date:" + endDate
+ + " end date:" + endTime
+ " result : " + result
+ " duration : " + duration
+ " note : " + note

49
escheduler-dao/src/main/java/cn/escheduler/dao/model/User.java

@ -79,6 +79,12 @@ public class User {
* alert group
*/
private String alertGroup;
/**
* user specified queue
*/
private String queue;
/**
* create time
*/
@ -194,23 +200,12 @@ public class User {
this.tenantCode = tenantCode;
}
- @Override
- public String toString() {
- return "User{" +
- "id=" + id +
- ", userName='" + userName + '\'' +
- ", userPassword='" + userPassword + '\'' +
- ", email='" + email + '\'' +
- ", phone='" + phone + '\'' +
- ", userType=" + userType +
- ", tenantId=" + tenantId +
- ", tenantCode='" + tenantCode + '\'' +
- ", tenantName='" + tenantName + '\'' +
- ", queueName='" + queueName + '\'' +
- ", alertGroup='" + alertGroup + '\'' +
- ", createTime=" + createTime +
- ", updateTime=" + updateTime +
- '}';
- }
+ public String getQueue() {
+ return queue;
+ }
+ public void setQueue(String queue) {
+ this.queue = queue;
+ }
@Override
@ -237,4 +232,24 @@ public class User {
result = 31 * result + userName.hashCode();
return result;
}
@Override
public String toString() {
return "User{" +
"id=" + id +
", userName='" + userName + '\'' +
", userPassword='" + userPassword + '\'' +
", email='" + email + '\'' +
", phone='" + phone + '\'' +
", userType=" + userType +
", tenantId=" + tenantId +
", tenantCode='" + tenantCode + '\'' +
", tenantName='" + tenantName + '\'' +
", queueName='" + queueName + '\'' +
", alertGroup='" + alertGroup + '\'' +
", queue='" + queue + '\'' +
", createTime=" + createTime +
", updateTime=" + updateTime +
'}';
}
}

88
escheduler-dao/src/main/java/cn/escheduler/dao/model/WorkerGroup.java

@ -0,0 +1,88 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.model;
import java.util.Date;
/**
* worker group for task running
*/
public class WorkerGroup {
private int id;
private String name;
private String ipList;
private Date createTime;
private Date updateTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getIpList() {
return ipList;
}
public void setIpList(String ipList) {
this.ipList = ipList;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
@Override
public String toString() {
return "Worker group model{" +
"id= " + id +
",name= " + name +
",ipList= " + ipList +
",createTime= " + createTime +
",updateTime= " + updateTime +
"}";
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}

82
escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/EschedulerManager.java

@ -0,0 +1,82 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.upgrade;
import cn.escheduler.common.utils.SchemaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* upgrade manager
*/
public class EschedulerManager {
private static final Logger logger = LoggerFactory.getLogger(EschedulerManager.class);
UpgradeDao upgradeDao = UpgradeDao.getInstance();
public void initEscheduler() {
this.initEschedulerSchema();
}
public void initEschedulerSchema() {
logger.info("Start initializing the ark manager mysql table structure");
upgradeDao.initEschedulerSchema();
}
/**
* upgrade escheduler
*/
public void upgradeEscheduler() throws Exception{
// Gets a list of all upgrades
List<String> schemaList = SchemaUtils.getAllSchemaList();
if(schemaList == null || schemaList.size() == 0) {
logger.info("There is no schema to upgrade!");
}else {
String version = "";
// The target version of the upgrade
String schemaVersion = "";
for(String schemaDir : schemaList) {
// Gets the version of the current system
if (upgradeDao.isExistsTable("t_escheduler_version")) {
version = upgradeDao.getCurrentVersion();
}else {
version = "1.0.0";
}
schemaVersion = schemaDir.split("_")[0];
if(SchemaUtils.isAGreatVersion(schemaVersion , version)) {
logger.info("upgrade escheduler metadata version from " + version + " to " + schemaVersion);
logger.info("Begin upgrading escheduler's mysql table structure");
upgradeDao.upgradeEscheduler(schemaDir);
}
}
}
// Assign the value of the version field in the version table to the version of the product
upgradeDao.updateVersion(SchemaUtils.getSoftVersion());
}
}
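
The upgrade loop relies on SchemaUtils.isAGreatVersion to decide whether a schema directory (whose name starts with a version such as "1.0.1") is newer than the version currently recorded in t_escheduler_version. That utility is not shown in this diff; the sketch below illustrates one plausible dotted-version comparison with the same intent, assuming plain numeric "x.y.z" strings, and is not the project's SchemaUtils.

// Illustrative only: a plausible "is schemaVersion greater than currentVersion" check.
public final class VersionCompareSketch {

    private VersionCompareSketch() { }

    public static boolean isGreaterVersion(String schemaVersion, String currentVersion) {
        String[] a = schemaVersion.split("\\.");
        String[] b = currentVersion.split("\\.");
        int len = Math.max(a.length, b.length);
        for (int i = 0; i < len; i++) {
            int ai = i < a.length ? Integer.parseInt(a[i]) : 0;
            int bi = i < b.length ? Integer.parseInt(b[i]) : 0;
            if (ai != bi) {
                return ai > bi;
            }
        }
        return false; // equal versions are not "greater"
    }

    public static void main(String[] args) {
        System.out.println(isGreaterVersion("1.0.1", "1.0.0")); // true
        System.out.println(isGreaterVersion("1.0.0", "1.0.0")); // false
    }
}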

299
escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/UpgradeDao.java

@ -0,0 +1,299 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.upgrade;
import cn.escheduler.common.utils.MysqlUtil;
import cn.escheduler.common.utils.ScriptRunner;
import cn.escheduler.dao.AbstractBaseDao;
import cn.escheduler.dao.datasource.ConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
public class UpgradeDao extends AbstractBaseDao {
public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class);
private static final String T_VERSION_NAME = "t_escheduler_version";
@Override
protected void init() {
}
private static class UpgradeDaoHolder {
private static final UpgradeDao INSTANCE = new UpgradeDao();
}
private UpgradeDao() {
}
public static final UpgradeDao getInstance() {
return UpgradeDaoHolder.INSTANCE;
}
public void initEschedulerSchema() {
// Execute the escheduler DDL, it cannot be rolled back
runInitEschedulerDDL();
// Execute the escheduler DML, it can be rolled back
runInitEschedulerDML();
}
private void runInitEschedulerDML() {
Connection conn = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
conn.setAutoCommit(false);
// Execute the escheduler_dml.sql script to import the data related to escheduler
ScriptRunner initScriptRunner = new ScriptRunner(conn, false, true);
Reader initSqlReader = new FileReader(new File("sql/create/release-1.0.0_schema/mysql/escheduler_dml.sql"));
initScriptRunner.runScript(initSqlReader);
conn.commit();
} catch (IOException e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(),e1);
}
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} catch (Exception e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(),e1);
}
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} finally {
MysqlUtil.realeaseResource(null, null, conn);
}
}
private void runInitEschedulerDDL() {
Connection conn = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
// Execute the escheduler_ddl.sql script to create the table structure of escheduler
ScriptRunner initScriptRunner = new ScriptRunner(conn, true, true);
Reader initSqlReader = new FileReader(new File("sql/create/release-1.0.0_schema/mysql/escheduler_ddl.sql"));
initScriptRunner.runScript(initSqlReader);
} catch (IOException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} catch (Exception e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} finally {
MysqlUtil.realeaseResource(null, null, conn);
}
}
public boolean isExistsTable(String tableName) {
Connection conn = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
ResultSet rs = conn.getMetaData().getTables(null, null, tableName, null);
if (rs.next()) {
return true;
} else {
return false;
}
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} finally {
MysqlUtil.realeaseResource(null, null, conn);
}
}
public String getCurrentVersion() {
String sql = String.format("select version from %s",T_VERSION_NAME);
Connection conn = null;
ResultSet rs = null;
PreparedStatement pstmt = null;
String version = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if (rs.next()) {
version = rs.getString(1);
}
return version;
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException("sql: " + sql, e);
} finally {
MysqlUtil.realeaseResource(rs, pstmt, conn);
}
}
public void upgradeEscheduler(String schemaDir) {
upgradeEschedulerDDL(schemaDir);
upgradeEschedulerDML(schemaDir);
}
private void upgradeEschedulerDML(String schemaDir) {
String schemaVersion = schemaDir.split("_")[0];
String mysqlSQLFilePath = "sql/upgrade/" + schemaDir + "/mysql/escheduler_dml.sql";
Connection conn = null;
PreparedStatement pstmt = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
conn.setAutoCommit(false);
// Execute the upgraded escheduler dml
ScriptRunner scriptRunner = new ScriptRunner(conn, false, true);
Reader sqlReader = new FileReader(new File(mysqlSQLFilePath));
scriptRunner.runScript(sqlReader);
if (isExistsTable(T_VERSION_NAME)) {
// Change version in the version table to the new version
String upgradeSQL = String.format("update %s set version = ?",T_VERSION_NAME);
pstmt = conn.prepareStatement(upgradeSQL);
pstmt.setString(1, schemaVersion);
pstmt.executeUpdate();
}
conn.commit();
} catch (FileNotFoundException e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(),e1);
}
logger.error(e.getMessage(),e);
throw new RuntimeException("sql file not found ", e);
} catch (IOException e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(),e1);
}
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} catch (SQLException e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(),e1);
}
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} catch (Exception e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(),e1);
}
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} finally {
MysqlUtil.realeaseResource(null, pstmt, conn);
}
}
private void upgradeEschedulerDDL(String schemaDir) {
String mysqlSQLFilePath = "sql/upgrade/" + schemaDir + "/mysql/escheduler_ddl.sql";
Connection conn = null;
PreparedStatement pstmt = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
String dbName = conn.getCatalog();
logger.info(dbName);
conn.setAutoCommit(true);
// Execute the escheduler ddl.sql for the upgrade
ScriptRunner scriptRunner = new ScriptRunner(conn, true, true);
Reader sqlReader = new FileReader(new File(mysqlSQLFilePath));
scriptRunner.runScript(sqlReader);
} catch (FileNotFoundException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException("sql file not found ", e);
} catch (IOException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} catch (Exception e) {
logger.error(e.getMessage(),e);
throw new RuntimeException(e.getMessage(),e);
} finally {
MysqlUtil.realeaseResource(null, pstmt, conn);
}
}
public void updateVersion(String version) {
// Change version in the version table to the new version
String upgradeSQL = String.format("update %s set version = ?",T_VERSION_NAME);
PreparedStatement pstmt = null;
Connection conn = null;
try {
conn = ConnectionFactory.getDataSource().getConnection();
pstmt = conn.prepareStatement(upgradeSQL);
pstmt.setString(1, version);
pstmt.executeUpdate();
} catch (SQLException e) {
logger.error(e.getMessage(),e);
throw new RuntimeException("sql: " + upgradeSQL, e);
} finally {
MysqlUtil.realeaseResource(null, pstmt, conn);
}
}
}
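
getCurrentVersion and updateVersion open and release JDBC resources by hand through MysqlUtil.realeaseResource. As a design note, the same query can also be written with try-with-resources; the sketch below is a generic JDBC illustration that assumes a javax.sql.DataSource supplied by the caller rather than the project's ConnectionFactory, so it is not a drop-in replacement.

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Sketch: reading the recorded version with try-with-resources, so the connection,
// statement and result set are closed even when an exception is thrown.
public class VersionQuerySketch {

    private final DataSource dataSource; // assumed to be provided by the caller

    public VersionQuerySketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public String getCurrentVersion(String versionTable) {
        String sql = "select version from " + versionTable;
        try (Connection conn = dataSource.getConnection();
             PreparedStatement pstmt = conn.prepareStatement(sql);
             ResultSet rs = pstmt.executeQuery()) {
            return rs.next() ? rs.getString(1) : null;
        } catch (SQLException e) {
            throw new RuntimeException("sql: " + sql, e);
        }
    }
}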

44
escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/CreateEscheduler.java

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.upgrade.shell;
import cn.escheduler.dao.upgrade.EschedulerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* init escheduler
*
*/
public class CreateEscheduler {
private static final Logger logger = LoggerFactory.getLogger(CreateEscheduler.class);
public static void main(String[] args) {
Thread.currentThread().setName("manager-CreateEscheduler");
EschedulerManager eschedulerManager = new EschedulerManager();
eschedulerManager.initEscheduler();
logger.info("init escheduler finished");
try {
eschedulerManager.upgradeEscheduler();
logger.info("upgrade escheduler finished");
} catch (Exception e) {
logger.error("upgrade escheduler failed",e);
}
}
}

38
escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/InitEscheduler.java

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.upgrade.shell;
import cn.escheduler.dao.upgrade.EschedulerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* init escheduler
*
*/
public class InitEscheduler {
private static final Logger logger = LoggerFactory.getLogger(InitEscheduler.class);
public static void main(String[] args) {
Thread.currentThread().setName("manager-InitEscheduler");
EschedulerManager eschedulerManager = new EschedulerManager();
eschedulerManager.initEscheduler();
logger.info("init escheduler finished");
}
}

47
escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/UpgradeEscheduler.java

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.upgrade.shell;
import cn.escheduler.dao.upgrade.EschedulerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* upgrade escheduler database
*/
public class UpgradeEscheduler {
private static final Logger logger = LoggerFactory.getLogger(UpgradeEscheduler.class);
public static void main(String[] args) {
Thread.currentThread().setName("manager-UpgradeEscheduler");
EschedulerManager eschedulerManager = new EschedulerManager();
try {
eschedulerManager.upgradeEscheduler();
logger.info("upgrade escheduler finished");
} catch (Exception e) {
logger.error(e.getMessage(),e);
logger.info("Upgrade escheduler failed");
throw new RuntimeException(e);
}
}
}

6
escheduler-dao/src/main/resources/dao/data_source.properties

@ -1,9 +1,9 @@
# base spring data source configuration
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
- spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/escheduler?characterEncoding=UTF-8
+ spring.datasource.url=jdbc:mysql://192.168.220.188:3306/escheduler_new?characterEncoding=UTF-8
- spring.datasource.username=xx
+ spring.datasource.username=root
- spring.datasource.password=xx
+ spring.datasource.password=root@123
# connection configuration
spring.datasource.initialSize=5

62
escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AccessTokenMapperTest.java

@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import cn.escheduler.common.utils.EncryptionUtils;
import cn.escheduler.dao.datasource.ConnectionFactory;
import cn.escheduler.dao.model.AccessToken;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Date;
import java.util.List;
public class AccessTokenMapperTest {
AccessTokenMapper accessTokenMapper;
@Before
public void before(){
accessTokenMapper = ConnectionFactory.getSqlSession().getMapper(AccessTokenMapper.class);
}
@Test
public void testInsert(){
AccessToken accessToken = new AccessToken();
accessToken.setUserId(10);
accessToken.setExpireTime(new Date());
accessToken.setToken("ssssssssssssssssssssssssss");
accessToken.setCreateTime(new Date());
accessToken.setUpdateTime(new Date());
accessTokenMapper.insert(accessToken);
}
@Test
public void testListPaging(){
Integer count = accessTokenMapper.countAccessTokenPaging(1,"");
Assert.assertEquals(count, (Integer) 5);
List<AccessToken> accessTokenList = accessTokenMapper.queryAccessTokenPaging(1,"", 0, 2);
Assert.assertEquals(accessTokenList.size(), 2);
}
}

12
escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserMapperTest.java

@ -60,4 +60,16 @@ public class UserMapperTest {
}
@Test
public void queryQueueByProcessInstanceId(){
String queue = userMapper.queryQueueByProcessInstanceId(41388);
Assert.assertEquals(queue, "ait");
}
@Test
public void testQueryUserByToken(){
User user = userMapper.queryUserByToken("ad9e8fccfc11bd18bb45aa994568b8ef");
Assert.assertEquals(user.getUserName(), "qiaozhanwei");
}
}

69
escheduler-dao/src/test/java/cn/escheduler/dao/mapper/WorkerGroupMapperTest.java

@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.mapper;
import cn.escheduler.dao.datasource.ConnectionFactory;
import cn.escheduler.dao.model.WorkerGroup;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Date;
import java.util.List;
/**
* worker group mapper test
*/
public class WorkerGroupMapperTest {
WorkerGroupMapper workerGroupMapper;
@Before
public void before() {
workerGroupMapper = ConnectionFactory.getSqlSession().getMapper(WorkerGroupMapper.class);
}
@Test
public void test() {
WorkerGroup workerGroup = new WorkerGroup();
String name = "workerGroup3";
workerGroup.setName(name);
workerGroup.setIpList("192.168.220.154,192.168.220.188");
workerGroup.setCreateTime(new Date());
workerGroup.setUpdateTime(new Date());
workerGroupMapper.insert(workerGroup);
Assert.assertNotEquals(workerGroup.getId(), 0);
List<WorkerGroup> workerGroups2 = workerGroupMapper.queryWorkerGroupByName(name);
Assert.assertEquals(workerGroups2.size(), 1);
workerGroup.setName("workerGroup11");
workerGroupMapper.update(workerGroup);
List<WorkerGroup> workerGroups = workerGroupMapper.queryAllWorkerGroup();
Assert.assertNotEquals(workerGroups.size(), 0);
workerGroupMapper.deleteById(workerGroup.getId());
workerGroups = workerGroupMapper.queryAllWorkerGroup();
Assert.assertEquals(workerGroups.size(), 0);
}
}

2
escheduler-rpc/pom.xml

@ -4,7 +4,7 @@
<parent>
<artifactId>escheduler</artifactId>
<groupId>cn.analysys</groupId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

2
escheduler-server/pom.xml

@ -3,7 +3,7 @@
<parent>
<artifactId>escheduler</artifactId>
<groupId>cn.analysys</groupId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.0.1-SNAPSHOT</version>
</parent>
<artifactId>escheduler-server</artifactId>
<name>escheduler-server</name>

3
escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java

@ -404,6 +404,9 @@ public class MasterExecThread implements Runnable {
taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority());
}
int workerGroupId = taskNode.getWorkerGroupId();
taskInstance.setWorkerGroupId(workerGroupId);
}
return taskInstance;
}

56
escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java

@ -26,6 +26,7 @@ import cn.escheduler.dao.ProcessDao;
import cn.escheduler.dao.model.ProcessDefinition;
import cn.escheduler.dao.model.ProcessInstance;
import cn.escheduler.dao.model.TaskInstance;
import cn.escheduler.dao.model.WorkerGroup;
import cn.escheduler.server.zk.ZKWorkerClient;
import com.cronutils.utils.StringUtils;
import org.apache.commons.configuration.Configuration;
@ -33,7 +34,9 @@ import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
@ -90,6 +93,42 @@ public class FetchTaskThread implements Runnable{
this.taskQueue = taskQueue;
}
/**
* Check if the task runs on this worker
* @param taskInstance
* @param host
* @return
*/
private boolean checkWorkerGroup(TaskInstance taskInstance, String host){
int taskWorkerGroupId = taskInstance.getWorkerGroupId();
ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskInstance.getId());
if(processInstance == null){
logger.error("cannot find the task:{} process instance", taskInstance.getId());
return false;
}
int processWorkerGroupId = processInstance.getWorkerGroupId();
taskWorkerGroupId = (taskWorkerGroupId <= 0 ? processWorkerGroupId : taskWorkerGroupId);
if(taskWorkerGroupId <= 0){
return true;
}
WorkerGroup workerGroup = processDao.queryWorkerGroupById(taskWorkerGroupId);
if(workerGroup == null ){
logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId());
return true;
}
String ips = workerGroup.getIpList();
if(ips == null){
logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers",
taskInstance.getId(), workerGroup.getId());
// no ip_list configured: fall back to running on any worker (otherwise the split below throws NPE)
return true;
}
String[] ipArray = ips.split(",");
List<String> ipList = Arrays.asList(ipArray);
return ipList.contains(host);
}
@Override @Override
public void run() { public void run() {
@ -116,11 +155,13 @@ public class FetchTaskThread implements Runnable{
}
// task instance id str
- String taskInstIdStr = taskQueue.poll(Constants.SCHEDULER_TASKS_QUEUE);
+ String taskQueueStr = taskQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, false);
- if (!StringUtils.isEmpty(taskInstIdStr)) {
+ if (!StringUtils.isEmpty(taskQueueStr )) {
- Date now = new Date();
+ String[] taskStringArray = taskQueueStr.split(Constants.UNDERLINE);
+ String taskInstIdStr = taskStringArray[taskStringArray.length - 1];
+ Date now = new Date();
Integer taskId = Integer.parseInt(taskInstIdStr);
// find task instance by task id
@ -136,10 +177,15 @@ public class FetchTaskThread implements Runnable{
retryTimes--;
}
- if (taskInstance == null) {
+ if (taskInstance == null ) {
logger.error("task instance is null. task id : {} ", taskId);
continue;
}
+ if(!checkWorkerGroup(taskInstance, OSUtils.getHost())){
+ continue;
+ }
+ taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr);
+ logger.info("remove task:{} from queue", taskQueueStr);
// set execute task worker host
taskInstance.setHost(OSUtils.getHost());
@ -172,7 +218,7 @@ public class FetchTaskThread implements Runnable{
FileUtils.createWorkDirAndUserIfAbsent(execLocalPath,
processInstance.getTenantCode(), logger);
logger.info("task : {} ready to submit to task scheduler thread",taskId);
// submit task
workerExecService.submit(new TaskScheduleThread(taskInstance, processDao));
}
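
With this change the worker no longer polls a bare task-instance id: it polls a composite queue string, takes the last underscore-separated field as the task instance id, and only removes the node from the ZooKeeper queue after checkWorkerGroup confirms that its own host appears in the target worker group's ip_list. The standalone sketch below mirrors that parsing and membership check; the sample queue string and values are invented for the demonstration, since the exact queue-string layout is not spelled out in this hunk.

import java.util.Arrays;
import java.util.List;

// Illustrative only: mirrors the id extraction and ip_list membership check in FetchTaskThread.
public class WorkerGroupCheckSketch {

    // take the last underscore-separated field as the task instance id
    static int parseTaskInstanceId(String taskQueueStr) {
        String[] fields = taskQueueStr.split("_");
        return Integer.parseInt(fields[fields.length - 1]);
    }

    // a task may run on this worker if no ip_list restriction is configured
    // or if the worker's host appears in the comma-separated ip_list
    static boolean mayRunHere(String ipList, String host) {
        if (ipList == null || ipList.isEmpty()) {
            return true;
        }
        List<String> ips = Arrays.asList(ipList.split(","));
        return ips.contains(host);
    }

    public static void main(String[] args) {
        System.out.println(parseTaskInstanceId("1_220_2_130"));                            // 130
        System.out.println(mayRunHere("192.168.220.154,192.168.220.188", "192.168.220.154")); // true
        System.out.println(mayRunHere("192.168.220.188", "10.0.0.1"));                     // false
    }
}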

11
escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java

@ -36,6 +36,7 @@ import cn.escheduler.server.worker.task.AbstractTask;
import cn.escheduler.server.worker.task.TaskManager;
import cn.escheduler.server.worker.task.TaskProps;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -154,12 +155,18 @@ public class TaskScheduleThread implements Callable<Boolean> {
taskProps.setTenantCode(taskInstance.getProcessInstance().getTenantCode());
ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskInstance.getId());
String queue = processDao.queryQueueByProcessInstanceId(processInstance.getId());
taskProps.setScheduleTime(processInstance.getScheduleTime());
taskProps.setNodeName(taskInstance.getName());
taskProps.setTaskInstId(taskInstance.getId());
taskProps.setEnvFile(CommonUtils.getSystemEnvPath());
// set queue
- taskProps.setQueue(taskInstance.getProcessInstance().getQueue());
+ if (StringUtils.isEmpty(queue)){
+ taskProps.setQueue(taskInstance.getProcessInstance().getQueue());
+ }else {
+ taskProps.setQueue(queue);
+ }
taskProps.setTaskStartTime(taskInstance.getStartTime());
taskProps.setDefinedParams(allParamMap);
@ -188,7 +195,7 @@ public class TaskScheduleThread implements Callable<Boolean> {
task.handle();
- logger.info("task : {} exit status code : {}",taskProps.getTaskAppId(),task.getExitStatusCode());
+ logger.info("task : {} exit status code : {}", taskProps.getTaskAppId(),task.getExitStatusCode());
if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){
status = ExecutionStatus.SUCCESS;

73
escheduler-server/src/main/java/cn/escheduler/server/worker/task/PythonCommandExecutor.java

@ -16,12 +16,13 @@
*/
package cn.escheduler.server.worker.task;
+ import cn.escheduler.common.Constants;
import cn.escheduler.common.utils.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
- import java.io.File;
+ import java.io.*;
- import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
@ -34,6 +35,8 @@ import java.util.function.Consumer;
*/
public class PythonCommandExecutor extends AbstractCommandExecutor {
private static final Logger logger = LoggerFactory.getLogger(PythonCommandExecutor.class);
public static final String PYTHON = "python";
@ -63,27 +66,13 @@ public class PythonCommandExecutor extends AbstractCommandExecutor {
*/
@Override
protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException {
- logger.info("proxy user:{}, work dir:{}", tenantCode, taskDir);
+ logger.info("tenant :{}, work dir:{}", tenantCode, taskDir);
if (!Files.exists(Paths.get(commandFile))) {
logger.info("generate command file:{}", commandFile);
StringBuilder sb = new StringBuilder(200);
sb.append("#-*- encoding=utf8 -*-\n");
- sb.append("import os,sys\n");
- sb.append("BASEDIR = os.path.dirname(os.path.realpath(__file__))\n");
- sb.append("os.chdir(BASEDIR)\n");
- if (StringUtils.isNotEmpty(envFile)) {
- String[] envArray = envFile.split("\\.");
- if(envArray.length == 2){
- String path = envArray[0];
- logger.info("path:"+path);
- int index = path.lastIndexOf("/");
- sb.append(String.format("sys.path.append('%s')\n",path.substring(0,index)));
- sb.append(String.format("import %s\n",path.substring(index+1)));
- }
- }
sb.append("\n\n");
sb.append(String.format("import py_%s_node\n",taskAppId));
@ -96,7 +85,14 @@ public class PythonCommandExecutor extends AbstractCommandExecutor {
@Override
protected String commandType() {
- return PYTHON;
+ String envPath = System.getProperty("user.dir") + Constants.SINGLE_SLASH + "conf"+
+ Constants.SINGLE_SLASH +"env" + Constants.SINGLE_SLASH + Constants.ESCHEDULER_ENV_SH;
+ String pythonHome = getPythonHome(envPath);
+ if (StringUtils.isEmpty(pythonHome)){
+ return PYTHON;
+ }
+ return pythonHome;
}
@Override
@ -109,4 +105,45 @@ public class PythonCommandExecutor extends AbstractCommandExecutor {
return true;
}
/**
* get python home
* @param envPath
* @return
*/
private static String getPythonHome(String envPath){
BufferedReader br = null;
String line = null;
StringBuilder sb = new StringBuilder();
try {
br = new BufferedReader(new InputStreamReader(new FileInputStream(envPath)));
while ((line = br.readLine()) != null){
if (line.contains(Constants.PYTHON_HOME)){
sb.append(line);
break;
}
}
String result = sb.toString();
if (org.apache.commons.lang.StringUtils.isEmpty(result)){
return null;
}
String[] arrs = result.split("=");
if (arrs.length == 2){
return arrs[1];
}
}catch (IOException e){
logger.error("read file failed : " + e.getMessage(),e);
}finally {
try {
if (br != null){
br.close();
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
}
return null;
}
}
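
commandType() now resolves the Python interpreter from the PYTHON_HOME entry of the env file under conf/env (Constants.ESCHEDULER_ENV_SH), falling back to plain "python". getPythonHome simply returns whatever follows the first "=" on the first line containing PYTHON_HOME. The standalone illustration below applies the same parsing to an assumed env-file line; the path in the example is not taken from this diff.

// Illustrative only: the "take the text after '=' on the PYTHON_HOME line" parsing
// that getPythonHome applies, run against an assumed env-file line.
public class PythonHomeParseSketch {

    static String parsePythonHome(String envLine) {
        if (envLine == null || !envLine.contains("PYTHON_HOME")) {
            return null;
        }
        String[] parts = envLine.split("=");
        return parts.length == 2 ? parts[1] : null;
    }

    public static void main(String[] args) {
        // assumed example line; an "export PYTHON_HOME=..." form would yield the same value
        System.out.println(parsePythonHome("PYTHON_HOME=/opt/python3/bin/python3")); // /opt/python3/bin/python3
    }
}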

14
escheduler-server/src/main/java/cn/escheduler/server/worker/task/processdure/ProcedureTask.java

@ -22,8 +22,11 @@ import cn.escheduler.common.enums.DbType;
import cn.escheduler.common.enums.Direct;
import cn.escheduler.common.enums.TaskTimeoutStrategy;
import cn.escheduler.common.job.db.BaseDataSource;
import cn.escheduler.common.job.db.ClickHouseDataSource;
import cn.escheduler.common.job.db.MySQLDataSource;
import cn.escheduler.common.job.db.OracleDataSource;
import cn.escheduler.common.job.db.PostgreDataSource;
import cn.escheduler.common.job.db.SQLServerDataSource;
import cn.escheduler.common.process.Property;
import cn.escheduler.common.task.AbstractParameters;
import cn.escheduler.common.task.procedure.ProcedureParameters;
@ -111,6 +114,17 @@ public class ProcedureTask extends AbstractTask {
}else if (DbType.POSTGRESQL.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),PostgreDataSource.class);
Class.forName(Constants.JDBC_POSTGRESQL_CLASS_NAME);
}else if (DbType.CLICKHOUSE.name().equals(dataSource.getType().name())){
// NOTE: currently, ClickHouse don't support procedure or UDF yet,
// but still load JDBC driver to keep source code sync with other DB
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),ClickHouseDataSource.class);
Class.forName(Constants.JDBC_CLICKHOUSE_CLASS_NAME);
}else if (DbType.ORACLE.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(), OracleDataSource.class);
Class.forName(Constants.JDBC_ORACLE_CLASS_NAME);
}else if (DbType.SQLSERVER.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(), SQLServerDataSource.class);
Class.forName(Constants.JDBC_SQLSERVER_CLASS_NAME);
}
// get jdbc connection
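
Both ProcedureTask and SqlTask (below) grow the same if/else chain that pairs a DbType with a datasource model class and a JDBC driver class name. As a design note, that pairing can also be expressed as a lookup table; the sketch below is a generic illustration with a stand-in enum and commonly used driver class names, not the project's Constants or its actual refactoring.

import java.util.EnumMap;
import java.util.Map;

// Illustrative only: replacing a growing if/else chain with a DbType -> driver lookup.
public class DriverLookupSketch {

    enum DbType { MYSQL, POSTGRESQL, CLICKHOUSE, ORACLE, SQLSERVER }

    private static final Map<DbType, String> DRIVERS = new EnumMap<>(DbType.class);
    static {
        DRIVERS.put(DbType.MYSQL, "com.mysql.jdbc.Driver");
        DRIVERS.put(DbType.POSTGRESQL, "org.postgresql.Driver");
        DRIVERS.put(DbType.CLICKHOUSE, "ru.yandex.clickhouse.ClickHouseDriver");
        DRIVERS.put(DbType.ORACLE, "oracle.jdbc.OracleDriver");
        DRIVERS.put(DbType.SQLSERVER, "com.microsoft.sqlserver.jdbc.SQLServerDriver");
    }

    static void loadDriver(DbType type) throws ClassNotFoundException {
        String driver = DRIVERS.get(type);
        if (driver == null) {
            throw new IllegalArgumentException("unsupported db type: " + type);
        }
        Class.forName(driver); // requires the corresponding driver jar on the classpath
    }
}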

2
escheduler-server/src/main/java/cn/escheduler/server/worker/task/python/PythonTask.java

@ -72,7 +72,7 @@ public class PythonTask extends AbstractTask {
this.pythonProcessTask = new PythonCommandExecutor(this::logHandle,
taskProps.getTaskDir(), taskProps.getTaskAppId(),
- taskProps.getTenantCode(), CommonUtils.getPythonSystemEnvPath(), taskProps.getTaskStartTime(),
+ taskProps.getTenantCode(), null, taskProps.getTaskStartTime(),
taskProps.getTaskTimeout(), logger);
this.processDao = DaoFactory.getDaoInstance(ProcessDao.class);
}

9
escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java

@ -120,6 +120,15 @@ public class SqlTask extends AbstractTask {
}else if (DbType.SPARK.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),SparkDataSource.class);
Class.forName(Constants.JDBC_SPARK_CLASS_NAME);
}else if (DbType.CLICKHOUSE.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),ClickHouseDataSource.class);
Class.forName(Constants.JDBC_CLICKHOUSE_CLASS_NAME);
}else if (DbType.ORACLE.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),OracleDataSource.class);
Class.forName(Constants.JDBC_ORACLE_CLASS_NAME);
}else if (DbType.SQLSERVER.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),SQLServerDataSource.class);
Class.forName(Constants.JDBC_SQLSERVER_CLASS_NAME);
}
Map<Integer,Property> sqlParamMap = new HashMap<Integer,Property>();

65
escheduler-server/src/test/java/cn/escheduler/server/worker/EnvFileTest.java

@ -0,0 +1,65 @@
package cn.escheduler.server.worker;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
/**
* Created by qiaozhanwei on 2019/4/15.
*/
public class EnvFileTest {
private static final Logger logger = LoggerFactory.getLogger(EnvFileTest.class);
public static void main(String[] args) {
String path = System.getProperty("user.dir")+"\\script\\env\\.escheduler_env.sh";
String pythonHome = getPythonHome(path);
logger.info(pythonHome);
}
/**
* get python home
* @param path
* @return
*/
private static String getPythonHome(String path){
BufferedReader br = null;
String line = null;
StringBuilder sb = new StringBuilder();
try {
br = new BufferedReader(new InputStreamReader(new FileInputStream(path)));
while ((line = br.readLine()) != null){
if (line.contains("PYTHON_HOME")){
sb.append(line);
break;
}
}
String result = sb.toString();
if (StringUtils.isEmpty(result)){
return null;
}
String[] arrs = result.split("=");
if (arrs.length == 2){
return arrs[1];
}
}catch (IOException e){
logger.error("read file failed : " + e.getMessage(),e);
}finally {
try {
if (br != null){
br.close();
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
}
return null;
}
}

52
escheduler-server/src/test/java/cn/escheduler/server/worker/sql/SqlExecutorTest.java

@ -52,21 +52,63 @@ public class SqlExecutorTest {
@Test
public void test() throws Exception {
String nodeName = "mysql sql test";
String taskAppId = "51_11282_263978";
String tenantCode = "hdfs";
int taskInstId = 263978;
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
}
@Test
public void testClickhouse() throws Exception {
String nodeName = "ClickHouse sql test";
String taskAppId = "1_11_20";
String tenantCode = "default";
int taskInstId = 20;
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
}
@Test
public void testOracle() throws Exception {
String nodeName = "oracle sql test";
String taskAppId = "2_13_25";
String tenantCode = "demo";
int taskInstId = 25;
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
}
@Test
public void testSQLServer() throws Exception {
String nodeName = "SQL Server sql test";
String taskAppId = "3_14_27";
String tenantCode = "demo";
int taskInstId = 27;
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
}
/**
* Basic test template for SQLTasks, mainly test different types of DBMS types
* @param nodeName node name for selected task
* @param taskAppId task app id
* @param tenantCode tenant code
* @param taskInstId task instance id
* @throws Exception
*/
private void sharedTestSqlTask(String nodeName, String taskAppId, String tenantCode, int taskInstId) throws Exception {
TaskProps taskProps = new TaskProps();
taskProps.setTaskDir("");
// processDefineId_processInstanceId_taskInstanceId
- taskProps.setTaskAppId("51_11282_263978");
+ taskProps.setTaskAppId(taskAppId);
// set tenant -> task execute linux user
- taskProps.setTenantCode("hdfs");
+ taskProps.setTenantCode(tenantCode);
taskProps.setTaskStartTime(new Date());
taskProps.setTaskTimeout(360000);
- taskProps.setTaskInstId(263978);
+ taskProps.setTaskInstId(taskInstId);
- taskProps.setNodeName("mysql sql test");
+ taskProps.setNodeName(nodeName);
- TaskInstance taskInstance = processDao.findTaskInstanceById(263978);
+ TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId);
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class);
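
sharedTestSqlTask factors the per-DBMS differences into parameters while each @Test method still hard-codes its own ids. As a sketch of an alternative, JUnit 4's Parameterized runner can drive one test body from a table of cases; the values below are just the ones from the new test methods, and the call to sharedTestSqlTask is left commented because that helper is private to SqlExecutorTest.

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.Collection;

// Sketch only: the same four cases expressed as JUnit 4 parameters.
@RunWith(Parameterized.class)
public class SqlTaskCasesSketch {

    @Parameterized.Parameters(name = "{0}")
    public static Collection<Object[]> cases() {
        return Arrays.asList(new Object[][]{
                {"mysql sql test", "51_11282_263978", "hdfs", 263978},
                {"ClickHouse sql test", "1_11_20", "default", 20},
                {"oracle sql test", "2_13_25", "demo", 25},
                {"SQL Server sql test", "3_14_27", "demo", 27},
        });
    }

    private final String nodeName;
    private final String taskAppId;
    private final String tenantCode;
    private final int taskInstId;

    public SqlTaskCasesSketch(String nodeName, String taskAppId, String tenantCode, int taskInstId) {
        this.nodeName = nodeName;
        this.taskAppId = taskAppId;
        this.tenantCode = tenantCode;
        this.taskInstId = taskInstId;
    }

    @Test
    public void runSqlTask() throws Exception {
        // delegate to the helper introduced in this diff (assumed accessible in the real test class):
        // sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
    }
}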

Some files were not shown because too many files have changed in this diff.
