
add mybatis plus (#881)

* update english documents

* refactor zk client

* update documents

* update zkclient

* update zkclient

* update documents

* add architecture-design

* change i18n

* update i18n

* update english documents

* add architecture-design

* update english documents

* update en-US documents

* add architecture-design

* update demo site

* add mybatis plus model

* modify mybatisplus

* modify mybatisplus

* change interface by mybatisplus
Authored by bao liang 5 years ago, committed by lgcareer (commit ef4ec213e3)
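
The changes below follow one pattern: entity classes move from cn.escheduler.dao.model to cn.escheduler.dao.entity, and hand-written mapper calls (queryById, update, delete, countXxxPaging/queryXxxPaging) are replaced by the generic CRUD and pagination API that MyBatis-Plus adds to a mapper. The sketch below is illustrative only and is not part of this commit: it assumes a mapper that extends BaseMapper, and the @Param names are assumptions; the SQL behind the custom paged query is not shown in this excerpt.

// Illustrative sketch (not from this commit): what a DAO mapper looks like once it
// extends MyBatis-Plus BaseMapper. selectById/insert/updateById/deleteById come for
// free; only project-specific queries such as the paged search still need custom SQL.
package cn.escheduler.dao.mapper;

import cn.escheduler.dao.entity.AccessToken;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.ibatis.annotations.Param;

public interface AccessTokenMapper extends BaseMapper<AccessToken> {

    // custom paged query used by AccessTokenService further down; passing the Page as
    // the first argument lets the MyBatis-Plus pagination interceptor append the LIMIT
    // clause and fill in the total row count (the @Param names here are assumptions)
    IPage<AccessToken> selectAccessTokenPage(Page<AccessToken> page,
                                             @Param("userName") String userName,
                                             @Param("userId") int userId);
}

Pagination of this form generally requires MyBatis-Plus's pagination interceptor to be registered as a Spring bean; whether this commit registers it is outside the excerpt shown here.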
Changed files (number of changed lines per file in parentheses):

1. escheduler-alert/src/main/java/cn/escheduler/alert/AlertServer.java (2)
2. escheduler-alert/src/main/java/cn/escheduler/alert/manager/EnterpriseWeChatManager.java (2)
3. escheduler-alert/src/main/java/cn/escheduler/alert/manager/MsgManager.java (2)
4. escheduler-alert/src/main/java/cn/escheduler/alert/runner/AlertSender.java (4)
5. escheduler-alert/src/main/java/cn/escheduler/alert/utils/Constants.java (2)
6. escheduler-alert/src/main/java/cn/escheduler/alert/utils/EnterpriseWeChatUtils.java (2)
7. escheduler-alert/src/test/java/cn/escheduler/alert/utils/MailUtilsTest.java (4)
8. escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java (3)
9. escheduler-api/src/main/java/cn/escheduler/api/controller/AlertGroupController.java (2)
10. escheduler-api/src/main/java/cn/escheduler/api/controller/BaseController.java (2)
11. escheduler-api/src/main/java/cn/escheduler/api/controller/DataAnalysisController.java (2)
12. escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java (7)
13. escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java (2)
14. escheduler-api/src/main/java/cn/escheduler/api/controller/LoggerController.java (2)
15. escheduler-api/src/main/java/cn/escheduler/api/controller/LoginController.java (5)
16. escheduler-api/src/main/java/cn/escheduler/api/controller/MonitorController.java (6)
17. escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java (2)
18. escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java (2)
19. escheduler-api/src/main/java/cn/escheduler/api/controller/ProjectController.java (2)
20. escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java (2)
21. escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java (2)
22. escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java (2)
23. escheduler-api/src/main/java/cn/escheduler/api/controller/TaskInstanceController.java (2)
24. escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java (6)
25. escheduler-api/src/main/java/cn/escheduler/api/controller/TenantController.java (3)
26. escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java (2)
27. escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java (2)
28. escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java (9)
29. escheduler-api/src/main/java/cn/escheduler/api/dto/DefineUserDto.java (2)
30. escheduler-api/src/main/java/cn/escheduler/api/dto/TaskCountDto.java (2)
31. escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java (6)
32. escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java (29)
33. escheduler-api/src/main/java/cn/escheduler/api/service/AlertGroupService.java (34)
34. escheduler-api/src/main/java/cn/escheduler/api/service/BaseDAGService.java (4)
35. escheduler-api/src/main/java/cn/escheduler/api/service/BaseService.java (2)
36. escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java (100)
37. escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java (74)
38. escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java (18)
39. escheduler-api/src/main/java/cn/escheduler/api/service/LoggerService.java (2)
40. escheduler-api/src/main/java/cn/escheduler/api/service/MonitorService.java (8)
41. escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java (53)
42. escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java (24)
43. escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java (56)
44. escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java (21)
45. escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java (84)
46. escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java (53)
47. escheduler-api/src/main/java/cn/escheduler/api/service/ServerService.java (77)
48. escheduler-api/src/main/java/cn/escheduler/api/service/SessionService.java (9)
49. escheduler-api/src/main/java/cn/escheduler/api/service/TaskInstanceService.java (22)
50. escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java (2)
51. escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java (25)
52. escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java (73)
53. escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java (48)
54. escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java (20)
55. escheduler-api/src/main/java/cn/escheduler/api/utils/ZookeeperMonitor.java (2)
56. escheduler-api/src/test/java/cn/escheduler/api/controller/AbstractControllerTest.java (11)
57. escheduler-api/src/test/java/cn/escheduler/api/service/DataAnalysisServiceTest.java (2)
58. escheduler-api/src/test/java/cn/escheduler/api/service/DataSourceServiceTest.java (2)
59. escheduler-api/src/test/java/cn/escheduler/api/service/LoggerServiceTest.java (2)
60. escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java (2)
61. escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java (2)
62. escheduler-api/src/test/java/cn/escheduler/api/service/ResourcesServiceTest.java (2)
63. escheduler-api/src/test/java/cn/escheduler/api/service/SchedulerServiceTest.java (4)
64. escheduler-api/src/test/java/cn/escheduler/api/service/SessionServiceTest.java (2)
65. escheduler-api/src/test/java/cn/escheduler/api/service/TaskInstanceServiceTest.java (2)
66. escheduler-api/src/test/java/cn/escheduler/api/service/TenantServiceTest.java (2)
67. escheduler-api/src/test/java/cn/escheduler/api/service/UdfFuncServiceTest.java (2)
68. escheduler-api/src/test/java/cn/escheduler/api/service/UsersServiceTest.java (2)
69. escheduler-common/.factorypath (185)
70. escheduler-common/pom.xml (5)
71. escheduler-common/src/main/java/cn/escheduler/common/utils/EnumFieldUtil.java (7)
72. escheduler-dao/pom.xml (63)
73. escheduler-dao/src/main/java/cn/escheduler/dao/AlertDao.java (20)
74. escheduler-dao/src/main/java/cn/escheduler/dao/MonitorDBDao.java (2)
75. escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java (152)
76. escheduler-dao/src/main/java/cn/escheduler/dao/ServerDao.java (164)
77. escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java (3)
78. escheduler-dao/src/main/java/cn/escheduler/dao/datasource/ConnectionFactory.java (53)
79. escheduler-dao/src/main/java/cn/escheduler/dao/datasource/DatabaseConfiguration.java (126)
80. escheduler-dao/src/main/java/cn/escheduler/dao/entity/AccessToken.java (78)
81. escheduler-dao/src/main/java/cn/escheduler/dao/entity/Alert.java (16)
82. escheduler-dao/src/main/java/cn/escheduler/dao/entity/AlertGroup.java (42)
83. escheduler-dao/src/main/java/cn/escheduler/dao/entity/Command.java (24)
84. escheduler-dao/src/main/java/cn/escheduler/dao/entity/CommandCount.java (58)
85. escheduler-dao/src/main/java/cn/escheduler/dao/entity/CycleDependency.java (2)
86. escheduler-dao/src/main/java/cn/escheduler/dao/entity/DataSource.java (12)
87. escheduler-dao/src/main/java/cn/escheduler/dao/entity/DatasourceUser.java (8)
88. escheduler-dao/src/main/java/cn/escheduler/dao/entity/DefinitionGroupByUser.java (2)
89. escheduler-dao/src/main/java/cn/escheduler/dao/entity/Dependency.java (2)
90. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ErrorCommand.java (8)
91. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ExecuteStatusCount.java (2)
92. escheduler-dao/src/main/java/cn/escheduler/dao/entity/MonitorRecord.java (2)
93. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessData.java (2)
94. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessDefinition.java (41)
95. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstance.java (13)
96. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstanceMap.java (10)
97. escheduler-dao/src/main/java/cn/escheduler/dao/entity/Project.java (20)
98. escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProjectUser.java (141)
99. escheduler-dao/src/main/java/cn/escheduler/dao/entity/Queue.java (10)
100. escheduler-dao/src/main/java/cn/escheduler/dao/entity/Resource.java (10)

Some files were not shown because too many files have changed in this diff.

escheduler-alert/src/main/java/cn/escheduler/alert/AlertServer.java (2)

@@ -20,7 +20,7 @@ import cn.escheduler.alert.runner.AlertSender;
 import cn.escheduler.alert.utils.Constants;
 import cn.escheduler.common.thread.Stopper;
 import cn.escheduler.dao.AlertDao;
-import cn.escheduler.dao.model.Alert;
+import cn.escheduler.dao.entity.Alert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;

escheduler-alert/src/main/java/cn/escheduler/alert/manager/EnterpriseWeChatManager.java (2)

@@ -18,7 +18,7 @@ package cn.escheduler.alert.manager;
 import cn.escheduler.alert.utils.Constants;
 import cn.escheduler.alert.utils.EnterpriseWeChatUtils;
-import cn.escheduler.dao.model.Alert;
+import cn.escheduler.dao.entity.Alert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-alert/src/main/java/cn/escheduler/alert/manager/MsgManager.java (2)

@@ -16,7 +16,7 @@
  */
 package cn.escheduler.alert.manager;
-import cn.escheduler.dao.model.Alert;
+import cn.escheduler.dao.entity.Alert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-alert/src/main/java/cn/escheduler/alert/runner/AlertSender.java (4)

@@ -23,8 +23,8 @@ import cn.escheduler.alert.utils.EnterpriseWeChatUtils;
 import cn.escheduler.common.enums.AlertStatus;
 import cn.escheduler.common.enums.AlertType;
 import cn.escheduler.dao.AlertDao;
-import cn.escheduler.dao.model.Alert;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.Alert;
+import cn.escheduler.dao.entity.User;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;

escheduler-alert/src/main/java/cn/escheduler/alert/utils/Constants.java (2)

@@ -26,7 +26,7 @@ public class Constants {
      */
     public static final String ALERT_PROPERTIES_PATH = "/alert.properties";
-    public static final String DATA_SOURCE_PROPERTIES_PATH = "/dao/data_source.properties";
+    public static final String DATA_SOURCE_PROPERTIES_PATH = "/dao/data_source.properties__";
     public static final String SINGLE_SLASH = "/";

escheduler-alert/src/main/java/cn/escheduler/alert/utils/EnterpriseWeChatUtils.java (2)

@@ -17,7 +17,7 @@
 package cn.escheduler.alert.utils;
 import cn.escheduler.common.enums.ShowType;
-import cn.escheduler.dao.model.Alert;
+import cn.escheduler.dao.entity.Alert;
 import com.alibaba.fastjson.JSON;
 import com.google.common.reflect.TypeToken;

escheduler-alert/src/test/java/cn/escheduler/alert/utils/MailUtilsTest.java (4)

@@ -21,8 +21,8 @@ import cn.escheduler.common.enums.AlertType;
 import cn.escheduler.common.enums.ShowType;
 import cn.escheduler.dao.AlertDao;
 import cn.escheduler.dao.DaoFactory;
-import cn.escheduler.dao.model.Alert;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.Alert;
+import cn.escheduler.dao.entity.User;
 import freemarker.cache.StringTemplateLoader;
 import freemarker.template.Configuration;
 import freemarker.template.Template;

escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java (3)

@@ -19,11 +19,10 @@ package cn.escheduler.api.controller;
 import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.service.AccessTokenService;
-import cn.escheduler.api.service.UsersService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/AlertGroupController.java (2)

@@ -21,7 +21,7 @@ import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.AlertType;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/BaseController.java (2)

@@ -20,7 +20,7 @@ import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.PageInfo;
 import cn.escheduler.api.utils.Result;
-import cn.escheduler.dao.model.Resource;
+import cn.escheduler.dao.entity.Resource;
 import org.apache.commons.lang3.StringUtils;
 import javax.servlet.http.HttpServletRequest;

escheduler-api/src/main/java/cn/escheduler/api/controller/DataAnalysisController.java (2)

@@ -20,7 +20,7 @@ package cn.escheduler.api.controller;
 import cn.escheduler.api.service.DataAnalysisService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java (7)

@@ -18,15 +18,12 @@ package cn.escheduler.api.controller;
 import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.service.DataSourceService;
-import cn.escheduler.api.utils.CheckUtils;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.DbType;
-import cn.escheduler.common.enums.ResUploadType;
 import cn.escheduler.common.utils.CommonUtils;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.common.utils.PropertyUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

@@ -38,11 +35,9 @@ import org.springframework.http.HttpStatus;
 import org.springframework.web.bind.annotation.*;
 import springfox.documentation.annotations.ApiIgnore;
-import java.util.HashMap;
 import java.util.Map;
 import static cn.escheduler.api.enums.Status.*;
-import static cn.escheduler.common.utils.PropertyUtils.getBoolean;
 /**

escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java (2)

@@ -23,7 +23,7 @@ import cn.escheduler.api.service.ExecutorService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.*;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-api/src/main/java/cn/escheduler/api/controller/LoggerController.java (2)

@@ -20,7 +20,7 @@ package cn.escheduler.api.controller;
 import cn.escheduler.api.service.LoggerService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/LoginController.java (5)

@@ -22,14 +22,13 @@ import cn.escheduler.api.service.SessionService;
 import cn.escheduler.api.service.UsersService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.*;
 import org.apache.commons.httpclient.HttpStatus;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.i18n.LocaleContextHolder;
 import org.springframework.web.bind.annotation.*;
 import springfox.documentation.annotations.ApiIgnore;

@@ -37,8 +36,6 @@ import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.util.Locale;
 import static cn.escheduler.api.enums.Status.*;
 /**

escheduler-api/src/main/java/cn/escheduler/api/controller/MonitorController.java (6)

@@ -18,10 +18,9 @@ package cn.escheduler.api.controller;
 import cn.escheduler.api.service.MonitorService;
-import cn.escheduler.api.service.ServerService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
 import org.slf4j.Logger;

@@ -46,9 +45,6 @@ public class MonitorController extends BaseController{
     private static final Logger logger = LoggerFactory.getLogger(MonitorController.class);
-    @Autowired
-    private ServerService serverService;
     @Autowired
     private MonitorService monitorService;

escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java (2)

@@ -21,7 +21,7 @@ import cn.escheduler.api.service.ProcessDefinitionService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java (2)

@@ -25,7 +25,7 @@ import cn.escheduler.common.enums.Flag;
 import cn.escheduler.common.queue.ITaskQueue;
 import cn.escheduler.common.queue.TaskQueueFactory;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-api/src/main/java/cn/escheduler/api/controller/ProjectController.java (2)

@@ -23,7 +23,7 @@ import cn.escheduler.api.service.ProjectService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java (2)

@@ -22,7 +22,7 @@ import cn.escheduler.api.service.QueueService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java (2)

@@ -24,7 +24,7 @@ import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.ResourceType;
 import cn.escheduler.common.enums.UdfType;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java (2)

@@ -25,7 +25,7 @@ import cn.escheduler.common.enums.Priority;
 import cn.escheduler.common.enums.ReleaseState;
 import cn.escheduler.common.enums.WarningType;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-api/src/main/java/cn/escheduler/api/controller/TaskInstanceController.java (2)

@@ -22,7 +22,7 @@ import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.ExecutionStatus;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java (6)

@@ -20,11 +20,7 @@ package cn.escheduler.api.controller;
 import cn.escheduler.api.service.TaskRecordService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
-import cn.escheduler.dao.model.User;
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
+import cn.escheduler.dao.entity.User;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;

escheduler-api/src/main/java/cn/escheduler/api/controller/TenantController.java (3)

@@ -22,8 +22,7 @@ import cn.escheduler.api.service.TenantService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
-import org.apache.commons.lang3.StringUtils;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java (2)

@@ -22,7 +22,7 @@ import cn.escheduler.api.service.UsersService;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java (2)

@@ -21,7 +21,7 @@ import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.service.WorkerGroupService;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.utils.ParameterUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
 import io.swagger.annotations.ApiImplicitParams;

escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java (9)

@@ -16,6 +16,7 @@
  */
 package cn.escheduler.api.dto;
+import cn.escheduler.common.enums.CommandType;
 import cn.escheduler.common.enums.ExecutionStatus;
 /**

@@ -25,10 +26,10 @@ public class CommandStateCount {
     private int errorCount;
     private int normalCount;
-    private ExecutionStatus commandState;
+    private CommandType commandState;
     public CommandStateCount(){}
-    public CommandStateCount(int errorCount, int normalCount, ExecutionStatus commandState) {
+    public CommandStateCount(int errorCount, int normalCount, CommandType commandState) {
         this.errorCount = errorCount;
         this.normalCount = normalCount;
         this.commandState = commandState;

@@ -50,11 +51,11 @@ public class CommandStateCount {
         this.normalCount = normalCount;
     }
-    public ExecutionStatus getCommandState() {
+    public CommandType getCommandState() {
         return commandState;
     }
-    public void setCommandState(ExecutionStatus commandState) {
+    public void setCommandState(CommandType commandState) {
         this.commandState = commandState;
     }
 }

escheduler-api/src/main/java/cn/escheduler/api/dto/DefineUserDto.java (2)

@@ -16,7 +16,7 @@
  */
 package cn.escheduler.api.dto;
-import cn.escheduler.dao.model.DefinitionGroupByUser;
+import cn.escheduler.dao.entity.DefinitionGroupByUser;
 import java.util.List;

escheduler-api/src/main/java/cn/escheduler/api/dto/TaskCountDto.java (2)

@@ -17,7 +17,7 @@
 package cn.escheduler.api.dto;
 import cn.escheduler.common.enums.ExecutionStatus;
-import cn.escheduler.dao.model.ExecuteStatusCount;
+import cn.escheduler.dao.entity.ExecuteStatusCount;
 import java.util.ArrayList;
 import java.util.List;

escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java (6)

@@ -18,9 +18,9 @@ package cn.escheduler.api.interceptor;
 import cn.escheduler.api.service.SessionService;
 import cn.escheduler.api.utils.Constants;
+import cn.escheduler.dao.entity.Session;
+import cn.escheduler.dao.entity.User;
 import cn.escheduler.dao.mapper.UserMapper;
-import cn.escheduler.dao.model.Session;
-import cn.escheduler.dao.model.User;
 import org.apache.commons.httpclient.HttpStatus;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;

@@ -78,7 +78,7 @@ public class LoginHandlerInterceptor implements HandlerInterceptor {
         }
         //get user object from session
-        user = userMapper.queryById(session.getUserId());
+        user = userMapper.selectById(session.getUserId());
         // if user is null
         if (user == null) {

escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java (29)

@@ -17,20 +17,19 @@
 package cn.escheduler.api.service;
 import cn.escheduler.api.enums.Status;
-import cn.escheduler.api.utils.CheckUtils;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.PageInfo;
-import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.utils.*;
+import cn.escheduler.dao.entity.AccessToken;
+import cn.escheduler.dao.entity.User;
 import cn.escheduler.dao.mapper.*;
-import cn.escheduler.dao.model.*;
-import org.apache.commons.lang3.StringUtils;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
-import org.springframework.transaction.annotation.Transactional;
 import java.util.*;

@@ -59,18 +58,14 @@ public class AccessTokenService extends BaseService {
         Map<String, Object> result = new HashMap<>(5);
         PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
-        Integer count;
-        List<AccessToken> accessTokenList;
+        Page<AccessToken> page = new Page(pageNo, pageSize);
+        int userId = loginUser.getId();
         if (loginUser.getUserType() == UserType.ADMIN_USER){
-            count = accessTokenMapper.countAccessTokenPaging(0,searchVal);
-            accessTokenList = accessTokenMapper.queryAccessTokenPaging(0,searchVal, pageInfo.getStart(), pageSize);
-        }else {
-            count = accessTokenMapper.countAccessTokenPaging(loginUser.getId(),searchVal);
-            accessTokenList = accessTokenMapper.queryAccessTokenPaging(loginUser.getId(),searchVal, pageInfo.getStart(), pageSize);
+            userId = 0;
         }
-        pageInfo.setTotalCount(count);
-        pageInfo.setLists(accessTokenList);
+        IPage<AccessToken> accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId);
+        pageInfo.setTotalCount((int)accessTokenList.getTotal());
+        pageInfo.setLists(accessTokenList.getRecords());
         result.put(Constants.DATA_LIST, pageInfo);
         putMsg(result, Status.SUCCESS);

@@ -155,7 +150,7 @@ public class AccessTokenService extends BaseService {
             return result;
         }
-        accessTokenMapper.delete(id);
+        accessTokenMapper.deleteById(id);
         putMsg(result, Status.SUCCESS);
         return result;
     }

@@ -177,7 +172,7 @@
         accessToken.setToken(token);
         accessToken.setUpdateTime(new Date());
-        accessTokenMapper.update(accessToken);
+        accessTokenMapper.updateById(accessToken);
         putMsg(result, Status.SUCCESS);
         return result;
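
For comparison, the same paging could also be expressed without a dedicated query through BaseMapper.selectPage and a QueryWrapper. This is a hedged sketch and is not what the commit does (the commit keeps the custom selectAccessTokenPage query shown above); the column names "token" and "user_id" are assumptions for illustration.

// Hedged alternative sketch: paging via the generic MyBatis-Plus BaseMapper API
// instead of a custom selectAccessTokenPage query. Column names are assumptions.
import cn.escheduler.dao.entity.AccessToken;
import cn.escheduler.dao.mapper.AccessTokenMapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils;

public class AccessTokenPagingExample {

    public static IPage<AccessToken> pageTokens(AccessTokenMapper mapper,
                                                int userId, String searchVal,
                                                int pageNo, int pageSize) {
        QueryWrapper<AccessToken> wrapper = new QueryWrapper<>();
        if (userId != 0) {
            wrapper.eq("user_id", userId);      // assumed column name
        }
        if (StringUtils.isNotBlank(searchVal)) {
            wrapper.like("token", searchVal);   // assumed column name
        }
        // the pagination interceptor rewrites this into a LIMIT query plus a COUNT query
        return mapper.selectPage(new Page<>(pageNo, pageSize), wrapper);
    }
}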

escheduler-api/src/main/java/cn/escheduler/api/service/AlertGroupService.java (34)

@@ -22,11 +22,13 @@ import cn.escheduler.api.utils.PageInfo;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.AlertType;
 import cn.escheduler.common.enums.UserType;
+import cn.escheduler.dao.entity.AlertGroup;
+import cn.escheduler.dao.entity.User;
+import cn.escheduler.dao.entity.UserAlertGroup;
 import cn.escheduler.dao.mapper.AlertGroupMapper;
 import cn.escheduler.dao.mapper.UserAlertGroupMapper;
-import cn.escheduler.dao.model.AlertGroup;
-import cn.escheduler.dao.model.User;
-import cn.escheduler.dao.model.UserAlertGroup;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -80,14 +82,12 @@ public class AlertGroupService {
         Map<String, Object> result = new HashMap<>(5);
-        Integer count = alertGroupMapper.countAlertGroupPaging(searchVal);
+        Page<AlertGroup> page = new Page(pageNo, pageSize);
+        IPage<AlertGroup> alertGroupIPage = alertGroupMapper.queryAlertGroupPage(
+                page, searchVal);
         PageInfo<AlertGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
-        List<AlertGroup> scheduleList = alertGroupMapper.queryAlertGroupPaging(searchVal, pageInfo.getStart(), pageSize);
-        pageInfo.setTotalCount(count);
-        pageInfo.setLists(scheduleList);
+        pageInfo.setTotalCount((int)alertGroupIPage.getTotal());
+        pageInfo.setLists(alertGroupIPage.getRecords());
         result.put(Constants.DATA_LIST, pageInfo);
         putMsg(result, Status.SUCCESS);

@@ -115,7 +115,7 @@
         alertGroup.setGroupName(groupName);
         alertGroup.setGroupType(groupType);
-        alertGroup.setDesc(desc);
+        alertGroup.setDescription(desc);
         alertGroup.setCreateTime(now);
         alertGroup.setUpdateTime(now);

@@ -158,7 +158,7 @@
         }
-        AlertGroup alertGroup = alertGroupMapper.queryById(id);
+        AlertGroup alertGroup = alertGroupMapper.selectById(id);
         if (alertGroup == null) {
             putMsg(result, Status.ALERT_GROUP_NOT_EXIST);

@@ -175,10 +175,10 @@
         if (groupType != null) {
             alertGroup.setGroupType(groupType);
         }
-        alertGroup.setDesc(desc);
+        alertGroup.setDescription(desc);
         alertGroup.setUpdateTime(now);
         // updateProcessInstance
-        alertGroupMapper.update(alertGroup);
+        alertGroupMapper.updateById(alertGroup);
         putMsg(result, Status.SUCCESS);
         return result;
     }

@@ -200,7 +200,7 @@
         }
-        alertGroupMapper.delete(id);
+        alertGroupMapper.deleteById(id);
         putMsg(result, Status.SUCCESS);
         return result;
     }

@@ -254,8 +254,8 @@
      */
     public Result verifyGroupName(User loginUser, String groupName) {
         Result result = new Result();
-        AlertGroup alertGroup = alertGroupMapper.queryByGroupName(groupName);
-        if (alertGroup != null) {
+        List<AlertGroup> alertGroup = alertGroupMapper.queryByGroupName(groupName);
+        if (alertGroup != null && alertGroup.size() > 0) {
             logger.error("group {} has exist, can't create again.", groupName);
             result.setCode(Status.ALERT_GROUP_EXIST.getCode());
             result.setMsg(Status.ALERT_GROUP_EXIST.getMsg());
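
The service above now calls setDescription() instead of setDesc(), which suggests the new entity in cn.escheduler.dao.entity renamed the field. The following is a hypothetical sketch of how such a MyBatis-Plus entity could be declared; the table name, column mapping, and omitted fields are assumptions for illustration, not taken from this commit.

// Hypothetical sketch of a MyBatis-Plus entity (assumed names, not from this commit):
// a `description` Java field mapped onto a `desc` column would explain the rename.
package cn.escheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.util.Date;

@TableName("t_escheduler_alertgroup")           // assumed table name
public class AlertGroup {

    @TableId(value = "id", type = IdType.AUTO)  // auto-increment primary key
    private int id;

    private String groupName;

    @TableField("desc")                         // assumed: column `desc`, field `description`
    private String description;

    private Date createTime;
    private Date updateTime;

    // other fields (groupType, ...) and their accessors omitted for brevity
    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }
}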

escheduler-api/src/main/java/cn/escheduler/api/service/BaseDAGService.java (4)

@@ -22,8 +22,8 @@ import cn.escheduler.common.model.TaskNodeRelation;
 import cn.escheduler.common.process.ProcessDag;
 import cn.escheduler.common.utils.CollectionUtils;
 import cn.escheduler.common.utils.JSONUtils;
-import cn.escheduler.dao.model.ProcessData;
-import cn.escheduler.dao.model.ProcessInstance;
+import cn.escheduler.dao.entity.ProcessData;
+import cn.escheduler.dao.entity.ProcessInstance;
 import java.util.ArrayList;
 import java.util.List;

escheduler-api/src/main/java/cn/escheduler/api/service/BaseService.java (2)

@@ -21,7 +21,7 @@ import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.utils.HadoopUtils;
-import cn.escheduler.dao.model.User;
+import cn.escheduler.dao.entity.User;
 import org.apache.commons.lang3.StringUtils;
 import javax.servlet.http.Cookie;

escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java (100)

@@ -22,17 +22,14 @@ import cn.escheduler.api.dto.DefineUserDto;
 import cn.escheduler.api.dto.TaskCountDto;
 import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.utils.Constants;
-import cn.escheduler.common.enums.ExecutionStatus;
+import cn.escheduler.common.enums.CommandType;
 import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.queue.ITaskQueue;
 import cn.escheduler.common.queue.TaskQueueFactory;
 import cn.escheduler.common.utils.DateUtils;
+import cn.escheduler.dao.entity.*;
 import cn.escheduler.dao.mapper.*;
-import cn.escheduler.dao.model.DefinitionGroupByUser;
-import cn.escheduler.dao.model.ExecuteStatusCount;
-import cn.escheduler.dao.model.Project;
-import cn.escheduler.dao.model.User;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;

@@ -83,7 +80,7 @@ public class DataAnalysisService {
         Map<String, Object> result = new HashMap<>(5);
         if(projectId != 0){
-            Project project = projectMapper.queryById(projectId);
+            Project project = projectMapper.selectById(projectId);
             result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));
             if (getResultStatus(result)){

@@ -109,7 +106,7 @@
         List<ExecuteStatusCount> taskInstanceStateCounts =
                 taskInstanceMapper.countTaskInstanceStateByUser(loginUser.getId(),
-                        loginUser.getUserType(), start, end, projectId);
+                        loginUser.getUserType(), start, end, String.valueOf(projectId));
         TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts);
         if (taskInstanceStateCounts != null) {

@@ -139,7 +136,7 @@
         Map<String, Object> result = new HashMap<>(5);
         if(projectId != 0){
-            Project project = projectMapper.queryById(projectId);
+            Project project = projectMapper.selectById(projectId);
             result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));
             if (getResultStatus(result)){

@@ -159,7 +156,7 @@
         }
         List<ExecuteStatusCount> processInstanceStateCounts =
                 processInstanceMapper.countInstanceStateByUser(loginUser.getId(),
-                        loginUser.getUserType(), start, end, projectId );
+                        loginUser.getUserType(), start, end, String.valueOf(projectId));
         TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
         if (processInstanceStateCounts != null) {

@@ -182,7 +179,7 @@
     public Map<String,Object> countDefinitionByUser(User loginUser, int projectId) {
         Map<String, Object> result = new HashMap<>();
-        List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(loginUser.getId(), loginUser.getUserType(), projectId);
+        List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
+                loginUser.getId(), loginUser.getUserType(), String.valueOf(projectId));
         DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
         result.put(Constants.DATA_LIST, dto);

@@ -226,7 +224,7 @@
         Map<String, Object> result = new HashMap<>(5);
         if(projectId != 0){
-            Project project = projectMapper.queryById(projectId);
+            Project project = projectMapper.selectById(projectId);
             result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));
             if (getResultStatus(result)){

@@ -250,18 +248,32 @@
             return result;
         }
+        List<Integer> projectIds = new ArrayList<>();
+        if(projectId !=0){
+            projectIds.add(projectId);
+        }else if(loginUser.getUserType() == UserType.GENERAL_USER){
+            List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(loginUser.getId());
+            for(Project project : authedProjectList){
+                projectIds.add(project.getId());
+            }
+        }
+        Integer[] projectIdArray = projectIds.toArray(new Integer[projectIds.size()]);
         // count command state
-        List<ExecuteStatusCount> commandStateCounts =
-                commandMapper.countCommandState(loginUser.getId(),
-                        loginUser.getUserType(), start, end, projectId);
+        List<CommandCount> commandStateCounts =
+                commandMapper.countCommandState(
+                        loginUser.getId(),
+                        start,
+                        end,
+                        projectIdArray);
         // count error command state
-        List<ExecuteStatusCount> errorCommandStateCounts =
-                errorCommandMapper.countCommandState(loginUser.getId(),
-                        loginUser.getUserType(), start, end, projectId);
+        List<CommandCount> errorCommandStateCounts =
+                errorCommandMapper.countCommandState(
+                        start, end, projectIdArray);
         //
-        Map<ExecutionStatus,Map<String,Integer>> dataMap = new HashMap<>();
+        Map<CommandType,Map<String,Integer>> dataMap = new HashMap<>();
         Map<String,Integer> commonCommand = new HashMap<>();
         commonCommand.put("commandState",0);

@@ -269,37 +281,37 @@
         // init data map
-        dataMap.put(ExecutionStatus.SUBMITTED_SUCCESS,commonCommand);
-        dataMap.put(ExecutionStatus.RUNNING_EXEUTION,commonCommand);
-        dataMap.put(ExecutionStatus.READY_PAUSE,commonCommand);
-        dataMap.put(ExecutionStatus.PAUSE,commonCommand);
-        dataMap.put(ExecutionStatus.READY_STOP,commonCommand);
-        dataMap.put(ExecutionStatus.STOP,commonCommand);
-        dataMap.put(ExecutionStatus.FAILURE,commonCommand);
-        dataMap.put(ExecutionStatus.SUCCESS,commonCommand);
-        dataMap.put(ExecutionStatus.NEED_FAULT_TOLERANCE,commonCommand);
-        dataMap.put(ExecutionStatus.KILL,commonCommand);
-        dataMap.put(ExecutionStatus.WAITTING_THREAD,commonCommand);
-        dataMap.put(ExecutionStatus.WAITTING_DEPEND,commonCommand);
+        // dataMap.put(ExecutionStatus.SUBMITTED_SUCCESS,commonCommand);
+        // dataMap.put(ExecutionStatus.RUNNING_EXEUTION,commonCommand);
+        // dataMap.put(ExecutionStatus.READY_PAUSE,commonCommand);
+        // dataMap.put(ExecutionStatus.PAUSE,commonCommand);
+        // dataMap.put(ExecutionStatus.READY_STOP,commonCommand);
+        // dataMap.put(ExecutionStatus.STOP,commonCommand);
+        // dataMap.put(ExecutionStatus.FAILURE,commonCommand);
+        // dataMap.put(ExecutionStatus.SUCCESS,commonCommand);
+        // dataMap.put(ExecutionStatus.NEED_FAULT_TOLERANCE,commonCommand);
+        // dataMap.put(ExecutionStatus.KILL,commonCommand);
+        // dataMap.put(ExecutionStatus.WAITTING_THREAD,commonCommand);
+        // dataMap.put(ExecutionStatus.WAITTING_DEPEND,commonCommand);
         // put command state
-        for (ExecuteStatusCount executeStatusCount : commandStateCounts){
-            Map<String,Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getExecutionStatus()));
+        for (CommandCount executeStatusCount : commandStateCounts){
+            Map<String,Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
             commandStateCountsMap.put("commandState", executeStatusCount.getCount());
-            dataMap.put(executeStatusCount.getExecutionStatus(),commandStateCountsMap);
+            dataMap.put(executeStatusCount.getCommandType(),commandStateCountsMap);
         }
         // put error command state
-        for (ExecuteStatusCount errorExecutionStatus : errorCommandStateCounts){
-            Map<String,Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getExecutionStatus()));
+        for (CommandCount errorExecutionStatus : errorCommandStateCounts){
+            Map<String,Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
             errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount());
-            dataMap.put(errorExecutionStatus.getExecutionStatus(),errorCommandStateCountsMap);
+            dataMap.put(errorExecutionStatus.getCommandType(),errorCommandStateCountsMap);
         }
         List<CommandStateCount> list = new ArrayList<>();
-        Iterator<Map.Entry<ExecutionStatus, Map<String, Integer>>> iterator = dataMap.entrySet().iterator();
+        Iterator<Map.Entry<CommandType, Map<String, Integer>>> iterator = dataMap.entrySet().iterator();
         while (iterator.hasNext()){
-            Map.Entry<ExecutionStatus, Map<String, Integer>> next = iterator.next();
+            Map.Entry<CommandType, Map<String, Integer>> next = iterator.next();
             CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"),
                     next.getValue().get("commandState"),next.getKey());
             list.add(commandStateCount);

@@ -319,7 +331,7 @@
     public Map<String, Object> countQueueState(User loginUser, int projectId) {
         Map<String, Object> result = new HashMap<>(5);
         if(projectId != 0){
-            Project project = projectMapper.queryById(projectId);
+            Project project = projectMapper.selectById(projectId);
             result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));
             if (getResultStatus(result)){

@@ -367,11 +379,15 @@
         Integer taskKillCount = 0;
         if (tasksQueueIds.length != 0){
-            taskQueueCount = taskInstanceMapper.countTask(loginUser.getId(),loginUser.getUserType(),projectId, tasksQueueIds);
+            taskQueueCount = taskInstanceMapper.countTask(
+                    loginUser.getId(),loginUser.getUserType(),String.valueOf(projectId),
+                    StringUtils.join(tasksQueueIds, ","));
         }
         if (tasksKillIds.length != 0){
-            taskKillCount = taskInstanceMapper.countTask(loginUser.getId(),loginUser.getUserType(),projectId, tasksKillIds);
+            taskKillCount = taskInstanceMapper.countTask(
+                    loginUser.getId(),loginUser.getUserType(),String.valueOf(projectId),
+                    StringUtils.join(tasksKillIds, ","));
         }

74 escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java

@@ -25,14 +25,15 @@ import cn.escheduler.common.enums.UserType;
  import cn.escheduler.common.job.db.*;
  import cn.escheduler.common.utils.CommonUtils;
  import cn.escheduler.common.utils.JSONUtils;
+ import cn.escheduler.dao.entity.DataSource;
+ import cn.escheduler.dao.entity.Resource;
+ import cn.escheduler.dao.entity.User;
  import cn.escheduler.dao.mapper.DataSourceMapper;
- import cn.escheduler.dao.mapper.DatasourceUserMapper;
- import cn.escheduler.dao.mapper.ProjectMapper;
- import cn.escheduler.dao.model.DataSource;
- import cn.escheduler.dao.model.Resource;
- import cn.escheduler.dao.model.User;
+ import cn.escheduler.dao.mapper.DataSourceUserMapper;
  import com.alibaba.fastjson.JSONObject;
  import com.alibaba.fastjson.TypeReference;
+ import com.baomidou.mybatisplus.core.metadata.IPage;
+ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.slf4j.Logger;
@@ -73,7 +74,7 @@ public class DataSourceService extends BaseService{
  @Autowired
- private DatasourceUserMapper datasourceUserMapper;
+ private DataSourceUserMapper datasourceUserMapper;
  /**
  * create data source
@@ -139,7 +140,7 @@ public class DataSourceService extends BaseService{
  Map<String, Object> result = new HashMap<>();
  // determine whether the data source exists
- DataSource dataSource = dataSourceMapper.queryById(id);
+ DataSource dataSource = dataSourceMapper.selectById(id);
  if (dataSource == null) {
  putMsg(result, Status.RESOURCE_NOT_EXIST);
  return result;
@@ -164,7 +165,7 @@ public class DataSourceService extends BaseService{
  dataSource.setType(type);
  dataSource.setConnectionParams(parameter);
  dataSource.setUpdateTime(now);
- dataSourceMapper.update(dataSource);
+ dataSourceMapper.updateById(dataSource);
  putMsg(result, Status.SUCCESS);
  return result;
  }
@@ -185,7 +186,7 @@ public class DataSourceService extends BaseService{
  public Map<String, Object> queryDataSource(int id) {
  Map<String, Object> result = new HashMap<String, Object>(5);
- DataSource dataSource = dataSourceMapper.queryById(id);
+ DataSource dataSource = dataSourceMapper.selectById(id);
  if (dataSource == null) {
  putMsg(result, Status.RESOURCE_NOT_EXIST);
  return result;
@@ -265,14 +266,20 @@ public class DataSourceService extends BaseService{
  */
  public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
  Map<String, Object> result = new HashMap<>();
- Integer count = getTotalCount(loginUser);
+ IPage<DataSource> dataSourceList = null;
+ Page<DataSource> dataSourcePage = new Page(pageNo, pageSize);
+ if (isAdmin(loginUser)) {
+ dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
+ }else{
+ dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
+ }
+ List<DataSource> dataSources = dataSourceList.getRecords();
+ handlePasswd(dataSources);
  PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
- pageInfo.setTotalCount(count);
- List<DataSource> datasourceList = getDataSources(loginUser, searchVal, pageSize, pageInfo);
- pageInfo.setLists(datasourceList);
+ pageInfo.setTotalCount((int)(dataSourceList.getTotal()));
+ pageInfo.setLists(dataSources);
  result.put(Constants.DATA_LIST, pageInfo);
  putMsg(result, Status.SUCCESS);
@@ -289,17 +296,18 @@ public class DataSourceService extends BaseService{
  * @return
  */
  private List<DataSource> getDataSources(User loginUser, String searchVal, Integer pageSize, PageInfo pageInfo) {
- List<DataSource> dataSourceList = null;
+ IPage<DataSource> dataSourceList = null;
+ Page<DataSource> dataSourcePage = new Page(pageInfo.getStart(), pageSize);
  if (isAdmin(loginUser)) {
- dataSourceList = dataSourceMapper.queryAllDataSourcePaging(searchVal, pageInfo.getStart(), pageSize);
+ dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
  }else{
- dataSourceList = dataSourceMapper.queryDataSourcePaging(loginUser.getId(), searchVal,
- pageInfo.getStart(), pageSize);
+ dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
  }
- handlePasswd(dataSourceList);
- return dataSourceList;
+ List<DataSource> dataSources = dataSourceList.getRecords();
+ handlePasswd(dataSources);
+ return dataSources;
  }
@@ -319,20 +327,6 @@ public class DataSourceService extends BaseService{
  }
  }
- /**
- * get datasource total num
- *
- * @param loginUser
- * @return
- */
- private Integer getTotalCount(User loginUser) {
- if (isAdmin(loginUser)) {
- return dataSourceMapper.countAllDatasource();
- }
- return dataSourceMapper.countUserDatasource(loginUser.getId());
- }
  /**
  * query data resource list
  *
@@ -470,7 +464,7 @@ public class DataSourceService extends BaseService{
  * @return
  */
  public boolean connectionTest(User loginUser, int id) {
- DataSource dataSource = dataSourceMapper.queryById(id);
+ DataSource dataSource = dataSourceMapper.selectById(id);
  return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
  }
@@ -589,7 +583,7 @@ public class DataSourceService extends BaseService{
  Result result = new Result();
  try {
  //query datasource by id
- DataSource dataSource = dataSourceMapper.queryById(datasourceId);
+ DataSource dataSource = dataSourceMapper.selectById(datasourceId);
  if(dataSource == null){
  logger.error("resource id {} not exist", datasourceId);
  putMsg(result, Status.RESOURCE_NOT_EXIST);
@@ -599,7 +593,7 @@ public class DataSourceService extends BaseService{
  putMsg(result, Status.USER_NO_OPERATION_PERM);
  return result;
  }
- dataSourceMapper.deleteDataSourceById(datasourceId);
+ dataSourceMapper.deleteById(datasourceId);
  datasourceUserMapper.deleteByDatasourceId(datasourceId);
  putMsg(result, Status.SUCCESS);
  } catch (Exception e) {
@@ -634,7 +628,7 @@ public class DataSourceService extends BaseService{
  if (datasourceList != null && datasourceList.size() > 0) {
  datasourceSet = new HashSet<>(datasourceList);
- List<DataSource> authedDataSourceList = dataSourceMapper.authedDatasource(userId);
+ List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);
  Set<DataSource> authedDataSourceSet = null;
  if (authedDataSourceList != null && authedDataSourceList.size() > 0) {
@@ -665,7 +659,7 @@ public class DataSourceService extends BaseService{
  return result;
  }
- List<DataSource> authedDatasourceList = dataSourceMapper.authedDatasource(userId);
+ List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
  result.put(Constants.DATA_LIST, authedDatasourceList);
  putMsg(result, Status.SUCCESS);
  return result;
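The paging rewrite in queryDataSourceListPaging above is the pattern most of the services below repeat: build a MyBatis-Plus Page, receive an IPage back from the mapper, then copy getTotal()/getRecords() into the project's own PageInfo. A rough, self-contained sketch of that flow (SimplePageInfo stands in for the real PageInfo helper, and the hard-coded Page values replace a real mapper call):

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

import java.util.Arrays;
import java.util.List;

public class PagingSketch {

    // Stand-in for the project's PageInfo: only the total and the record list matter here.
    static class SimplePageInfo<T> {
        int totalCount;
        List<T> lists;
    }

    // In the real services the IPage comes from a mapper call such as
    // dataSourceMapper.selectPaging(page, userId, searchVal).
    static <T> SimplePageInfo<T> toPageInfo(IPage<T> iPage) {
        SimplePageInfo<T> pageInfo = new SimplePageInfo<>();
        pageInfo.totalCount = (int) iPage.getTotal(); // same narrowing cast the diff uses
        pageInfo.lists = iPage.getRecords();
        return pageInfo;
    }

    public static void main(String[] args) {
        // Page carries pageNo/pageSize into the mapper; the pagination plugin fills total/records.
        Page<String> page = new Page<>(1, 10);
        page.setTotal(42);
        page.setRecords(Arrays.asList("a", "b"));
        SimplePageInfo<String> info = toPageInfo(page);
        System.out.println(info.totalCount + " rows in total, current page: " + info.lists);
    }
}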

18 escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java

@@ -24,10 +24,10 @@ import cn.escheduler.common.enums.*;
  import cn.escheduler.common.utils.DateUtils;
  import cn.escheduler.common.utils.JSONUtils;
  import cn.escheduler.dao.ProcessDao;
+ import cn.escheduler.dao.entity.*;
  import cn.escheduler.dao.mapper.ProcessDefinitionMapper;
  import cn.escheduler.dao.mapper.ProcessInstanceMapper;
  import cn.escheduler.dao.mapper.ProjectMapper;
- import cn.escheduler.dao.model.*;
  import org.apache.commons.lang3.StringUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
@@ -104,7 +104,7 @@ public class ExecutorService extends BaseService{
  }
  // check process define release state
- ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineId(processDefinitionId);
+ ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId);
  result = checkProcessDefinitionValid(processDefinition, processDefinitionId);
  if(result.get(Constants.STATUS) != Status.SUCCESS){
  return result;
@@ -127,7 +127,9 @@ public class ExecutorService extends BaseService{
  /**
  * according to the process definition ID updateProcessInstance and CC recipient
  */
- processDefinitionMapper.updateReceiversAndCcById(receivers,receiversCc,processDefinitionId);
+ processDefinition.setReceivers(receivers);
+ processDefinition.setReceiversCc(receiversCc);
+ processDefinitionMapper.updateById(processDefinition);
  putMsg(result, Status.SUCCESS);
  } else {
  putMsg(result, Status.START_PROCESS_INSTANCE_ERROR);
@@ -367,10 +369,12 @@ public class ExecutorService extends BaseService{
  logger.error("process definition id is null");
  putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id");
  }
- List<String> ids = new ArrayList<>();
+ List<Integer> ids = new ArrayList<>();
  processDao.recurseFindSubProcessId(processDefineId, ids);
+ Integer[] idArray = ids.toArray(new Integer[ids.size()]);
  if (ids.size() > 0){
- List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(ids);
+ List<ProcessDefinition> processDefinitionList;
+ processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray);
  if (processDefinitionList != null && processDefinitionList.size() > 0){
  for (ProcessDefinition processDefinition : processDefinitionList){
  /**
@@ -402,13 +406,13 @@ public class ExecutorService extends BaseService{
  throw new RuntimeException("You must set values for parameters processDefineId or processInstanceId");
  }
  if(processDefineId == null && processInstanceId != null) {
- ProcessInstance processInstance = processInstanceMapper.queryById(processInstanceId);
+ ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
  if (processInstance == null) {
  throw new RuntimeException("processInstanceId is not exists");
  }
  processDefineId = processInstance.getProcessDefinitionId();
  }
- ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineId(processDefineId);
+ ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefineId);
  if (processDefinition == null){
  throw new RuntimeException(String.format("processDefineId %d is not exists",processDefineId));
  }
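The second hunk above swaps the dedicated updateReceiversAndCcById SQL for the usual MyBatis-Plus update style: load the row, mutate the fields, write it back with updateById. SchedulerService below makes the same change. A hedged sketch with a hypothetical entity and mapper pair (not the project's real classes):

import com.baomidou.mybatisplus.core.mapper.BaseMapper;

// Hypothetical classes, used only to show the read-modify-updateById pattern.
class DemoProcessDefinition {
    private Integer id;
    private String receivers;
    private String receiversCc;

    void setReceivers(String receivers) { this.receivers = receivers; }
    void setReceiversCc(String receiversCc) { this.receiversCc = receiversCc; }
}

interface DemoProcessDefinitionMapper extends BaseMapper<DemoProcessDefinition> { }

class ReceiverUpdater {
    // Load the definition, change only the recipient fields, and let updateById
    // generate the UPDATE statement instead of keeping a purpose-built SQL method.
    static void updateReceivers(DemoProcessDefinitionMapper mapper, int definitionId,
                                String receivers, String receiversCc) {
        DemoProcessDefinition definition = mapper.selectById(definitionId);
        definition.setReceivers(receivers);
        definition.setReceiversCc(receiversCc);
        mapper.updateById(definition);
    }
}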

2 escheduler-api/src/main/java/cn/escheduler/api/service/LoggerService.java

@@ -21,7 +21,7 @@ import cn.escheduler.api.log.LogClient;
  import cn.escheduler.api.utils.Result;
  import cn.escheduler.common.Constants;
  import cn.escheduler.dao.ProcessDao;
- import cn.escheduler.dao.model.TaskInstance;
+ import cn.escheduler.dao.entity.TaskInstance;
  import org.apache.commons.lang3.StringUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

8 escheduler-api/src/main/java/cn/escheduler/api/service/MonitorService.java

@@ -22,9 +22,9 @@ import cn.escheduler.api.utils.ZookeeperMonitor;
  import cn.escheduler.common.enums.ZKNodeType;
  import cn.escheduler.dao.MonitorDBDao;
  import cn.escheduler.common.model.MasterServer;
- import cn.escheduler.dao.model.MonitorRecord;
- import cn.escheduler.dao.model.User;
- import cn.escheduler.dao.model.ZookeeperRecord;
+ import cn.escheduler.dao.entity.MonitorRecord;
+ import cn.escheduler.dao.entity.User;
+ import cn.escheduler.dao.entity.ZookeeperRecord;
  import org.springframework.stereotype.Service;
  import java.util.ArrayList;
@@ -107,7 +107,7 @@ public class MonitorService extends BaseService{
  return result;
  }
- private List<MasterServer> getServerListFromZK(boolean isMaster){
+ public List<MasterServer> getServerListFromZK(boolean isMaster){
  List<MasterServer> servers = new ArrayList<>();
  ZookeeperMonitor zookeeperMonitor = null;
  try{

53 escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java

@@ -32,11 +32,13 @@ import cn.escheduler.common.utils.CollectionUtils;
  import cn.escheduler.common.utils.DateUtils;
  import cn.escheduler.common.utils.JSONUtils;
  import cn.escheduler.dao.ProcessDao;
+ import cn.escheduler.dao.entity.*;
  import cn.escheduler.dao.mapper.*;
- import cn.escheduler.dao.model.*;
  import com.alibaba.fastjson.JSON;
  import com.alibaba.fastjson.JSONArray;
  import com.alibaba.fastjson.JSONObject;
+ import com.baomidou.mybatisplus.core.metadata.IPage;
+ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
  import com.fasterxml.jackson.core.JsonProcessingException;
  import org.apache.commons.lang3.ObjectUtils;
  import org.apache.commons.lang3.StringUtils;
@@ -138,7 +140,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  processDefine.setProjectId(project.getId());
  processDefine.setUserId(loginUser.getId());
  processDefine.setProcessDefinitionJson(processDefinitionJson);
- processDefine.setDesc(desc);
+ processDefine.setDescription(desc);
  processDefine.setLocations(locations);
  processDefine.setConnects(connects);
  processDefine.setTimeout(processData.getTimeout());
@@ -209,13 +211,13 @@ public class ProcessDefinitionService extends BaseDAGService {
  return checkResult;
  }
- Integer count = processDefineMapper.countDefineNumber(project.getId(), userId, searchVal);
+ Page<ProcessDefinition> page = new Page(pageNo, pageSize);
+ IPage<ProcessDefinition> processDefinitionIPage = processDefineMapper.queryDefineListPaging(
+ page, searchVal, userId, project.getId());
  PageInfo pageInfo = new PageInfo<ProcessData>(pageNo, pageSize);
- List<ProcessDefinition> resourceList = processDefineMapper.queryDefineListPaging(project.getId(),
- searchVal, userId, pageInfo.getStart(), pageSize);
- pageInfo.setTotalCount(count);
- pageInfo.setLists(resourceList);
+ pageInfo.setTotalCount((int)processDefinitionIPage.getTotal());
+ pageInfo.setLists(processDefinitionIPage.getRecords());
  result.put(Constants.DATA_LIST, pageInfo);
  putMsg(result, Status.SUCCESS);
@@ -242,7 +244,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  return checkResult;
  }
- ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processId);
+ ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
  if (processDefinition == null) {
  putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId);
  } else {
@@ -303,7 +305,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  processDefine.setReleaseState(ReleaseState.OFFLINE);
  processDefine.setProjectId(project.getId());
  processDefine.setProcessDefinitionJson(processDefinitionJson);
- processDefine.setDesc(desc);
+ processDefine.setDescription(desc);
  processDefine.setLocations(locations);
  processDefine.setConnects(connects);
  processDefine.setTimeout(processData.getTimeout());
@@ -318,7 +320,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  processDefine.setGlobalParamList(globalParamsList);
  processDefine.setUpdateTime(now);
  processDefine.setFlag(Flag.YES);
- if (processDefineMapper.update(processDefine) > 0) {
+ if (processDefineMapper.updateById(processDefine) > 0) {
  putMsg(result, Status.SUCCESS);
  } else {
@@ -374,7 +376,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  return checkResult;
  }
- ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
+ ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId);
  if (processDefinition == null) {
  putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId);
@@ -402,14 +404,14 @@ public class ProcessDefinitionService extends BaseDAGService {
  }else if(schedules.size() == 1){
  Schedule schedule = schedules.get(0);
  if(schedule.getReleaseState() == ReleaseState.OFFLINE){
- scheduleMapper.delete(schedule.getId());
+ scheduleMapper.deleteById(schedule.getId());
  }else if(schedule.getReleaseState() == ReleaseState.ONLINE){
  putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId());
  return result;
  }
  }
- int delete = processDefineMapper.delete(processDefinitionId);
+ int delete = processDefineMapper.deleteById(processDefinitionId);
  if (delete > 0) {
  putMsg(result, Status.SUCCESS);
@@ -489,21 +491,24 @@ public class ProcessDefinitionService extends BaseDAGService {
  }
  ReleaseState state = ReleaseState.getEnum(releaseState);
+ ProcessDefinition processDefinition = processDefineMapper.selectById(id);
  switch (state) {
  case ONLINE: {
- processDefineMapper.updateProcessDefinitionReleaseState(id, state);
+ processDefinition.setReleaseState(state);
+ processDefineMapper.updateById(processDefinition);
  break;
  }
  case OFFLINE: {
- processDefineMapper.updateProcessDefinitionReleaseState(id, state);
- List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(new int[]{id});
+ processDefinition.setReleaseState(state);
+ processDefineMapper.updateById(processDefinition);
+ List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(String.valueOf(id));
  for(Schedule schedule:scheduleList){
  logger.info("set schedule offline, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id);
  // set status
  schedule.setReleaseState(ReleaseState.OFFLINE);
- scheduleMapper.update(schedule);
+ scheduleMapper.updateById(schedule);
  deleteSchedule(project.getId(), schedule.getId());
  }
  break;
@@ -532,7 +537,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
  Status resultStatus = (Status) checkResult.get(Constants.STATUS);
  if (resultStatus == Status.SUCCESS) {
- ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
+ ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId);
  if (processDefinition != null) {
  JSONObject jsonObject = JSONUtils.parseObject(processDefinition.getProcessDefinitionJson());
  JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
@@ -542,7 +547,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  String taskType = taskNode.getString("type");
  if(taskType.equals(TaskType.SQL.name()) || taskType.equals(TaskType.PROCEDURE.name())){
  JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
- DataSource dataSource = dataSourceMapper.queryById((Integer) sqlParameters.get("datasource"));
+ DataSource dataSource = dataSourceMapper.selectById((Integer) sqlParameters.get("datasource"));
  if (dataSource != null) {
  sqlParameters.put("datasourceName", dataSource.getName());
  }
@@ -557,7 +562,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  row.put("projectName", processDefinition.getProjectName());
  row.put("processDefinitionName", processDefinition.getName());
  row.put("processDefinitionJson", processDefinition.getProcessDefinitionJson());
- row.put("processDefinitionDesc", processDefinition.getDesc());
+ row.put("processDefinitionDesc", processDefinition.getDescription());
  row.put("processDefinitionLocations", processDefinition.getLocations());
  row.put("processDefinitionConnects", processDefinition.getConnects());
@@ -575,7 +580,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  if(schedule.getId() == -1){
  row.put("scheduleWorkerGroupId", -1);
  }else{
- WorkerGroup workerGroup = workerGroupMapper.queryById(schedule.getId());
+ WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId());
  if(workerGroup != null){
  row.put("scheduleWorkerGroupName", workerGroup.getName());
  }
@@ -810,7 +815,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  public Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId) throws Exception {
  Map<String, Object> result = new HashMap<>();
- ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(defineId);
+ ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
  if (processDefinition == null) {
  logger.info("process define not exists");
  putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition.getId());
@@ -841,7 +846,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  Map<Integer, List<TaskNode>> taskNodeMap = new HashMap<>();
  String[] idList = defineIdList.split(",");
  List<String> definitionIdList = Arrays.asList(idList);
- List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList(definitionIdList);
+ List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList( definitionIdList.toArray(new Integer[definitionIdList.size()]));
  if (processDefinitionList == null || processDefinitionList.size() ==0) {
  logger.info("process definition not exists");
  putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList);
@@ -890,7 +895,7 @@ public class ProcessDefinitionService extends BaseDAGService {
  public Map<String, Object> viewTree(Integer processId, Integer limit) throws Exception {
  Map<String, Object> result = new HashMap<>();
- ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processId);
+ ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
  if (processDefinition == null) {
  logger.info("process define not exists");
  throw new RuntimeException("process define not exists");
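One detail worth flagging in the hunk above that splits defineIdList: the new queryDefinitionListByIdList overload takes an Integer[], yet the call appears to hand a List<String> to toArray(new Integer[...]), which would fail at runtime with an ArrayStoreException for a non-empty list. A conversion that actually parses each id would look roughly like the following (illustrative only, not code from this commit):

import java.util.Arrays;

public class IdListConversion {
    // Parse a comma-separated id string into the Integer[] that the new
    // queryDefinitionListByIdList(Integer[]) signature expects.
    static Integer[] toIdArray(String defineIdList) {
        return Arrays.stream(defineIdList.split(","))
                .map(String::trim)
                .map(Integer::valueOf)
                .toArray(Integer[]::new);
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(toIdArray("1, 2,3"))); // [1, 2, 3]
    }
}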

24 escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java

@@ -35,9 +35,11 @@ import cn.escheduler.common.queue.TaskQueueFactory;
  import cn.escheduler.common.utils.*;
  import cn.escheduler.common.utils.placeholder.BusinessTimeUtils;
  import cn.escheduler.dao.ProcessDao;
+ import cn.escheduler.dao.entity.*;
  import cn.escheduler.dao.mapper.*;
- import cn.escheduler.dao.model.*;
  import com.alibaba.fastjson.JSON;
+ import com.baomidou.mybatisplus.core.metadata.IPage;
+ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
  import org.apache.commons.lang3.StringUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
@@ -116,7 +118,7 @@ public class ProcessInstanceService extends BaseDAGService {
  if(processInstance.getWorkerGroupId() == -1){
  workerGroupName = DEFAULT;
  }else{
- WorkerGroup workerGroup = workerGroupMapper.queryById(processInstance.getWorkerGroupId());
+ WorkerGroup workerGroup = workerGroupMapper.selectById(processInstance.getWorkerGroupId());
  if(workerGroup != null){
  workerGroupName = DEFAULT;
  }else{
@@ -185,12 +187,11 @@ public class ProcessInstanceService extends BaseDAGService {
  putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
  return result;
  }
- Integer count = processInstanceMapper.countProcessInstance(project.getId(), processDefineId, statesStr,
- host, start, end, searchVal);
- PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
- List<ProcessInstance> processInstanceList = processInstanceMapper.queryProcessInstanceListPaging(
- project.getId(), processDefineId, searchVal, statesStr, host, start, end, pageInfo.getStart(), pageSize);
+ Page<ProcessInstance> page = new Page(pageNo, pageSize);
+ IPage<ProcessInstance> processInstanceList =
+ processInstanceMapper.queryProcessInstanceListPaging(page,
+ project.getId(), processDefineId, searchVal, statesStr, host, start, end);
  Set<String> exclusionSet = new HashSet<String>(){{
  add(Constants.CLASS);
@@ -199,8 +200,9 @@ public class ProcessInstanceService extends BaseDAGService {
  add("processInstanceJson");
  }};
- pageInfo.setTotalCount(count);
- pageInfo.setLists(CollectionUtils.getListByExclusion(processInstanceList, exclusionSet));
+ PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
+ pageInfo.setTotalCount((int)processInstanceList.getTotal());
+ pageInfo.setLists(CollectionUtils.getListByExclusion(processInstanceList.getRecords(), exclusionSet));
  result.put(Constants.DATA_LIST, pageInfo);
  putMsg(result, Status.SUCCESS);
  return result;
@@ -410,7 +412,7 @@ public class ProcessInstanceService extends BaseDAGService {
  processDefinition.setLocations(locations);
  processDefinition.setConnects(connects);
  processDefinition.setTimeout(timeout);
- updateDefine = processDefineMapper.update(processDefinition);
+ updateDefine = processDefineMapper.updateById(processDefinition);
  }
  if (update > 0 && updateDefine > 0) {
  putMsg(result, Status.SUCCESS);
@@ -507,7 +509,7 @@ public class ProcessInstanceService extends BaseDAGService {
  .append(UNDERLINE);
  int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance);
- WorkerGroup workerGroup = workerGroupMapper.queryById(taskWorkerGroupId);
+ WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId);
  if(workerGroup == null){
  nodeValueSb.append(DEFAULT_WORKER_ID);

56 escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java

@@ -20,14 +20,15 @@ import cn.escheduler.api.enums.Status;
  import cn.escheduler.api.utils.Constants;
  import cn.escheduler.api.utils.PageInfo;
  import cn.escheduler.common.enums.UserType;
+ import cn.escheduler.dao.entity.ProcessDefinition;
+ import cn.escheduler.dao.entity.Project;
+ import cn.escheduler.dao.entity.ProjectUser;
+ import cn.escheduler.dao.entity.User;
  import cn.escheduler.dao.mapper.ProcessDefinitionMapper;
  import cn.escheduler.dao.mapper.ProjectMapper;
  import cn.escheduler.dao.mapper.ProjectUserMapper;
- import cn.escheduler.dao.mapper.UserMapper;
- import cn.escheduler.dao.model.ProcessDefinition;
- import cn.escheduler.dao.model.Project;
- import cn.escheduler.dao.model.ProjectUser;
- import cn.escheduler.dao.model.User;
+ import com.baomidou.mybatisplus.core.metadata.IPage;
+ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  import org.springframework.beans.factory.annotation.Autowired;
@@ -39,7 +40,8 @@ import static cn.escheduler.api.utils.CheckUtils.checkDesc;
  /**
  * project service
- */
+ *HttpTask./
+ **/
  @Service
  public class ProjectService extends BaseService{
@@ -102,7 +104,7 @@ public class ProjectService extends BaseService{
  public Map<String, Object> queryById(Integer projectId) {
  Map<String, Object> result = new HashMap<>(5);
- Project project = projectMapper.queryById(projectId);
+ Project project = projectMapper.selectById(projectId);
  if (project != null) {
  result.put(Constants.DATA_LIST, project);
@@ -151,24 +153,22 @@ public class ProjectService extends BaseService{
  */
  public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
  Map<String, Object> result = new HashMap<>();
- int count = 0;
  PageInfo pageInfo = new PageInfo<Project>(pageNo, pageSize);
- List<Project> projectList = null;
- if (loginUser.getUserType() == UserType.ADMIN_USER) {
- count = projectMapper.countAllProjects(searchVal);
- projectList = projectMapper.queryAllProjectListPaging(pageInfo.getStart(), pageSize, searchVal);
+ Page<Project> page = new Page(pageNo, pageSize);
+ int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
+ IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
+ List<Project> projectList = projectIPage.getRecords();
+ if(userId != 0){
  for (Project project : projectList) {
  project.setPerm(cn.escheduler.common.Constants.DEFAULT_ADMIN_PERMISSION);
  }
- } else {
- count = projectMapper.countProjects(loginUser.getId(), searchVal);
- projectList = projectMapper.queryProjectListPaging(loginUser.getId(),
- pageInfo.getStart(), pageSize, searchVal);
  }
- pageInfo.setTotalCount(count);
+ pageInfo.setTotalCount((int)projectIPage.getTotal());
  pageInfo.setLists(projectList);
- result.put(Constants.COUNT, count);
+ result.put(Constants.COUNT, (int)projectIPage.getTotal());
  result.put(Constants.DATA_LIST, pageInfo);
  putMsg(result, Status.SUCCESS);
@@ -184,7 +184,7 @@ public class ProjectService extends BaseService{
  */
  public Map<String, Object> deleteProject(User loginUser, Integer projectId) {
  Map<String, Object> result = new HashMap<>(5);
- Project project = projectMapper.queryById(projectId);
+ Project project = projectMapper.selectById(projectId);
  Map<String, Object> checkResult = getCheckResult(loginUser, project);
  if (checkResult != null) {
  return checkResult;
@@ -196,7 +196,7 @@ public class ProjectService extends BaseService{
  return result;
  }
- int delete = projectMapper.delete(projectId);
+ int delete = projectMapper.deleteById(projectId);
  if (delete > 0) {
  putMsg(result, Status.SUCCESS);
  } else {
@@ -233,7 +233,7 @@ public class ProjectService extends BaseService{
  public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) {
  Map<String, Object> result = new HashMap<>(5);
- Project project = projectMapper.queryById(projectId);
+ Project project = projectMapper.selectById(projectId);
  Map<String, Object> checkResult = getCheckResult(loginUser, project);
  if (checkResult != null) {
  return checkResult;
@@ -247,7 +247,7 @@ public class ProjectService extends BaseService{
  project.setDesc(desc);
  project.setUpdateTime(new Date());
- int update = projectMapper.update(project);
+ int update = projectMapper.updateById(project);
  if (update > 0) {
  putMsg(result, Status.SUCCESS);
  } else {
@@ -278,7 +278,7 @@ public class ProjectService extends BaseService{
  if (projectList != null && projectList.size() > 0) {
  projectSet = new HashSet<>(projectList);
- List<Project> authedProjectList = projectMapper.authedProject(userId);
+ List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
  resultList = getUnauthorizedProjects(projectSet, authedProjectList);
  }
@@ -321,7 +321,7 @@ public class ProjectService extends BaseService{
  return result;
  }
- List<Project> projects = projectMapper.authedProject(userId);
+ List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
  result.put(Constants.DATA_LIST, projects);
  putMsg(result,Status.SUCCESS);
@@ -357,7 +357,7 @@ public class ProjectService extends BaseService{
  return cn.escheduler.common.Constants.ALL_PERMISSIONS;
  }
- ProjectUser projectUser = projectUserMapper.query(project.getId(), user.getId());
+ ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
  if (projectUser == null) {
  return 0;
@@ -373,8 +373,8 @@ public class ProjectService extends BaseService{
  */
  public Map<String, Object> queryAllProjectList() {
  Map<String, Object> result = new HashMap<>();
- List<Project> projects = projectMapper.queryAllProjectList();
- List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryAll();
+ List<Project> projects = projectMapper.selectList(null);
+ List<ProcessDefinition> processDefinitions = processDefinitionMapper.selectList(null);
  if(projects != null){
  Set set = new HashSet<>();
- for (ProcessDefinition processDefinition : processDefinitionList){
+ for (ProcessDefinition processDefinition : processDefinitions){

21 escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java

@@ -20,9 +20,11 @@ import cn.escheduler.api.enums.Status;
  import cn.escheduler.api.utils.Constants;
  import cn.escheduler.api.utils.PageInfo;
  import cn.escheduler.api.utils.Result;
+ import cn.escheduler.dao.entity.Queue;
+ import cn.escheduler.dao.entity.User;
  import cn.escheduler.dao.mapper.QueueMapper;
- import cn.escheduler.dao.model.Queue;
- import cn.escheduler.dao.model.User;
+ import com.baomidou.mybatisplus.core.metadata.IPage;
+ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
  import org.apache.commons.lang.StringUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
@@ -57,7 +59,7 @@ public class QueueService extends BaseService {
  return result;
  }
- List<Queue> queueList = queueMapper.queryAllQueue();
+ List<Queue> queueList = queueMapper.selectList(null);
  result.put(Constants.DATA_LIST, queueList);
  putMsg(result, Status.SUCCESS);
@@ -79,14 +81,15 @@ public class QueueService extends BaseService {
  return result;
  }
- Integer count = queueMapper.countQueuePaging(searchVal);
- PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
- List<Queue> queueList = queueMapper.queryQueuePaging(searchVal, pageInfo.getStart(), pageSize);
+ Page<Queue> page = new Page(pageNo, pageSize);
+ IPage<Queue> queueList = queueMapper.queryQueuePaging(page, searchVal);
+ Integer count = (int)queueList.getTotal();
+ PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
  pageInfo.setTotalCount(count);
- pageInfo.setLists(queueList);
+ pageInfo.setLists(queueList.getRecords());
  result.put(Constants.DATA_LIST, pageInfo);
  putMsg(result, Status.SUCCESS);
@@ -156,7 +159,7 @@ public class QueueService extends BaseService {
  return result;
  }
- Queue queueObj = queueMapper.queryById(id);
+ Queue queueObj = queueMapper.selectById(id);
  if (queueObj == null) {
  putMsg(result, Status.QUEUE_NOT_EXIST, id);
  return result;
@@ -190,7 +193,7 @@ public class QueueService extends BaseService {
  queueObj.setQueueName(queueName);
  queueObj.setUpdateTime(now);
- queueMapper.update(queueObj);
+ queueMapper.updateById(queueObj);
  putMsg(result, Status.SUCCESS);
  return result;

84 escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java

@@ -26,10 +26,12 @@ import cn.escheduler.common.utils.FileUtils;
  import cn.escheduler.common.utils.HadoopUtils;
  import cn.escheduler.common.utils.PropertyUtils;
  import cn.escheduler.dao.mapper.*;
- import cn.escheduler.dao.model.Resource;
- import cn.escheduler.dao.model.Tenant;
- import cn.escheduler.dao.model.UdfFunc;
- import cn.escheduler.dao.model.User;
+ import cn.escheduler.dao.entity.Resource;
+ import cn.escheduler.dao.entity.Tenant;
+ import cn.escheduler.dao.entity.UdfFunc;
+ import cn.escheduler.dao.entity.User;
+ import com.baomidou.mybatisplus.core.metadata.IPage;
+ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
  import org.apache.commons.collections.BeanMap;
  import org.apache.commons.lang.StringUtils;
  import org.slf4j.Logger;
@@ -66,7 +68,7 @@ public class ResourcesService extends BaseService {
  private UserMapper userMapper;
  @Autowired
- private ResourcesUserMapper resourcesUserMapper;
+ private ResourceUserMapper resourceUserMapper;
  /**
  * create resource
@@ -129,8 +131,7 @@ public class ResourcesService extends BaseService {
  }
  // check resoure name exists
- Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal());
- if (resource != null) {
+ if (checkResourceExists(name, 0, type.ordinal())) {
  logger.error("resource {} has exist, can't recreate", name);
  putMsg(result, Status.RESOURCE_EXIST);
  return result;
@@ -138,7 +139,7 @@ public class ResourcesService extends BaseService {
  Date now = new Date();
- resource = new Resource(name,file.getOriginalFilename(),desc,loginUser.getId(),type,file.getSize(),now,now);
+ Resource resource = new Resource(name,file.getOriginalFilename(),desc,loginUser.getId(),type,file.getSize(),now,now);
  try {
  resourcesMapper.insert(resource);
@@ -167,6 +168,15 @@ public class ResourcesService extends BaseService {
  return result;
  }
+ private boolean checkResourceExists(String alias, int userId, int type ){
+ List<Resource> resources = resourcesMapper.queryResourceList(alias, userId, type);
+ if (resources != null && resources.size() > 0) {
+ return true;
+ }
+ return false;
+ }
  /**
@@ -193,7 +203,7 @@ public class ResourcesService extends BaseService {
  return result;
  }
- Resource resource = resourcesMapper.queryResourceById(resourceId);
+ Resource resource = resourcesMapper.selectById(resourceId);
  String originResourceName = resource.getAlias();
  if (resource == null) {
  putMsg(result, Status.RESOURCE_NOT_EXIST);
@@ -212,8 +222,7 @@ public class ResourcesService extends BaseService {
  //check resource aleady exists
  if (!resource.getAlias().equals(name)) {
- Resource needUpdateResource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal());
- if (needUpdateResource != null) {
+ if (checkResourceExists(name, 0, type.ordinal())) {
  logger.error("resource {} already exists, can't recreate", name);
  putMsg(result, Status.RESOURCE_EXIST);
  return result;
@@ -227,7 +236,7 @@ public class ResourcesService extends BaseService {
  resource.setUpdateTime(now);
  try {
- resourcesMapper.update(resource);
+ resourcesMapper.updateById(resource);
  putMsg(result, Status.SUCCESS);
  Map dataMap = new BeanMap(resource);
@@ -293,21 +302,16 @@ public class ResourcesService extends BaseService {
  public Map<String, Object> queryResourceListPaging(User loginUser, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {
  HashMap<String, Object> result = new HashMap<>(5);
- Integer count = 0;
- List<Resource> resourceList = new ArrayList<>();
- PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
+ Page<Resource> page = new Page(pageNo, pageSize);
+ int userId = loginUser.getId();
  if (isAdmin(loginUser)) {
- count = resourcesMapper.countAllResourceNumberByType(type.ordinal());
- resourceList = resourcesMapper.queryAllResourceListPaging(type.ordinal(), searchVal,
- pageInfo.getStart(), pageSize);
- } else {
- count = resourcesMapper.countResourceNumberByType(loginUser.getId(), type.ordinal());
- resourceList = resourcesMapper.queryResourceAuthoredPaging(loginUser.getId(), type.ordinal(), searchVal,
- pageInfo.getStart(), pageSize);
+ userId= 0;
  }
- pageInfo.setTotalCount(count);
- pageInfo.setLists(resourceList);
+ IPage<Resource> resourceIPage = resourcesMapper.queryResourcePaging(page,
+ userId, type.ordinal(), searchVal);
+ PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
+ pageInfo.setTotalCount((int)resourceIPage.getTotal());
+ pageInfo.setLists(resourceIPage.getRecords());
  result.put(Constants.DATA_LIST, pageInfo);
  putMsg(result,Status.SUCCESS);
  return result;
@@ -370,11 +374,11 @@ public class ResourcesService extends BaseService {
  Map<String, Object> result = new HashMap<>(5);
  List<Resource> resourceList;
+ int userId = loginUser.getId();
  if(isAdmin(loginUser)){
- resourceList = resourcesMapper.listAllResourceByType(type.ordinal());
- }else{
- resourceList = resourcesMapper.queryResourceListAuthored(loginUser.getId(), type.ordinal());
+ userId = 0;
  }
+ resourceList = resourcesMapper.queryResourceList(null, userId, type.ordinal());
  result.put(Constants.DATA_LIST, resourceList);
  putMsg(result,Status.SUCCESS);
@@ -399,7 +403,7 @@ public class ResourcesService extends BaseService {
  }
  //get resource and hdfs path
- Resource resource = resourcesMapper.queryResourceById(resourceId);
+ Resource resource = resourcesMapper.selectById(resourceId);
  if (resource == null) {
  logger.error("resource file not exist, resource id {}", resourceId);
  putMsg(result, Status.RESOURCE_NOT_EXIST);
@@ -417,8 +421,8 @@ public class ResourcesService extends BaseService {
  hdfsFilename = getHdfsFileName(resource, tenantCode, hdfsFilename);
  //delete data in database
- resourcesMapper.delete(resourceId);
- resourcesUserMapper.deleteByResourceId(resourceId);
+ resourcesMapper.deleteById(resourceId);
+ resourceUserMapper.deleteResourceUser(0, resourceId);
  //delete file on hdfs
  HadoopUtils.getInstance().delete(hdfsFilename, false);
  putMsg(result, Status.SUCCESS);
@@ -436,8 +440,7 @@ public class ResourcesService extends BaseService {
  public Result verifyResourceName(String name, ResourceType type,User loginUser) {
  Result result = new Result();
  putMsg(result, Status.SUCCESS);
- Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal());
- if (resource != null) {
+ if (checkResourceExists(name, 0, type.ordinal())) {
  logger.error("resource type:{} name:{} has exist, can't create again.", type, name);
  putMsg(result, Status.RESOURCE_EXIST);
  } else {
@@ -474,8 +477,7 @@ public class ResourcesService extends BaseService {
  */
  public Result verifyResourceName(String name, ResourceType type) {
  Result result = new Result();
- Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal());
- if (resource != null) {
+ if (checkResourceExists(name, 0, type.ordinal())) {
  logger.error("resource type:{} name:{} has exist, can't create again.", type, name);
  putMsg(result, Status.RESOURCE_EXIST);
  } else {
@@ -502,7 +504,7 @@ public class ResourcesService extends BaseService {
  }
  // get resource by id
- Resource resource = resourcesMapper.queryResourceById(resourceId);
+ Resource resource = resourcesMapper.selectById(resourceId);
  if (resource == null) {
  logger.error("resouce file not exist, resource id {}", resourceId);
  putMsg(result, Status.RESOURCE_NOT_EXIST);
@@ -629,7 +631,7 @@ public class ResourcesService extends BaseService {
  return result;
  }
- Resource resource = resourcesMapper.queryResourceById(resourceId);
+ Resource resource = resourcesMapper.selectById(resourceId);
  if (resource == null) {
  logger.error("read file not exist, resource id {}", resourceId);
  putMsg(result, Status.RESOURCE_NOT_EXIST);
@@ -649,7 +651,7 @@ public class ResourcesService extends BaseService {
  resource.setSize(content.getBytes().length);
  resource.setUpdateTime(new Date());
- resourcesMapper.update(resource);
+ resourcesMapper.updateById(resource);
  User user = userMapper.queryDetailsById(resource.getUserId());
  String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode();
@@ -720,7 +722,7 @@ public class ResourcesService extends BaseService {
  throw new RuntimeException("hdfs not startup");
  }
- Resource resource = resourcesMapper.queryResourceById(resourceId);
+ Resource resource = resourcesMapper.selectById(resourceId);
  if (resource == null) {
  logger.error("download file not exist, resource id {}", resourceId);
  return null;
@@ -793,7 +795,7 @@ public class ResourcesService extends BaseService {
  if (udfFuncList != null && udfFuncList.size() > 0) {
  udfFuncSet = new HashSet<>(udfFuncList);
- List<UdfFunc> authedUDFFuncList = udfFunctionMapper.authedUdfFunc(userId);
+ List<UdfFunc> authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId);
  getAuthorizedResourceList(udfFuncSet, authedUDFFuncList);
  resultList = new ArrayList<>(udfFuncSet);
@@ -818,7 +820,7 @@ public class ResourcesService extends BaseService {
  if (checkAdmin(loginUser, result)) {
  return result;
  }
- List<UdfFunc> udfFuncs = udfFunctionMapper.authedUdfFunc(userId);
+ List<UdfFunc> udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId);
  result.put(Constants.DATA_LIST, udfFuncs);
  putMsg(result,Status.SUCCESS);
  return result;
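A recurring convention in the rewritten queries above, and in DataSourceService and ProjectService earlier: when the caller is an admin the services pass userId = 0 and leave it to the mapper SQL to treat 0 as "no per-user filter". A tiny sketch of that sentinel, illustrative only:

class PagingUserIdSketch {
    // 0 acts as the "all rows" sentinel that the rewritten mapper queries appear to expect.
    static int pagingUserId(boolean isAdmin, int loginUserId) {
        return isAdmin ? 0 : loginUserId;
    }

    public static void main(String[] args) {
        System.out.println(pagingUserId(true, 42));  // 0  -> admin, no user filter
        System.out.println(pagingUserId(false, 42)); // 42 -> ordinary user, own rows only
    }
}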

53
escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java

@ -29,14 +29,18 @@ import cn.escheduler.common.model.MasterServer;
import cn.escheduler.common.utils.DateUtils; import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.common.utils.JSONUtils; import cn.escheduler.common.utils.JSONUtils;
import cn.escheduler.dao.ProcessDao; import cn.escheduler.dao.ProcessDao;
import cn.escheduler.dao.mapper.MasterServerMapper; import cn.escheduler.dao.entity.ProcessDefinition;
import cn.escheduler.dao.entity.Project;
import cn.escheduler.dao.entity.Schedule;
import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.mapper.ProcessDefinitionMapper; import cn.escheduler.dao.mapper.ProcessDefinitionMapper;
import cn.escheduler.dao.mapper.ProjectMapper; import cn.escheduler.dao.mapper.ProjectMapper;
import cn.escheduler.dao.mapper.ScheduleMapper; import cn.escheduler.dao.mapper.ScheduleMapper;
import cn.escheduler.dao.model.*;
import cn.escheduler.dao.utils.cron.CronUtils; import cn.escheduler.dao.utils.cron.CronUtils;
import cn.escheduler.server.quartz.ProcessScheduleJob; import cn.escheduler.server.quartz.ProcessScheduleJob;
import cn.escheduler.server.quartz.QuartzExecutors; import cn.escheduler.server.quartz.QuartzExecutors;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.quartz.CronExpression; import org.quartz.CronExpression;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -64,10 +68,10 @@ public class SchedulerService extends BaseService {
private ExecutorService executorService; private ExecutorService executorService;
@Autowired @Autowired
private ProcessDao processDao; private MonitorService monitorService;
@Autowired @Autowired
private MasterServerMapper masterServerMapper; private ProcessDao processDao;
@Autowired @Autowired
private ScheduleMapper scheduleMapper; private ScheduleMapper scheduleMapper;
@@ -149,7 +153,9 @@ public class SchedulerService extends BaseService {
/** /**
* updateProcessInstance receivers and cc by process definition id * updateProcessInstance receivers and cc by process definition id
*/ */
processDefinitionMapper.updateReceiversAndCcById(receivers, receiversCc, processDefineId); processDefinition.setReceivers(receivers);
processDefinition.setReceiversCc(receiversCc);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
@@ -186,7 +192,7 @@ public class SchedulerService extends BaseService {
} }
// check schedule exists // check schedule exists
Schedule schedule = scheduleMapper.queryById(id); Schedule schedule = scheduleMapper.selectById(id);
if (schedule == null) { if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
@@ -241,12 +247,14 @@ public class SchedulerService extends BaseService {
schedule.setWorkerGroupId(workerGroupId); schedule.setWorkerGroupId(workerGroupId);
schedule.setUpdateTime(now); schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(processInstancePriority); schedule.setProcessInstancePriority(processInstancePriority);
scheduleMapper.update(schedule); scheduleMapper.updateById(schedule);
/** /**
* updateProcessInstance recipients and cc by process definition ID * updateProcessInstance recipients and cc by process definition ID
*/ */
processDefinitionMapper.updateReceiversAndCcById(receivers, receiversCc, schedule.getProcessDefinitionId()); processDefinition.setReceivers(receivers);
processDefinition.setReceiversCc(receiversCc);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
@@ -274,7 +282,7 @@ public class SchedulerService extends BaseService {
} }
// check schedule exists // check schedule exists
Schedule scheduleObj = scheduleMapper.queryById(id); Schedule scheduleObj = scheduleMapper.selectById(id);
if (scheduleObj == null) { if (scheduleObj == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
@@ -302,10 +310,12 @@ public class SchedulerService extends BaseService {
return result; return result;
} }
// check sub process definition release state // check sub process definition release state
List<String> subProcessDefineIds = new ArrayList<>(); List<Integer> subProcessDefineIds = new ArrayList<>();
processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]);
if (subProcessDefineIds.size() > 0){ if (subProcessDefineIds.size() > 0){
List<ProcessDefinition> subProcessDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(subProcessDefineIds); List<ProcessDefinition> subProcessDefinitionList =
processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0){ if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0){
for (ProcessDefinition subProcessDefinition : subProcessDefinitionList){ for (ProcessDefinition subProcessDefinition : subProcessDefinitionList){
/** /**
@@ -323,7 +333,8 @@ public class SchedulerService extends BaseService {
} }
// check master server exists // check master server exists
List<MasterServer> masterServers = masterServerMapper.queryAllMaster(); List<MasterServer> masterServers = monitorService.getServerListFromZK(true);
if (masterServers.size() == 0) { if (masterServers.size() == 0) {
putMsg(result, Status.MASTER_NOT_EXISTS); putMsg(result, Status.MASTER_NOT_EXISTS);
@@ -332,7 +343,7 @@ public class SchedulerService extends BaseService {
// set status // set status
scheduleObj.setReleaseState(scheduleStatus); scheduleObj.setReleaseState(scheduleStatus);
scheduleMapper.update(scheduleObj); scheduleMapper.updateById(scheduleObj);
try { try {
switch (scheduleStatus) { switch (scheduleStatus) {
@@ -387,15 +398,15 @@ public class SchedulerService extends BaseService {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
return result; return result;
} }
Page<Schedule> page = new Page(pageNo, pageSize);
IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(
page, processDefineId, searchVal
);
Integer count = scheduleMapper.countByProcessDefineId(processDefineId, searchVal);
PageInfo pageInfo = new PageInfo<Schedule>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<Schedule>(pageNo, pageSize);
pageInfo.setTotalCount((int)scheduleIPage.getTotal());
List<Schedule> scheduleList = scheduleMapper.queryByProcessDefineIdPaging(processDefinition.getId(), searchVal, pageInfo.getStart(), pageSize); pageInfo.setLists(scheduleIPage.getRecords());
pageInfo.setTotalCount(count);
pageInfo.setLists(scheduleList);
result.put(Constants.DATA_LIST, pageInfo); result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
@@ -523,7 +534,7 @@ public class SchedulerService extends BaseService {
return checkResult; return checkResult;
} }
Schedule schedule = scheduleMapper.queryById(scheduleId); Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) { if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId);
@@ -543,7 +554,7 @@ public class SchedulerService extends BaseService {
} }
int delete = scheduleMapper.delete(scheduleId); int delete = scheduleMapper.deleteById(scheduleId);
if (delete > 0) { if (delete > 0) {
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
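The paging rewrite in queryByProcessDefineIdPaging above (and the matching changes in TaskInstanceService, TenantService, UsersService and WorkerGroupService further down) swaps the old countXxx plus offset/limit pair for MyBatis-Plus pagination: the service builds a Page, the mapper method takes it as its first argument and returns an IPage, and getTotal()/getRecords() feed the existing PageInfo. A minimal sketch under those assumptions; the @Select SQL and table name are placeholders, not taken from this commit, and the MyBatis-Plus pagination interceptor still has to be registered for the page to be applied:

    import com.baomidou.mybatisplus.core.mapper.BaseMapper;
    import com.baomidou.mybatisplus.core.metadata.IPage;
    import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
    import org.apache.ibatis.annotations.Param;
    import org.apache.ibatis.annotations.Select;
    import cn.escheduler.dao.entity.Schedule;

    public interface ScheduleMapper extends BaseMapper<Schedule> {
        // Passing the IPage as the first parameter tells MyBatis-Plus to paginate this query.
        @Select("select * from t_escheduler_schedules "
                + "where process_definition_id = #{processDefineId} "
                + "and id like concat('%', #{searchVal}, '%')")   // placeholder SQL, for illustration only
        IPage<Schedule> queryByProcessDefineIdPaging(IPage<Schedule> page,
                                                     @Param("processDefineId") int processDefineId,
                                                     @Param("searchVal") String searchVal);
    }

    // Usage, mirroring the service code in the hunk above:
    //   Page<Schedule> page = new Page<>(pageNo, pageSize);
    //   IPage<Schedule> schedules = scheduleMapper.queryByProcessDefineIdPaging(page, processDefineId, searchVal);
    //   pageInfo.setTotalCount((int) schedules.getTotal());
    //   pageInfo.setLists(schedules.getRecords());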

77
escheduler-api/src/main/java/cn/escheduler/api/service/ServerService.java

@@ -1,77 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.api.service;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.dao.mapper.MasterServerMapper;
import cn.escheduler.dao.mapper.WorkerServerMapper;
import cn.escheduler.common.model.MasterServer;
import cn.escheduler.dao.model.User;
import cn.escheduler.dao.model.WorkerServer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* server service
*/
@Service
public class ServerService extends BaseService{
@Autowired
MasterServerMapper masterServerMapper;
@Autowired
WorkerServerMapper workerServerMapper;
/**
* query master list
*
* @param loginUser
* @return
*/
public Map<String,Object> queryMaster(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
List<MasterServer> masterList = masterServerMapper.queryAllMaster();
result.put(Constants.DATA_LIST, masterList);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* query worker list
*
* @param loginUser
* @return
*/
public Map<String,Object> queryWorker(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<WorkerServer> workerList = workerServerMapper.queryAllWorker();
result.put(Constants.DATA_LIST, workerList);
putMsg(result,Status.SUCCESS);
return result;
}
}

9
escheduler-api/src/main/java/cn/escheduler/api/service/SessionService.java

@@ -20,9 +20,9 @@ package cn.escheduler.api.service;
import cn.escheduler.api.controller.BaseController; import cn.escheduler.api.controller.BaseController;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.utils.CollectionUtils; import cn.escheduler.common.utils.CollectionUtils;
import cn.escheduler.dao.entity.Session;
import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.mapper.SessionMapper; import cn.escheduler.dao.mapper.SessionMapper;
import cn.escheduler.dao.model.Session;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -70,7 +70,7 @@ public class SessionService extends BaseService{
String ip = BaseController.getClientIpAddress(request); String ip = BaseController.getClientIpAddress(request);
logger.debug("get session: {}, ip: {}", sessionId, ip); logger.debug("get session: {}, ip: {}", sessionId, ip);
return sessionMapper.queryBySessionId(sessionId); return sessionMapper.selectById(sessionId);
} }
/** /**
@@ -103,7 +103,8 @@ public class SessionService extends BaseService{
/** /**
* updateProcessInstance the latest login time * updateProcessInstance the latest login time
*/ */
sessionMapper.update(session.getId(), now); session.setLastLoginTime(now);
sessionMapper.updateById(session);
return session.getId(); return session.getId();

22
escheduler-api/src/main/java/cn/escheduler/api/service/TaskInstanceService.java

@@ -24,12 +24,14 @@ import cn.escheduler.common.enums.ExecutionStatus;
import cn.escheduler.common.utils.CollectionUtils; import cn.escheduler.common.utils.CollectionUtils;
import cn.escheduler.common.utils.DateUtils; import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.dao.ProcessDao; import cn.escheduler.dao.ProcessDao;
import cn.escheduler.dao.entity.ProcessInstance;
import cn.escheduler.dao.entity.Project;
import cn.escheduler.dao.entity.TaskInstance;
import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.mapper.ProjectMapper; import cn.escheduler.dao.mapper.ProjectMapper;
import cn.escheduler.dao.mapper.TaskInstanceMapper; import cn.escheduler.dao.mapper.TaskInstanceMapper;
import cn.escheduler.dao.model.ProcessInstance; import com.baomidou.mybatisplus.core.metadata.IPage;
import cn.escheduler.dao.model.Project; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import cn.escheduler.dao.model.TaskInstance;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -112,18 +114,18 @@ public class TaskInstanceService extends BaseService {
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
return result; return result;
} }
Integer count = taskInstanceMapper.countTaskInstance(project.getId(), processInstanceId, taskName, statesStr,
host,start, end, searchVal);
Page<TaskInstance> page = new Page(pageNo, pageSize);
IPage<TaskInstance> taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging(
page, project.getId(), processInstanceId, searchVal, taskName, statesStr, host, start, end
);
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
Set<String> exclusionSet = new HashSet<String>(){{ Set<String> exclusionSet = new HashSet<String>(){{
add(Constants.CLASS); add(Constants.CLASS);
add("taskJson"); add("taskJson");
}}; }};
List<TaskInstance> taskInstanceList = taskInstanceMapper.queryTaskInstanceListPaging( pageInfo.setTotalCount((int)taskInstanceIPage.getTotal());
project.getId(), processInstanceId, searchVal, taskName, statesStr, host, start, end, pageInfo.getStart(), pageSize); pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet));
pageInfo.setTotalCount(count);
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceList,exclusionSet));
result.put(Constants.DATA_LIST, pageInfo); result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);

2
escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java

@@ -20,7 +20,7 @@ import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo; import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.dao.TaskRecordDao; import cn.escheduler.dao.TaskRecordDao;
import cn.escheduler.dao.model.TaskRecord; import cn.escheduler.dao.entity.TaskRecord;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;

25
escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java

@@ -22,9 +22,11 @@ import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result; import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.HadoopUtils; import cn.escheduler.common.utils.HadoopUtils;
import cn.escheduler.common.utils.PropertyUtils; import cn.escheduler.common.utils.PropertyUtils;
import cn.escheduler.dao.entity.Tenant;
import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.mapper.TenantMapper; import cn.escheduler.dao.mapper.TenantMapper;
import cn.escheduler.dao.model.Tenant; import com.baomidou.mybatisplus.core.metadata.IPage;
import cn.escheduler.dao.model.User; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -88,7 +90,7 @@ public class TenantService extends BaseService{
tenant.setTenantCode(tenantCode); tenant.setTenantCode(tenantCode);
tenant.setTenantName(tenantName); tenant.setTenantName(tenantName);
tenant.setQueueId(queueId); tenant.setQueueId(queueId);
tenant.setDesc(desc); tenant.setDescription(desc);
tenant.setCreateTime(now); tenant.setCreateTime(now);
tenant.setUpdateTime(now); tenant.setUpdateTime(now);
@@ -123,14 +125,11 @@ public class TenantService extends BaseService{
return result; return result;
} }
Integer count = tenantMapper.countTenantPaging(searchVal); Page<Tenant> page = new Page(pageNo, pageSize);
IPage<Tenant> tenantIPage = tenantMapper.queryTenantPaging(page, searchVal);
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize); PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int)tenantIPage.getTotal());
List<Tenant> scheduleList = tenantMapper.queryTenantPaging(searchVal, pageInfo.getStart(), pageSize); pageInfo.setLists(tenantIPage.getRecords());
pageInfo.setTotalCount(count);
pageInfo.setLists(scheduleList);
result.put(Constants.DATA_LIST, pageInfo); result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
@@ -198,9 +197,9 @@ public class TenantService extends BaseService{
if (queueId != 0){ if (queueId != 0){
tenant.setQueueId(queueId); tenant.setQueueId(queueId);
} }
tenant.setDesc(desc); tenant.setDescription(desc);
tenant.setUpdateTime(now); tenant.setUpdateTime(now);
tenantMapper.update(tenant); tenantMapper.updateById(tenant);
result.put(Constants.STATUS, Status.SUCCESS); result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg()); result.put(Constants.MSG, Status.SUCCESS.getMsg());
@@ -265,7 +264,7 @@ public class TenantService extends BaseService{
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
List<Tenant> resourceList = tenantMapper.queryAllTenant(); List<Tenant> resourceList = tenantMapper.selectList(null);
result.put(Constants.DATA_LIST, resourceList); result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
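queryAllTenant() above (and queryAllUsers() in UsersService below) can be dropped because passing a null condition wrapper to the inherited selectList is the MyBatis-Plus idiom for fetching every row. A one-line sketch, assuming only the mapper names already shown in this diff:

    // selectList(null) means "no WHERE condition", i.e. select all rows.
    List<Tenant> allTenants = tenantMapper.selectList(null);
    List<User> allUsers = userMapper.selectList(null);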

73
escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java

@@ -22,12 +22,14 @@ import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result; import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.UdfType; import cn.escheduler.common.enums.UdfType;
import cn.escheduler.common.utils.PropertyUtils; import cn.escheduler.common.utils.PropertyUtils;
import cn.escheduler.dao.entity.Resource;
import cn.escheduler.dao.entity.UdfFunc;
import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.mapper.ResourceMapper; import cn.escheduler.dao.mapper.ResourceMapper;
import cn.escheduler.dao.mapper.UDFUserMapper; import cn.escheduler.dao.mapper.UDFUserMapper;
import cn.escheduler.dao.mapper.UdfFuncMapper; import cn.escheduler.dao.mapper.UdfFuncMapper;
import cn.escheduler.dao.model.Resource; import com.baomidou.mybatisplus.core.metadata.IPage;
import cn.escheduler.dao.model.UdfFunc; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -88,14 +90,13 @@ public class UdfFuncService extends BaseService{
} }
// verify udf func name exist // verify udf func name exist
UdfFunc udfFunc = udfFuncMapper.queryUdfFuncByName(funcName); if (checkUdfFuncNameExists(funcName)) {
if (udfFunc != null) {
logger.error("udf func {} has exist, can't recreate", funcName); logger.error("udf func {} has exist, can't recreate", funcName);
putMsg(result, Status.UDF_FUNCTION_EXISTS); putMsg(result, Status.UDF_FUNCTION_EXISTS);
return result; return result;
} }
Resource resource = resourceMapper.queryResourceById(resourceId); Resource resource = resourceMapper.selectById(resourceId);
if (resource == null) { if (resource == null) {
logger.error("resourceId {} is not exist", resourceId); logger.error("resourceId {} is not exist", resourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST); putMsg(result, Status.RESOURCE_NOT_EXIST);
@@ -127,6 +128,19 @@ public class UdfFuncService extends BaseService{
return result; return result;
} }
/**
*
* @param name
* @return
*/
private boolean checkUdfFuncNameExists(String name){
List<UdfFunc> resource = udfFuncMapper.queryUdfByIdStr(null, name);
if(resource != null && resource.size() > 0){
return true;
}
return false;
}
/** /**
* query udf function * query udf function
@@ -134,7 +148,7 @@ public class UdfFuncService extends BaseService{
public Map<String, Object> queryUdfFuncDetail(int id) { public Map<String, Object> queryUdfFuncDetail(int id) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
UdfFunc udfFunc = udfFuncMapper.queryUdfById(id); UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) { if (udfFunc == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST); putMsg(result, Status.RESOURCE_NOT_EXIST);
return result; return result;
@@ -165,7 +179,7 @@ public class UdfFuncService extends BaseService{
int resourceId) { int resourceId) {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>();
// verify udfFunc is exist // verify udfFunc is exist
UdfFunc udf = udfFuncMapper.queryUdfById(udfFuncId); UdfFunc udf = udfFuncMapper.selectById(udfFuncId);
// if resource upload startup // if resource upload startup
if (!PropertyUtils.getResUploadStartupState()){ if (!PropertyUtils.getResUploadStartupState()){
@@ -182,8 +196,7 @@ public class UdfFuncService extends BaseService{
// verify udfFuncName is exist // verify udfFuncName is exist
if (!funcName.equals(udf.getFuncName())) { if (!funcName.equals(udf.getFuncName())) {
UdfFunc udfFunc = udfFuncMapper.queryUdfFuncByName(funcName); if (checkUdfFuncNameExists(funcName)) {
if (udfFunc != null) {
logger.error("UdfFunc {} has exist, can't create again.", funcName); logger.error("UdfFunc {} has exist, can't create again.", funcName);
result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS); result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS);
result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg()); result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg());
@@ -191,7 +204,7 @@ public class UdfFuncService extends BaseService{
} }
} }
Resource resource = resourceMapper.queryResourceById(resourceId); Resource resource = resourceMapper.selectById(resourceId);
if (resource == null) { if (resource == null) {
logger.error("resourceId {} is not exist", resourceId); logger.error("resourceId {} is not exist", resourceId);
result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST); result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST);
@@ -216,7 +229,7 @@ public class UdfFuncService extends BaseService{
udf.setCreateTime(now); udf.setCreateTime(now);
udf.setUpdateTime(now); udf.setUpdateTime(now);
udfFuncMapper.update(udf); udfFuncMapper.updateById(udf);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
} }
@@ -234,13 +247,11 @@ public class UdfFuncService extends BaseService{
public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
Integer count = getTotalCount(loginUser);
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
pageInfo.setTotalCount(count); IPage<UdfFunc> udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo);
List<UdfFunc> udfFuncList = getUdfFuncs(loginUser, searchVal, pageSize, pageInfo); pageInfo.setTotalCount((int)udfFuncList.getTotal());
pageInfo.setLists(udfFuncList.getRecords());
pageInfo.setLists(udfFuncList);
result.put(Constants.DATA_LIST, pageInfo); result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
@@ -252,28 +263,17 @@ public class UdfFuncService extends BaseService{
* @param loginUser * @param loginUser
* @param searchVal * @param searchVal
* @param pageSize * @param pageSize
* @param pageInfo * @param pageNo
* @return * @return
*/ */
private List<UdfFunc> getUdfFuncs(User loginUser, String searchVal, Integer pageSize, PageInfo pageInfo) { private IPage<UdfFunc> getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) {
if (isAdmin(loginUser)) {
return udfFuncMapper.queryAllUdfFuncPaging(searchVal, pageInfo.getStart(), pageSize);
}
return udfFuncMapper.queryUdfFuncPaging(loginUser.getId(), searchVal,
pageInfo.getStart(), pageSize);
}
/** int userId = loginUser.getId();
* udf function total
*
* @param loginUser
* @return
*/
private Integer getTotalCount(User loginUser) {
if (isAdmin(loginUser)) { if (isAdmin(loginUser)) {
return udfFuncMapper.countAllUdfFunc(); userId = 0;
} }
return udfFuncMapper.countUserUdfFunc(loginUser.getId()); Page<UdfFunc> page = new Page(pageNo, pageSize);
return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal);
} }
/** /**
@@ -301,7 +301,7 @@ public class UdfFuncService extends BaseService{
public Result delete(int id) { public Result delete(int id) {
Result result = new Result(); Result result = new Result();
udfFuncMapper.delete(id); udfFuncMapper.deleteById(id);
udfUserMapper.deleteByUdfFuncId(id); udfUserMapper.deleteByUdfFuncId(id);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
@@ -315,8 +315,7 @@ public class UdfFuncService extends BaseService{
*/ */
public Result verifyUdfFuncByName(String name) { public Result verifyUdfFuncByName(String name) {
Result result = new Result(); Result result = new Result();
UdfFunc udfFunc = udfFuncMapper.queryUdfFuncByName(name); if (checkUdfFuncNameExists(name)) {
if (udfFunc != null) {
logger.error("UDF function name:{} has exist, can't create again.", name); logger.error("UDF function name:{} has exist, can't create again.", name);
putMsg(result, Status.UDF_FUNCTION_EXISTS); putMsg(result, Status.UDF_FUNCTION_EXISTS);
} else { } else {
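The new checkUdfFuncNameExists helper above routes every name check through udfFuncMapper.queryUdfByIdStr(null, name). Purely as a sketch of the MyBatis-Plus condition-wrapper API (not what this commit does), the same existence test could also be written against the inherited selectList; the func_name column name here is an assumption:

    import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;

    private boolean udfFuncNameExists(String name) {
        QueryWrapper<UdfFunc> wrapper = new QueryWrapper<>();
        wrapper.eq("func_name", name);   // column name assumed, not taken from the commit
        return !udfFuncMapper.selectList(wrapper).isEmpty();
    }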

48
escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java

@@ -21,13 +21,16 @@ import cn.escheduler.api.utils.CheckUtils;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo; import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result; import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.ResourceType;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.common.utils.CollectionUtils; import cn.escheduler.common.utils.CollectionUtils;
import cn.escheduler.common.utils.EncryptionUtils; import cn.escheduler.common.utils.EncryptionUtils;
import cn.escheduler.common.utils.HadoopUtils; import cn.escheduler.common.utils.HadoopUtils;
import cn.escheduler.common.utils.PropertyUtils; import cn.escheduler.common.utils.PropertyUtils;
import cn.escheduler.dao.entity.*;
import cn.escheduler.dao.mapper.*; import cn.escheduler.dao.mapper.*;
import cn.escheduler.dao.model.*; import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -55,13 +58,13 @@ public class UsersService extends BaseService {
private ProjectUserMapper projectUserMapper; private ProjectUserMapper projectUserMapper;
@Autowired @Autowired
private ResourcesUserMapper resourcesUserMapper; private ResourceUserMapper resourcesUserMapper;
@Autowired @Autowired
private ResourceMapper resourceMapper; private ResourceMapper resourceMapper;
@Autowired @Autowired
private DatasourceUserMapper datasourceUserMapper; private DataSourceUserMapper datasourceUserMapper;
@Autowired @Autowired
private UDFUserMapper udfUserMapper; private UDFUserMapper udfUserMapper;
@@ -148,7 +151,7 @@ public class UsersService extends BaseService {
*/ */
public User queryUser(String name, String password) { public User queryUser(String name, String password) {
String md5 = EncryptionUtils.getMd5(password); String md5 = EncryptionUtils.getMd5(password);
return userMapper.queryForCheck(name, md5); return userMapper.queryUserByNamePassword(name, md5);
} }
/** /**
@@ -177,14 +180,13 @@ public class UsersService extends BaseService {
return result; return result;
} }
Integer count = userMapper.countUserPaging(searchVal); Page<User> page = new Page(pageNo, pageSize);
PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize); IPage<User> scheduleList = userMapper.queryUserPaging(page, searchVal);
List<User> scheduleList = userMapper.queryUserPaging(searchVal, pageInfo.getStart(), pageSize);
pageInfo.setTotalCount(count); PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setLists(scheduleList); pageInfo.setTotalCount((int)scheduleList.getTotal());
pageInfo.setLists(scheduleList.getRecords());
result.put(Constants.DATA_LIST, pageInfo); result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
@@ -212,7 +214,7 @@ public class UsersService extends BaseService {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false); result.put(Constants.STATUS, false);
User user = userMapper.queryById(userId); User user = userMapper.selectById(userId);
if (user == null) { if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId); putMsg(result, Status.USER_NOT_EXIST, userId);
@@ -222,7 +224,7 @@ public class UsersService extends BaseService {
Date now = new Date(); Date now = new Date();
if (StringUtils.isNotEmpty(userName)) { if (StringUtils.isNotEmpty(userName)) {
User tempUser = userMapper.queryByUserName(userName); User tempUser = userMapper.queryByUserNameAccurately(userName);
if (tempUser != null && tempUser.getId() != userId) { if (tempUser != null && tempUser.getId() != userId) {
putMsg(result, Status.USER_NAME_EXIST); putMsg(result, Status.USER_NAME_EXIST);
return result; return result;
@@ -259,7 +261,8 @@ public class UsersService extends BaseService {
String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode); String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode);
//file resources list //file resources list
List<Resource> fileResourcesList = resourceMapper.queryResourceCreatedByUser(userId, 0); List<Resource> fileResourcesList = resourceMapper.queryResourceList(
null, userId, ResourceType.FILE.ordinal());
if (CollectionUtils.isNotEmpty(fileResourcesList)) { if (CollectionUtils.isNotEmpty(fileResourcesList)) {
for (Resource resource : fileResourcesList) { for (Resource resource : fileResourcesList) {
HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getAlias(), newResourcePath, false, true); HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getAlias(), newResourcePath, false, true);
@@ -267,7 +270,8 @@ public class UsersService extends BaseService {
} }
//udf resources //udf resources
List<Resource> udfResourceList = resourceMapper.queryResourceCreatedByUser(userId, 1); List<Resource> udfResourceList = resourceMapper.queryResourceList(
null, userId, ResourceType.UDF.ordinal());
if (CollectionUtils.isNotEmpty(udfResourceList)) { if (CollectionUtils.isNotEmpty(udfResourceList)) {
for (Resource resource : udfResourceList) { for (Resource resource : udfResourceList) {
HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getAlias(), newUdfsPath, false, true); HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getAlias(), newUdfsPath, false, true);
@@ -297,7 +301,7 @@ public class UsersService extends BaseService {
} }
// updateProcessInstance user // updateProcessInstance user
userMapper.update(user); userMapper.updateById(user);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
} }
@@ -329,7 +333,7 @@ public class UsersService extends BaseService {
} }
} }
userMapper.delete(id); userMapper.deleteById(id);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;
@@ -353,7 +357,7 @@ public class UsersService extends BaseService {
} }
//if the selected projectIds are empty, delete all items associated with the user //if the selected projectIds are empty, delete all items associated with the user
projectUserMapper.deleteByUserId(userId); projectUserMapper.deleteProjectRelation(0, userId);
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS, Constants.MSG)) { if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS, Constants.MSG)) {
return result; return result;
@@ -393,7 +397,7 @@ public class UsersService extends BaseService {
return result; return result;
} }
resourcesUserMapper.deleteByUserId(userId); resourcesUserMapper.deleteResourceUser(userId, 0);
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS, Constants.MSG)) { if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS, Constants.MSG)) {
return result; return result;
@@ -549,7 +553,7 @@ public class UsersService extends BaseService {
return result; return result;
} }
List<User> userList = userMapper.queryAllGeneralUsers(); List<User> userList = userMapper.queryAllGeneralUser();
result.put(Constants.DATA_LIST, userList); result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
@@ -570,7 +574,7 @@ public class UsersService extends BaseService {
return result; return result;
} }
List<User> userList = userMapper.queryAllUsers(); List<User> userList = userMapper.selectList(null );
result.put(Constants.DATA_LIST, userList); result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
@@ -586,7 +590,7 @@ public class UsersService extends BaseService {
public Result verifyUserName(String userName) { public Result verifyUserName(String userName) {
cn.escheduler.api.utils.Result result = new cn.escheduler.api.utils.Result(); cn.escheduler.api.utils.Result result = new cn.escheduler.api.utils.Result();
User user = userMapper.queryByUserName(userName); User user = userMapper.queryByUserNameAccurately(userName);
if (user != null) { if (user != null) {
logger.error("user {} has exist, can't create again.", userName); logger.error("user {} has exist, can't create again.", userName);
@@ -614,7 +618,7 @@ public class UsersService extends BaseService {
return result; return result;
} }
List<User> userList = userMapper.queryAllUsers(); List<User> userList = userMapper.selectList(null);
List<User> resultUsers = new ArrayList<>(); List<User> resultUsers = new ArrayList<>();
Set<User> userSet = null; Set<User> userSet = null;
if (userList != null && userList.size() > 0) { if (userList != null && userList.size() > 0) {

20
escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java

@@ -19,13 +19,15 @@ package cn.escheduler.api.service;
import cn.escheduler.api.enums.Status; import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo; import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.dao.entity.WorkerGroup;
import cn.escheduler.dao.mapper.WorkerGroupMapper; import cn.escheduler.dao.mapper.WorkerGroupMapper;
import cn.escheduler.dao.model.User; import com.baomidou.mybatisplus.core.metadata.IPage;
import cn.escheduler.dao.model.WorkerGroup; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.lang.invoke.WrongMethodTypeException;
import java.util.Date; import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@@ -59,7 +61,7 @@ public class WorkerGroupService extends BaseService {
Date now = new Date(); Date now = new Date();
WorkerGroup workerGroup = null; WorkerGroup workerGroup = null;
if(id != 0){ if(id != 0){
workerGroup = workerGroupMapper.queryById(id); workerGroup = workerGroupMapper.selectById(id);
}else{ }else{
workerGroup = new WorkerGroup(); workerGroup = new WorkerGroup();
workerGroup.setCreateTime(now); workerGroup.setCreateTime(now);
@@ -73,7 +75,7 @@ public class WorkerGroupService extends BaseService {
return result; return result;
} }
if(workerGroup.getId() != 0 ){ if(workerGroup.getId() != 0 ){
workerGroupMapper.update(workerGroup); workerGroupMapper.updateById(workerGroup);
}else{ }else{
workerGroupMapper.insert(workerGroup); workerGroupMapper.insert(workerGroup);
} }
@@ -115,13 +117,13 @@ public class WorkerGroupService extends BaseService {
public Map<String,Object> queryAllGroupPaging(Integer pageNo, Integer pageSize, String searchVal) { public Map<String,Object> queryAllGroupPaging(Integer pageNo, Integer pageSize, String searchVal) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
int count = workerGroupMapper.countPaging(searchVal);
Page<WorkerGroup> page = new Page(pageNo, pageSize);
IPage<WorkerGroup> workerGroupIPage = workerGroupMapper.queryListPaging(
page, searchVal);
PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize); PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
List<WorkerGroup> workerGroupList = workerGroupMapper.queryListPaging(pageInfo.getStart(), pageSize, searchVal); pageInfo.setTotalCount((int)workerGroupIPage.getTotal());
pageInfo.setTotalCount(count); pageInfo.setLists(workerGroupIPage.getRecords());
pageInfo.setLists(workerGroupList);
result.put(Constants.DATA_LIST, pageInfo); result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
return result; return result;

2
escheduler-api/src/main/java/cn/escheduler/api/utils/ZookeeperMonitor.java

@@ -3,7 +3,7 @@ package cn.escheduler.api.utils;
import cn.escheduler.common.enums.ZKNodeType; import cn.escheduler.common.enums.ZKNodeType;
import cn.escheduler.common.zk.AbstractZKClient; import cn.escheduler.common.zk.AbstractZKClient;
import cn.escheduler.common.model.MasterServer; import cn.escheduler.common.model.MasterServer;
import cn.escheduler.dao.model.ZookeeperRecord; import cn.escheduler.dao.entity.ZookeeperRecord;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

11
escheduler-api/src/test/java/cn/escheduler/api/controller/AbstractControllerTest.java

@@ -17,12 +17,9 @@
package cn.escheduler.api.controller; package cn.escheduler.api.controller;
import cn.escheduler.api.ApiApplicationServer; import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.SessionService; import cn.escheduler.api.service.SessionService;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.common.utils.JSONUtils; import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.junit.*; import org.junit.*;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@@ -30,19 +27,13 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.context.WebApplicationContext; import org.springframework.web.context.WebApplicationContext;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@Ignore @Ignore
@RunWith(SpringRunner.class) @RunWith(SpringRunner.class)

2
escheduler-api/src/test/java/cn/escheduler/api/service/DataAnalysisServiceTest.java

@@ -20,7 +20,7 @@ import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.api.enums.Status; import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/DataSourceServiceTest.java

@@ -21,7 +21,7 @@ import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.DbType; import cn.escheduler.common.enums.DbType;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/LoggerServiceTest.java

@@ -20,7 +20,7 @@ import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.api.enums.Status; import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result; import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java

@@ -20,7 +20,7 @@ import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.api.enums.Status; import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;

2
escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java

@@ -22,7 +22,7 @@ import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.DependResult; import cn.escheduler.common.enums.DependResult;
import cn.escheduler.common.enums.ExecutionStatus; import cn.escheduler.common.enums.ExecutionStatus;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;

2
escheduler-api/src/test/java/cn/escheduler/api/service/ResourcesServiceTest.java

@@ -21,7 +21,7 @@ import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.ResourceType; import cn.escheduler.common.enums.ResourceType;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

4
escheduler-api/src/test/java/cn/escheduler/api/service/SchedulerServiceTest.java

@@ -21,8 +21,8 @@ import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.ReleaseState; import cn.escheduler.common.enums.ReleaseState;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.Project; import cn.escheduler.dao.entity.Project;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/SessionServiceTest.java

@@ -18,7 +18,7 @@ package cn.escheduler.api.service;
import cn.escheduler.api.ApiApplicationServer; import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;

2
escheduler-api/src/test/java/cn/escheduler/api/service/TaskInstanceServiceTest.java

@@ -21,7 +21,7 @@ import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo; import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/TenantServiceTest.java

@@ -20,7 +20,7 @@ import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.api.enums.Status; import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/UdfFuncServiceTest.java

@@ -21,7 +21,7 @@ import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo; import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

2
escheduler-api/src/test/java/cn/escheduler/api/service/UsersServiceTest.java

@@ -20,7 +20,7 @@ import cn.escheduler.api.ApiApplicationServer;
import cn.escheduler.api.enums.Status; import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.UserType; import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User; import cn.escheduler.dao.entity.User;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;

185
escheduler-common/.factorypath

@@ -0,0 +1,185 @@
<factorypath>
<factorypathentry kind="VARJAR" id="M2_REPO/com/alibaba/fastjson/1.2.29/fastjson-1.2.29.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/httpcomponents/httpclient/4.4.1/httpclient-4.4.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/httpcomponents/httpcore/4.4.1/httpcore-4.4.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-codec/commons-codec/1.6/commons-codec-1.6.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-configuration/commons-configuration/1.10/commons-configuration-1.10.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-lang/commons-lang/2.3/commons-lang-2.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/fasterxml/jackson/core/jackson-annotations/2.9.8/jackson-annotations-2.9.8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/fasterxml/jackson/core/jackson-databind/2.9.8/jackson-databind-2.9.8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/fasterxml/jackson/core/jackson-core/2.9.8/jackson-core-2.9.8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/zookeeper/zookeeper/3.4.8/zookeeper-3.4.8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/jline/jline/0.9.94/jline-0.9.94.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/google/guava/guava/20.0/guava-20.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-common/2.7.3/hadoop-common-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-annotations/2.7.3/hadoop-annotations-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-cli/commons-cli/1.2/commons-cli-1.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-httpclient/commons-httpclient/3.0.1/commons-httpclient-3.0.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-io/commons-io/2.4/commons-io-2.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/sun/jersey/jersey-json/1.9/jersey-json-1.9.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/codehaus/jackson/jackson-jaxrs/1.8.3/jackson-jaxrs-1.8.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/codehaus/jackson/jackson-xc/1.8.3/jackson-xc-1.8.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/asm/asm/3.1/asm-3.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/java/dev/jets3t/jets3t/0.9.0/jets3t-0.9.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/jamesmurty/utils/java-xmlbuilder/0.4/java-xmlbuilder-0.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-auth/2.7.3/hadoop-auth-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/jcraft/jsch/0.1.42/jsch-0.1.42.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/htrace/htrace-core/3.1.0-incubating/htrace-core-3.1.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/tukaani/xz/1.0/xz-1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-client/2.7.3/hadoop-client-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-app/2.7.3/hadoop-mapreduce-client-app-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-common/2.7.3/hadoop-mapreduce-client-common-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-client/2.7.3/hadoop-yarn-client-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-server-common/2.7.3/hadoop-yarn-server-common-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-api/2.7.3/hadoop-yarn-api-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-core/2.7.3/hadoop-mapreduce-client-core-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-common/2.7.3/hadoop-yarn-common-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/sun/jersey/jersey-client/1.9/jersey-client-1.9.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-jobclient/2.7.3/hadoop-mapreduce-client-jobclient-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-hdfs/2.7.3/hadoop-hdfs-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/log4j/log4j/1.2.17/log4j-1.2.17.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/xml-apis/xml-apis/1.4.01/xml-apis-1.4.01.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-aws/2.7.3/hadoop-aws-2.7.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/amazonaws/aws-java-sdk/1.7.4/aws-java-sdk-1.7.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/joda-time/joda-time/2.10.1/joda-time-2.10.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/postgresql/postgresql/42.1.4/postgresql-42.1.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-jdbc/2.1.0/hive-jdbc-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-common/2.1.0/hive-common-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-storage-api/2.1.0/hive-storage-api-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-orc/2.1.0/hive-orc-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/iq80/snappy/snappy/0.2/snappy-0.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/logging/log4j/log4j-1.2-api/2.11.2/log4j-1.2-api-2.11.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/logging/log4j/log4j-api/2.11.2/log4j-api-2.11.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/logging/log4j/log4j-core/2.11.2/log4j-core-2.11.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/logging/log4j/log4j-web/2.11.2/log4j-web-2.11.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-service/2.1.0/hive-service-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-llap-server/2.1.0/hive-llap-server-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-llap-common/2.1.0/hive-llap-common-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-llap-client/2.1.0/hive-llap-client-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-llap-tez/2.1.0/hive-llap-tez-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/slider/slider-core/0.90.2-incubating/slider-core-0.90.2-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/beust/jcommander/1.30/jcommander-1.30.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-registry/2.7.1/hadoop-yarn-registry-2.7.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/google/inject/extensions/guice-servlet/3.0/guice-servlet-3.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-hadoop2-compat/1.1.1/hbase-hadoop2-compat-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/commons/commons-math/2.2/commons-math-2.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-server/1.1.1/hbase-server-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-procedure/1.1.1/hbase-procedure-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-common/1.1.1/hbase-common-1.1.1-tests.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-prefix-tree/1.1.1/hbase-prefix-tree-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/mortbay/jetty/jetty-sslengine/6.1.26/jetty-sslengine-6.1.26.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/mortbay/jetty/jsp-2.1/6.1.14/jsp-2.1-6.1.14.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/mortbay/jetty/jsp-api-2.1/6.1.14/jsp-api-2.1-6.1.14.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/lmax/disruptor/3.3.0/disruptor-3.3.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-common/1.1.1/hbase-common-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-hadoop-compat/1.1.1/hbase-hadoop-compat-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/sf/jpam/jpam/1.1/jpam-1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/tomcat/jasper-compiler/5.5.23/jasper-compiler-5.5.23.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/servlet/jsp-api/2.0/jsp-api-2.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/ant/ant/1.6.5/ant-1.6.5.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-serde/2.1.0/hive-serde-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/parquet/parquet-hadoop-bundle/1.8.1/parquet-hadoop-bundle-1.8.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-metastore/2.1.0/hive-metastore-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javolution/javolution/5.5.1/javolution-5.5.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-client/1.1.1/hbase-client-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-annotations/1.1.1/hbase-annotations-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="EXTJAR" id="/Library/Java/JavaVirtualMachines/jdk1.8.0_171.jdk/Contents/Home/jre/../lib/tools.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hbase/hbase-protocol/1.1.1/hbase-protocol-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/jruby/jcodings/jcodings/1.0.8/jcodings-1.0.8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/jruby/joni/joni/2.1.2/joni-2.1.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/derby/derby/10.14.2.0/derby-10.14.2.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/datanucleus/datanucleus-api-jdo/4.2.1/datanucleus-api-jdo-4.2.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/datanucleus/datanucleus-core/4.1.6/datanucleus-core-4.1.6.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/datanucleus/datanucleus-rdbms/4.1.7/datanucleus-rdbms-4.1.7.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-pool/commons-pool/1.6/commons-pool-1.6.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/transaction/jta/1.1/jta-1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/antlr/antlr-runtime/3.4/antlr-runtime-3.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/antlr/stringtemplate/3.2.1/stringtemplate-3.2.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/antlr/antlr/2.7.7/antlr-2.7.7.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/google/inject/guice/3.0/guice-3.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/aopalliance/aopalliance/1.0/aopalliance-1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/it/unimi/dsi/fastutil/6.5.6/fastutil-6.5.6.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-shims/2.1.0/hive-shims-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/shims/hive-shims-common/2.1.0/hive-shims-common-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/shims/hive-shims-0.23/2.1.0/hive-shims-0.23-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-server-resourcemanager/2.6.0/hadoop-yarn-server-resourcemanager-2.6.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/sun/jersey/contribs/jersey-guice/1.9/jersey-guice-1.9.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/2.6.0/hadoop-yarn-server-applicationhistoryservice-2.6.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hadoop/hadoop-yarn-server-web-proxy/2.6.0/hadoop-yarn-server-web-proxy-2.6.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/shims/hive-shims-scheduler/2.1.0/hive-shims-scheduler-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/hive/hive-service-rpc/2.1.0/hive-service-rpc-2.1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/mybatis/mybatis/3.5.1/mybatis-3.5.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/ch/qos/logback/logback-classic/1.2.3/logback-classic-1.2.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/ch/qos/logback/logback-core/1.2.3/logback-core-1.2.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/github/oshi/oshi-core/3.5.0/oshi-core-3.5.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/java/dev/jna/jna-platform/4.5.2/jna-platform-4.5.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/java/dev/jna/jna/4.5.2/jna-4.5.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/threeten/threetenbp/1.3.6/threetenbp-1.3.6.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/ru/yandex/clickhouse/clickhouse-jdbc/0.1.52/clickhouse-jdbc-0.1.52.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/apache/httpcomponents/httpmime/4.5.7/httpmime-4.5.7.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/microsoft/sqlserver/mssql-jdbc/6.1.0.jre8/mssql-jdbc-6.1.0.jre8.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/microsoft/azure/azure-keyvault/0.9.3/azure-keyvault-0.9.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/microsoft/azure/azure-core/0.9.3/azure-core-0.9.3.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/mail/mail/1.4.5/mail-1.4.5.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/activation/activation/1.1/activation-1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/javax/inject/javax.inject/1/javax.inject-1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/microsoft/azure/adal4j/1.0.0/adal4j-1.0.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/nimbusds/oauth2-oidc-sdk/4.5/oauth2-oidc-sdk-4.5.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/jcip/jcip-annotations/1.0/jcip-annotations-1.0.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/net/minidev/json-smart/1.1.1/json-smart-1.1.1.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/nimbusds/lang-tag/1.4/lang-tag-1.4.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/nimbusds/nimbus-jose-jwt/3.1.2/nimbus-jose-jwt-3.1.2.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/org/bouncycastle/bcprov-jdk15on/1.51/bcprov-jdk15on-1.51.jar" enabled="true" runInBatchMode="false"/>
<factorypathentry kind="VARJAR" id="M2_REPO/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar" enabled="true" runInBatchMode="false"/>
</factorypath>

5
escheduler-common/pom.xml

@ -522,10 +522,7 @@
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis</artifactId>
</dependency>
<dependency> <dependency>
<groupId>ch.qos.logback</groupId> <groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId> <artifactId>logback-classic</artifactId>

7
escheduler-common/src/main/java/cn/escheduler/common/utils/EnumFieldUtil.java

@ -16,8 +16,6 @@
*/ */
package cn.escheduler.common.utils; package cn.escheduler.common.utils;
import org.apache.ibatis.type.EnumOrdinalTypeHandler;
/** /**
* enum field util * enum field util
*/ */
@ -30,7 +28,10 @@ public class EnumFieldUtil {
* @return * @return
*/ */
public static String genFieldStr(String field, Class<?> enumClass) { public static String genFieldStr(String field, Class<?> enumClass) {
return "#{" + field + ",javaType=" + enumClass.getName() + ",typeHandler=" + EnumOrdinalTypeHandler.class.getName() + "}"; //TODO...
// delete this class when mybatisplus is ok
return "";
// return "#{" + field + ",javaType=" + enumClass.getName() + ",typeHandler=" + EnumOrdinalTypeHandler.class.getName() + "}";
} }
} }
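For context on the stub above: before this change, genFieldStr produced a MyBatis parameter placeholder that bound an enum field through EnumOrdinalTypeHandler, so enums were persisted by their ordinal. A minimal sketch of how such a fragment is consumed in annotation-based SQL follows; the mapper, table and column names are illustrative assumptions, not code from this commit, and the #{...} fragment matches what genFieldStr("alertStatus", AlertStatus.class) returned before the stub.

    import cn.escheduler.common.enums.AlertStatus;
    import org.apache.ibatis.annotations.Insert;
    import org.apache.ibatis.annotations.Param;

    // Hypothetical mapper, for illustration only.
    public interface EnumFieldExampleMapper {

        @Insert("insert into t_escheduler_alert (title, alert_status) values (#{title}, " +
                "#{alertStatus,javaType=cn.escheduler.common.enums.AlertStatus," +
                "typeHandler=org.apache.ibatis.type.EnumOrdinalTypeHandler})")
        int insertAlertStatus(@Param("title") String title,
                              @Param("alertStatus") AlertStatus alertStatus);
    }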

63
escheduler-dao/pom.xml

@ -18,30 +18,29 @@
<artifactId>junit</artifactId> <artifactId>junit</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!--<dependency>-->
<!--<groupId>com.baomidou</groupId>-->
<!--<artifactId>mybatis-plus-dts</artifactId>-->
<!--</dependency>-->
<dependency> <dependency>
<groupId>org.mybatis.spring.boot</groupId> <groupId>org.postgresql</groupId>
<artifactId>mybatis-spring-boot-autoconfigure</artifactId> <artifactId>postgresql</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.mybatis.spring.boot</groupId> <groupId>org.projectlombok</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId> <artifactId>lombok</artifactId>
<exclusions> <version>${lombok.version}</version>
<exclusion>
<groupId>org.apache.tomcat</groupId>
<artifactId>tomcat-jdbc</artifactId>
</exclusion>
<exclusion>
<artifactId>log4j-to-slf4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
@ -153,11 +152,31 @@
<version>1.5.20</version> <version>1.5.20</version>
<scope>compile</scope> <scope>compile</scope>
</dependency> </dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<!--<version>1.10</version>-->
</dependency>
</dependencies> </dependencies>
<build> <build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.xml</include>
</includes>
<filtering>false</filtering>
</resource>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>**/*.xml</include>
<include>**/*.yml</include>
</includes>
<filtering>false</filtering>
</resource>
</resources>
<plugins> <plugins>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
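The two new com.baomidou artifacts above bring in the MyBatis-Plus core and its Spring Boot starter. A minimal sketch of the wiring they enable is below, assuming the standard Spring Boot auto-configuration path; the configuration class name is hypothetical, only the mapper package is taken from this project.

    import org.mybatis.spring.annotation.MapperScan;
    import org.springframework.context.annotation.Configuration;

    // Illustrative only: with mybatis-plus-boot-starter on the classpath, the starter builds
    // the SqlSessionFactory from the Spring datasource, so the DAO module mainly needs its
    // mapper interfaces scanned.
    @Configuration
    @MapperScan("cn.escheduler.dao.mapper")
    public class MybatisPlusWiringSketch {
    }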

20
escheduler-dao/src/main/java/cn/escheduler/dao/AlertDao.java

@ -19,13 +19,12 @@ package cn.escheduler.dao;
import cn.escheduler.common.enums.AlertStatus; import cn.escheduler.common.enums.AlertStatus;
import cn.escheduler.common.enums.AlertType; import cn.escheduler.common.enums.AlertType;
import cn.escheduler.common.enums.ShowType; import cn.escheduler.common.enums.ShowType;
import cn.escheduler.dao.datasource.ConnectionFactory; import cn.escheduler.dao.entity.Alert;
import cn.escheduler.dao.mapper.AlertMapper; import cn.escheduler.dao.mapper.AlertMapper;
import cn.escheduler.dao.mapper.UserAlertGroupMapper; import cn.escheduler.dao.mapper.UserAlertGroupMapper;
import cn.escheduler.dao.model.Alert; import cn.escheduler.dao.entity.ProcessDefinition;
import cn.escheduler.dao.model.ProcessDefinition; import cn.escheduler.dao.entity.ProcessInstance;
import cn.escheduler.dao.model.ProcessInstance; import cn.escheduler.dao.entity.User;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -48,8 +47,8 @@ public class AlertDao extends AbstractBaseDao {
@Override @Override
protected void init() { protected void init() {
alertMapper = ConnectionFactory.getSqlSession().getMapper(AlertMapper.class); // alertMapper = ConnectionFactory.getSqlSession().getMapper(AlertMapper.class);
userAlertGroupMapper = ConnectionFactory.getSqlSession().getMapper(UserAlertGroupMapper.class); // userAlertGroupMapper = ConnectionFactory.getSqlSession().getMapper(UserAlertGroupMapper.class);
} }
/** /**
@ -69,7 +68,11 @@ public class AlertDao extends AbstractBaseDao {
* @return * @return
*/ */
public int updateAlert(AlertStatus alertStatus,String log,int id){ public int updateAlert(AlertStatus alertStatus,String log,int id){
return alertMapper.update(alertStatus, log, new Date(), id); Alert alert = alertMapper.selectById(id);
alert.setAlertStatus(alertStatus);
alert.setUpdateTime(new Date());
alert.setLog(log);
return alertMapper.updateById(alert);
} }
/** /**
@ -78,6 +81,7 @@ public class AlertDao extends AbstractBaseDao {
* @return * @return
*/ */
public List<User> queryUserByAlertGroupId(int alerGroupId){ public List<User> queryUserByAlertGroupId(int alerGroupId){
return userAlertGroupMapper.queryForUser(alerGroupId); return userAlertGroupMapper.queryForUser(alerGroupId);
} }
/** /**
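The selectById/updateById calls introduced above are not hand-written queries; they are inherited once a mapper extends the MyBatis-Plus BaseMapper. A sketch of the shape AlertMapper needs for this hunk is shown below; the real interface in this commit may declare additional methods (queryForUser on UserAlertGroupMapper, for example, stays custom).

    import cn.escheduler.dao.entity.Alert;
    import com.baomidou.mybatisplus.core.mapper.BaseMapper;

    // Sketch: extending BaseMapper<Alert> supplies insert, deleteById, selectById and
    // updateById, which is all the select-then-update in updateAlert relies on.
    public interface AlertMapper extends BaseMapper<Alert> {
    }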

2
escheduler-dao/src/main/java/cn/escheduler/dao/MonitorDBDao.java

@ -17,7 +17,7 @@
package cn.escheduler.dao; package cn.escheduler.dao;
import cn.escheduler.common.Constants; import cn.escheduler.common.Constants;
import cn.escheduler.dao.model.MonitorRecord; import cn.escheduler.dao.entity.MonitorRecord;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;

152
escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java

@ -28,8 +28,8 @@ import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.common.utils.IpUtils; import cn.escheduler.common.utils.IpUtils;
import cn.escheduler.common.utils.JSONUtils; import cn.escheduler.common.utils.JSONUtils;
import cn.escheduler.common.utils.ParameterUtils; import cn.escheduler.common.utils.ParameterUtils;
import cn.escheduler.dao.entity.*;
import cn.escheduler.dao.mapper.*; import cn.escheduler.dao.mapper.*;
import cn.escheduler.dao.model.*;
import cn.escheduler.dao.utils.cron.CronUtils; import cn.escheduler.dao.utils.cron.CronUtils;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.cronutils.model.Cron; import com.cronutils.model.Cron;
@ -98,9 +98,6 @@ public class ProcessDao extends AbstractBaseDao {
@Autowired @Autowired
private ErrorCommandMapper errorCommandMapper; private ErrorCommandMapper errorCommandMapper;
@Autowired
private WorkerServerMapper workerServerMapper;
@Autowired @Autowired
private TenantMapper tenantMapper; private TenantMapper tenantMapper;
@ -110,7 +107,7 @@ public class ProcessDao extends AbstractBaseDao {
protected ITaskQueue taskQueue; protected ITaskQueue taskQueue;
public ProcessDao(){ public ProcessDao(){
init(); // init();
} }
/** /**
@ -129,7 +126,6 @@ public class ProcessDao extends AbstractBaseDao {
udfFuncMapper = getMapper(UdfFuncMapper.class); udfFuncMapper = getMapper(UdfFuncMapper.class);
resourceMapper = getMapper(ResourceMapper.class); resourceMapper = getMapper(ResourceMapper.class);
workerGroupMapper = getMapper(WorkerGroupMapper.class); workerGroupMapper = getMapper(WorkerGroupMapper.class);
workerServerMapper = getMapper(WorkerServerMapper.class);
taskQueue = TaskQueueFactory.getTaskQueueInstance(); taskQueue = TaskQueueFactory.getTaskQueueInstance();
tenantMapper = getMapper(TenantMapper.class); tenantMapper = getMapper(TenantMapper.class);
} }
@ -224,7 +220,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public Command findOneCommand(){ public Command findOneCommand(){
return commandMapper.queryOneCommand(); return commandMapper.getOneToRun();
} }
/** /**
@ -245,7 +241,7 @@ public class ProcessDao extends AbstractBaseDao {
JSONObject tempObj; JSONObject tempObj;
int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING); int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING);
List<Command> commands = commandMapper.queryAllCommand(); List<Command> commands = commandMapper.getAll(null);
//iterate over all commands //iterate over all commands
for (Command tmpCommand:commands){ for (Command tmpCommand:commands){
if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){ if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){
@ -276,7 +272,7 @@ public class ProcessDao extends AbstractBaseDao {
*/ */
public ProcessInstance findProcessInstanceById(int processId){ public ProcessInstance findProcessInstanceById(int processId){
return processInstanceMapper.queryById(processId); return processInstanceMapper.selectById(processId);
} }
/** /**
@ -297,7 +293,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public ProcessDefinition findProcessDefineById(int processDefinitionId) { public ProcessDefinition findProcessDefineById(int processDefinitionId) {
return processDefineMapper.queryByDefineId(processDefinitionId); return processDefineMapper.selectById(processDefinitionId);
} }
/** /**
@ -306,7 +302,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public int deleteWorkProcessInstanceById(int processInstanceId){ public int deleteWorkProcessInstanceById(int processInstanceId){
return processInstanceMapper.delete(processInstanceId); return processInstanceMapper.deleteById(processInstanceId);
} }
/** /**
@ -346,7 +342,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
private Integer workProcessThreadNumCount(Integer processDefinitionId){ private Integer workProcessThreadNumCount(Integer processDefinitionId){
List<String> ids = new ArrayList<>(); List<Integer> ids = new ArrayList<>();
recurseFindSubProcessId(processDefinitionId, ids); recurseFindSubProcessId(processDefinitionId, ids);
return ids.size()+1; return ids.size()+1;
} }
@ -356,8 +352,8 @@ public class ProcessDao extends AbstractBaseDao {
* @param parentId * @param parentId
* @param ids * @param ids
*/ */
public void recurseFindSubProcessId(int parentId, List<String> ids){ public void recurseFindSubProcessId(int parentId, List<Integer> ids){
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(parentId); ProcessDefinition processDefinition = processDefineMapper.selectById(parentId);
String processDefinitionJson = processDefinition.getProcessDefinitionJson(); String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
@ -370,7 +366,7 @@ public class ProcessDao extends AbstractBaseDao {
String parameter = taskNode.getParams(); String parameter = taskNode.getParams();
if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){ if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){
SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class); SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class);
ids.add(String.valueOf(subProcessParam.getProcessDefinitionId())); ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids);
} }
} }
@ -390,7 +386,7 @@ public class ProcessDao extends AbstractBaseDao {
// sub process doesnot need to create wait command // sub process doesnot need to create wait command
if(processInstance.getIsSubProcess() == Flag.YES){ if(processInstance.getIsSubProcess() == Flag.YES){
if(originCommand != null){ if(originCommand != null){
commandMapper.delete(originCommand.getId()); commandMapper.deleteById(originCommand.getId());
} }
return; return;
} }
@ -420,7 +416,7 @@ public class ProcessDao extends AbstractBaseDao {
saveCommand(originCommand); saveCommand(originCommand);
}else{ }else{
// delete old command and create new waiting thread command // delete old command and create new waiting thread command
commandMapper.delete(originCommand.getId()); commandMapper.deleteById(originCommand.getId());
originCommand.setId(0); originCommand.setId(0);
originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
originCommand.setUpdateTime(new Date()); originCommand.setUpdateTime(new Date());
@ -515,7 +511,7 @@ public class ProcessDao extends AbstractBaseDao {
tenant = tenantMapper.queryById(tenantId); tenant = tenantMapper.queryById(tenantId);
} }
if(tenant == null){ if(tenant == null){
User user = userMapper.queryById(userId); User user = userMapper.selectById(userId);
tenant = tenantMapper.queryById(user.getTenantId()); tenant = tenantMapper.queryById(user.getTenantId());
} }
return tenant; return tenant;
@ -553,7 +549,7 @@ public class ProcessDao extends AbstractBaseDao {
ProcessDefinition processDefinition = null; ProcessDefinition processDefinition = null;
if(command.getProcessDefinitionId() != 0){ if(command.getProcessDefinitionId() != 0){
processDefinition = processDefineMapper.queryByDefineId(command.getProcessDefinitionId()); processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId());
if(processDefinition == null){ if(processDefinition == null){
logger.error(String.format("cannot find the work process define! define id : %d", command.getProcessDefinitionId())); logger.error(String.format("cannot find the work process define! define id : %d", command.getProcessDefinitionId()));
return null; return null;
@ -584,7 +580,7 @@ public class ProcessDao extends AbstractBaseDao {
}else{ }else{
processInstance = this.findProcessInstanceDetailById(processInstanceId); processInstance = this.findProcessInstanceDetailById(processInstanceId);
} }
processDefinition = processDefineMapper.queryByDefineId(processInstance.getProcessDefinitionId()); processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId());
processInstance.setProcessDefinition(processDefinition); processInstance.setProcessDefinition(processDefinition);
//reset command parameter //reset command parameter
@ -954,7 +950,7 @@ public class ProcessDao extends AbstractBaseDao {
if(childDefinition != null && fatherDefinition != null){ if(childDefinition != null && fatherDefinition != null){
childDefinition.setReceivers(fatherDefinition.getReceivers()); childDefinition.setReceivers(fatherDefinition.getReceivers());
childDefinition.setReceiversCc(fatherDefinition.getReceiversCc()); childDefinition.setReceiversCc(fatherDefinition.getReceiversCc());
processDefineMapper.update(childDefinition); processDefineMapper.updateById(childDefinition);
} }
} }
@ -1157,7 +1153,7 @@ public class ProcessDao extends AbstractBaseDao {
} }
//create process instance //create process instance
if(workProcessInstance.getId() != 0){ if(workProcessInstance.getId() != 0){
processInstanceMapper.update(workProcessInstance); processInstanceMapper.updateById(workProcessInstance);
}else{ }else{
createProcessInstance(workProcessInstance); createProcessInstance(workProcessInstance);
} }
@ -1170,7 +1166,7 @@ public class ProcessDao extends AbstractBaseDao {
*/ */
public int saveCommand(Command command){ public int saveCommand(Command command){
if(command.getId() != 0){ if(command.getId() != 0){
return commandMapper.update(command); return commandMapper.updateById(command);
}else{ }else{
return commandMapper.insert(command); return commandMapper.insert(command);
} }
@ -1205,7 +1201,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public boolean updateTaskInstance(TaskInstance taskInstance){ public boolean updateTaskInstance(TaskInstance taskInstance){
int count = taskInstanceMapper.update(taskInstance); int count = taskInstanceMapper.updateById(taskInstance);
return count > 0; return count > 0;
} }
/** /**
@ -1213,7 +1209,7 @@ public class ProcessDao extends AbstractBaseDao {
* @param id * @param id
*/ */
public void delCommandByid(int id) { public void delCommandByid(int id) {
commandMapper.delete(id); commandMapper.deleteById(id);
} }
public TaskInstance findTaskInstanceById(Integer taskId){ public TaskInstance findTaskInstanceById(Integer taskId){
@ -1275,7 +1271,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){
return processInstanceMapMapper.update(processInstanceMap); return processInstanceMapMapper.updateById(processInstanceMap);
} }
@ -1359,7 +1355,8 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public int updateProcessInstance(ProcessInstance instance){ public int updateProcessInstance(ProcessInstance instance){
return processInstanceMapper.update(instance);
return processInstanceMapper.updateById(instance);
} }
/** /**
@ -1376,8 +1373,16 @@ public class ProcessDao extends AbstractBaseDao {
public int updateProcessInstance(Integer processInstanceId, String processJson, public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag, String globalParams, Date scheduleTime, Flag flag,
String locations, String connects){ String locations, String connects){
return processInstanceMapper.updateProcessInstance(processInstanceId, processJson, ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
globalParams, scheduleTime, locations, connects, flag); if(processInstance!= null){
processInstance.setProcessInstanceJson(processJson);
processInstance.setGlobalParams(globalParams);
processInstance.setScheduleTime(scheduleTime);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
return processInstanceMapper.updateById(processInstance);
}
return 0;
} }
/** /**
@ -1452,22 +1457,17 @@ public class ProcessDao extends AbstractBaseDao {
* @see Schedule * @see Schedule
*/ */
public Schedule querySchedule(int id) { public Schedule querySchedule(int id) {
return scheduleMapper.queryById(id); return scheduleMapper.selectById(id);
} }
public List<ProcessInstance> queryNeedFailoverProcessInstances(String host){ public List<ProcessInstance> queryNeedFailoverProcessInstances(String host){
return processInstanceMapper.queryByHostAndStatus(host, stateArray);
String states = StringUtils.join(stateArray, ",");
return processInstanceMapper.queryByHostAndStatus(host, states);
} }
/**
* update host null
* @param host
* @return
*/
public int updateNeddFailoverProcessInstances(String host){
return processInstanceMapper.setFailoverByHostAndStateArray(host, stateArray);
}
/** /**
* process need failover process instance * process need failover process instance
@ -1479,7 +1479,7 @@ public class ProcessDao extends AbstractBaseDao {
//1 update processInstance host is null //1 update processInstance host is null
processInstance.setHost("null"); processInstance.setHost("null");
processInstanceMapper.update(processInstance); processInstanceMapper.updateById(processInstance);
//2 insert into recover command //2 insert into recover command
Command cmd = new Command(); Command cmd = new Command();
@ -1497,16 +1497,8 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public List<TaskInstance> queryNeedFailoverTaskInstances(String host){ public List<TaskInstance> queryNeedFailoverTaskInstances(String host){
return taskInstanceMapper.queryByHostAndStatus(host, stateArray); return taskInstanceMapper.queryByHostAndStatus(host,
} StringUtils.join(stateArray, ","));
/**
* update host null
* @param host
* @return
*/
public int updateNeedFailoverTaskInstances(String host){
return taskInstanceMapper.setFailoverByHostAndStateArray(host, stateArray);
} }
/** /**
@ -1515,7 +1507,7 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public DataSource findDataSourceById(int id){ public DataSource findDataSourceById(int id){
return dataSourceMapper.queryById(id); return dataSourceMapper.selectById(id);
} }
@ -1526,7 +1518,9 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
return processInstanceMapper.updateState(processInstanceId, executionStatus); ProcessInstance instance = processInstanceMapper.selectById(processInstanceId);
instance.setState(executionStatus);
return processInstanceMapper.updateById(instance);
} }
@ -1545,7 +1539,8 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public List<UdfFunc> queryUdfFunListByids(String ids){ public List<UdfFunc> queryUdfFunListByids(String ids){
return udfFuncMapper.queryUdfByIdStr(ids);
return udfFuncMapper.queryUdfByIdStr(ids, null);
} }
/** /**
@ -1563,7 +1558,8 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public List<Schedule> selectAllByProcessDefineId(int[] ids){ public List<Schedule> selectAllByProcessDefineId(int[] ids){
return scheduleMapper.selectAllByProcessDefineArray(ids); return scheduleMapper.selectAllByProcessDefineArray(
StringUtils.join(ids, ","));
} }
/** /**
@ -1667,32 +1663,11 @@ public class ProcessDao extends AbstractBaseDao {
DateUtils.dateToString(startTime), DateUtils.dateToString(endTime)); DateUtils.dateToString(startTime), DateUtils.dateToString(endTime));
} }
public void selfFaultTolerant(int state){
List<ProcessInstance> processInstanceList = processInstanceMapper.listByStatus(new int[]{state});
for (ProcessInstance processInstance:processInstanceList){
selfFaultTolerant(processInstance);
}
}
/**
* master starup fault tolerant
*/
public void masterStartupFaultTolerant(){
int[] readyStopAndKill=new int[]{ExecutionStatus.READY_PAUSE.ordinal(),ExecutionStatus.READY_STOP.ordinal(),
ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),ExecutionStatus.RUNNING_EXEUTION.ordinal()};
List<ProcessInstance> processInstanceList = processInstanceMapper.listByStatus(readyStopAndKill);
for (ProcessInstance processInstance:processInstanceList){
processNeedFailoverProcessInstances(processInstance);
}
}
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) @Transactional(value = "TransactionManager",rollbackFor = Exception.class)
public void selfFaultTolerant(ProcessInstance processInstance){ public void selfFaultTolerant(ProcessInstance processInstance){
processInstance.setState(ExecutionStatus.FAILURE); processInstance.setState(ExecutionStatus.FAILURE);
processInstanceMapper.update(processInstance); processInstanceMapper.updateById(processInstance);
// insert to command // insert to command
Command command = new Command(); Command command = new Command();
@ -1740,7 +1715,17 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public String queryUserQueueByProcessInstanceId(int processInstanceId){ public String queryUserQueueByProcessInstanceId(int processInstanceId){
return userMapper.queryQueueByProcessInstanceId(processInstanceId);
String queue = "";
ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
if(processInstance == null){
return queue;
}
User executor = userMapper.selectById(processInstance.getExecutorId());
if(executor != null){
queue = executor.getQueue();
}
return queue;
} }
/** /**
@ -1749,21 +1734,10 @@ public class ProcessDao extends AbstractBaseDao {
* @return * @return
*/ */
public WorkerGroup queryWorkerGroupById(int workerGroupId){ public WorkerGroup queryWorkerGroupById(int workerGroupId){
return workerGroupMapper.queryById(workerGroupId);
}
/**
* query worker server by host
* @param host
* @return
*/
public List<WorkerServer> queryWorkerServerByHost(String host){
return workerServerMapper.queryWorkerByHost(host);
return workerGroupMapper.selectById(workerGroupId);
} }
/** /**
* get task worker group id * get task worker group id
* *
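Most lookups in this file now go through the inherited *ById methods, while conditional queries such as queryByHostAndStatus remain hand-written mapper methods (now fed a comma-joined state string). For comparison, the generic MyBatis-Plus alternative to such a custom query is a condition wrapper, sketched below; it assumes ProcessInstanceMapper extends BaseMapper<ProcessInstance> (which the selectById/updateById calls imply), and the "host" and "state" column names are assumptions about the existing schema.

    import cn.escheduler.dao.entity.ProcessInstance;
    import cn.escheduler.dao.mapper.ProcessInstanceMapper;
    import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;

    import java.util.List;

    // Illustration only, not code from this commit: the same host + state filter expressed
    // through the MyBatis-Plus condition wrapper instead of a hand-written SQL statement.
    public final class FailoverQuerySketch {

        private FailoverQuerySketch() {
        }

        public static List<ProcessInstance> queryNeedFailover(ProcessInstanceMapper mapper,
                                                              String host,
                                                              Integer... states) {
            QueryWrapper<ProcessInstance> wrapper = new QueryWrapper<>();
            wrapper.eq("host", host).in("state", (Object[]) states);
            return mapper.selectList(wrapper);
        }
    }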

164
escheduler-dao/src/main/java/cn/escheduler/dao/ServerDao.java

@ -1,164 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao;
import cn.escheduler.common.model.MasterServer;
import cn.escheduler.dao.mapper.MasterServerMapper;
import cn.escheduler.dao.mapper.WorkerServerMapper;
import cn.escheduler.dao.model.WorkerServer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.Date;
import static cn.escheduler.dao.datasource.ConnectionFactory.getMapper;
/**
* server dao
*/
@Component
public class ServerDao extends AbstractBaseDao {
@Autowired
MasterServerMapper masterServerMapper;
@Autowired
WorkerServerMapper workerServerMapper;
@Override
protected void init() {
masterServerMapper = getMapper(MasterServerMapper.class);
workerServerMapper = getMapper(WorkerServerMapper.class);
}
/**
* register master
*
* @param host
* @param port
* @param zkDirectory
* @param resInfo
* @param createTime
* @param lastHeartbeatTime
* @return
*/
public int registerMaster(String host, int port , String zkDirectory , String resInfo ,
Date createTime , Date lastHeartbeatTime) {
MasterServer masterServer = new MasterServer();
masterServer.setHost(host);
masterServer.setPort(port);
masterServer.setZkDirectory(zkDirectory);
masterServer.setResInfo(resInfo);
masterServer.setCreateTime(createTime);
masterServer.setLastHeartbeatTime(lastHeartbeatTime);
return masterServerMapper.insert(masterServer);
}
/**
* update master
*
* @param host
* @param port
* @param resInfo
* @param lastHeartbeatTime
* @return
*/
public int updateMaster(String host, int port , String resInfo , Date lastHeartbeatTime) {
MasterServer masterServer = new MasterServer();
masterServer.setHost(host);
masterServer.setPort(port);
masterServer.setResInfo(resInfo);
masterServer.setLastHeartbeatTime(lastHeartbeatTime);
return masterServerMapper.update(masterServer);
}
/**
* delete master
*
* @param host
* @return
*/
public int deleteMaster(String host) {
return masterServerMapper.deleteWorkerByHost(host);
}
/**
* register master
* @param host
* @param port
* @param zkDirectory
* @param resInfo
* @param createTime
* @param lastHeartbeatTime
* @return
*/
public int registerWorker(String host, int port , String zkDirectory , String resInfo ,
Date createTime , Date lastHeartbeatTime) {
WorkerServer workerServer = new WorkerServer();
workerServer.setHost(host);
workerServer.setPort(port);
workerServer.setZkDirectory(zkDirectory);
workerServer.setResInfo(resInfo);
workerServer.setCreateTime(createTime);
workerServer.setLastHeartbeatTime(lastHeartbeatTime);
return workerServerMapper.insert(workerServer);
}
/**
*
* update worker
* @param host
* @param port
* @param resInfo
* @param lastHeartbeatTime
* @return
*/
public int updateWorker(String host, int port , String resInfo , Date lastHeartbeatTime) {
WorkerServer workerServer = new WorkerServer();
workerServer.setHost(host);
workerServer.setPort(port);
workerServer.setResInfo(resInfo);
workerServer.setLastHeartbeatTime(lastHeartbeatTime);
return workerServerMapper.update(workerServer);
}
/**
* delete worker by host
*
* @param host
* @return
*/
public int deleteWorker(String host) {
return workerServerMapper.deleteWorkerByHost(host);
}
}

3
escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java

@ -20,7 +20,7 @@ import cn.escheduler.common.Constants;
import cn.escheduler.common.enums.TaskRecordStatus; import cn.escheduler.common.enums.TaskRecordStatus;
import cn.escheduler.common.utils.CollectionUtils; import cn.escheduler.common.utils.CollectionUtils;
import cn.escheduler.common.utils.DateUtils; import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.dao.model.TaskRecord; import cn.escheduler.dao.entity.TaskRecord;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
@ -30,7 +30,6 @@ import org.slf4j.LoggerFactory;
import java.sql.*; import java.sql.*;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;

53
escheduler-dao/src/main/java/cn/escheduler/dao/datasource/ConnectionFactory.java

@ -16,9 +16,6 @@
*/ */
package cn.escheduler.dao.datasource; package cn.escheduler.dao.datasource;
import cn.escheduler.common.Constants;
import cn.escheduler.common.utils.CommonUtils;
import cn.escheduler.dao.mapper.ProjectMapper;
import com.alibaba.druid.pool.DruidDataSource; import com.alibaba.druid.pool.DruidDataSource;
import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.Configuration;
@ -32,9 +29,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.SQLException;
import static cn.escheduler.dao.utils.PropertyUtils.*;
/** /**
@ -51,39 +46,17 @@ public class ConnectionFactory {
public static DruidDataSource getDataSource() { public static DruidDataSource getDataSource() {
DruidDataSource druidDataSource = new DruidDataSource(); DruidDataSource druidDataSource = new DruidDataSource();
druidDataSource.setDriverClassName(getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); druidDataSource.setDriverClassName("com.mysql.jdbc.Driver");
druidDataSource.setUrl(getString(Constants.SPRING_DATASOURCE_URL)); druidDataSource.setUrl("jdbc:mysql://192.168.220.188:3306/escheduler?useUnicode=true&characterEncoding=UTF-8");
druidDataSource.setUsername(getString(Constants.SPRING_DATASOURCE_USERNAME)); druidDataSource.setUsername("root");
druidDataSource.setPassword(getString(Constants.SPRING_DATASOURCE_PASSWORD)); druidDataSource.setPassword("root@123");
druidDataSource.setValidationQuery(getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY)); druidDataSource.setInitialSize(5);
druidDataSource.setMinIdle(5);
druidDataSource.setPoolPreparedStatements(getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS)); druidDataSource.setMaxActive(20);
druidDataSource.setTestWhileIdle(getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE)); druidDataSource.setMaxWait(60000);
druidDataSource.setTestOnBorrow(getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW)); druidDataSource.setTimeBetweenEvictionRunsMillis(60000);
druidDataSource.setTestOnReturn(getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN)); druidDataSource.setMinEvictableIdleTimeMillis(300000);
druidDataSource.setKeepAlive(getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE)); druidDataSource.setValidationQuery("SELECT 1");
//just for development
/*if (CommonUtils.isDevelopMode()) {
//Configure filters that are intercepted by monitoring statistics, and SQL can not be counted after removing them.'wall'is used for firewall
try {
druidDataSource.setFilters("stat,wall,log4j");
} catch (SQLException e) {
logger.error(e.getMessage(), e);
}
}*/
druidDataSource.setMinIdle(getInt(Constants.SPRING_DATASOURCE_MIN_IDLE));
druidDataSource.setMaxActive(getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE));
druidDataSource.setMaxWait(getInt(Constants.SPRING_DATASOURCE_MAX_WAIT));
druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE));
druidDataSource.setInitialSize(getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE));
druidDataSource.setTimeBetweenEvictionRunsMillis(getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS));
druidDataSource.setTimeBetweenConnectErrorMillis(getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS));
druidDataSource.setMinEvictableIdleTimeMillis(getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS));
druidDataSource.setValidationQueryTimeout(getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT));
//auto commit
druidDataSource.setDefaultAutoCommit(getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT));
return druidDataSource; return druidDataSource;
} }
@ -97,12 +70,12 @@ public class ConnectionFactory {
DataSource dataSource = getDataSource(); DataSource dataSource = getDataSource();
TransactionFactory transactionFactory = new JdbcTransactionFactory(); TransactionFactory transactionFactory = new JdbcTransactionFactory();
Environment environment = new Environment(Constants.DEVELOPMENT, transactionFactory, dataSource); Environment environment = new Environment("development", transactionFactory, dataSource);
Configuration configuration = new Configuration(environment); Configuration configuration = new Configuration(environment);
configuration.setLazyLoadingEnabled(true); configuration.setLazyLoadingEnabled(true);
configuration.addMappers("cn.escheduler.dao.mapper");
configuration.addMappers(ProjectMapper.class.getPackage().getName());
SqlSessionFactoryBuilder builder = new SqlSessionFactoryBuilder(); SqlSessionFactoryBuilder builder = new SqlSessionFactoryBuilder();
sqlSessionFactory = builder.build(configuration); sqlSessionFactory = builder.build(configuration);

126
escheduler-dao/src/main/java/cn/escheduler/dao/datasource/DatabaseConfiguration.java

@@ -1,63 +1,63 @@
(The whole class is commented out in this change: every line below gains a leading //, and the commented-out @PropertySource now references "classpath:application.yml" instead of "classpath:dao/data_source.properties".)

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.dao.datasource;

import com.alibaba.druid.pool.DruidDataSource;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;

import java.sql.SQLException;

/**
 * data base configuration
 */
@Configuration
@PropertySource({"classpath:dao/data_source.properties"})
@MapperScan(basePackages = "cn.escheduler.dao.mapper", sqlSessionFactoryRef = "SqlSessionFactory")
public class DatabaseConfiguration {

    /**
     * register data source
     */
    @Primary
    @Bean(name = "DataSource", initMethod = "init", destroyMethod = "close")
    public DruidDataSource dataSource() {
        return ConnectionFactory.getDataSource();
    }

    @Primary
    @Bean(name = "SqlSessionFactory")
    public SqlSessionFactory sqlSessionFactory() throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(dataSource());

        return sqlSessionFactoryBean.getObject();
    }

    @Primary
    @Bean(name = "TransactionManager")
    public PlatformTransactionManager transactionManager() throws SQLException {
        return new DataSourceTransactionManager(dataSource());
    }
}
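With this Spring @Configuration commented out, the factory is assembled programmatically in ConnectionFactory instead. For reference only: had the Spring wiring been kept, the usual MyBatis-Plus flavour of it swaps SqlSessionFactoryBean for MybatisSqlSessionFactoryBean so that @TableName/@TableField metadata and the BaseMapper CRUD statements are honoured. A sketch under that assumption (the class name is hypothetical and this is not something the diff contains):

    import com.alibaba.druid.pool.DruidDataSource;
    import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
    import org.apache.ibatis.session.SqlSessionFactory;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class MybatisPlusConfiguration {   // hypothetical name, for illustration only

        @Bean(name = "SqlSessionFactory")
        public SqlSessionFactory sqlSessionFactory(DruidDataSource dataSource) throws Exception {
            // MybatisSqlSessionFactoryBean is the MyBatis-Plus drop-in for SqlSessionFactoryBean
            MybatisSqlSessionFactoryBean factoryBean = new MybatisSqlSessionFactoryBean();
            factoryBean.setDataSource(dataSource);
            return factoryBean.getObject();
        }
    }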

78
escheduler-dao/src/main/java/cn/escheduler/dao/model/AccessToken.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/AccessToken.java

@@ -1,7 +1,3 @@
-package cn.escheduler.dao.model;
-
-import java.util.Date;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -18,41 +14,40 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package cn.escheduler.dao.entity;
+
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;
+
+import java.util.Date;
+
+@Data
+@TableName("t_escheduler_access_token")
 public class AccessToken {
-    /**
-     * id
-     */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;
-    /**
-     * user id
-     */
-    private int userId;
-    /**
-     * user name
-     */
-    private String userName;
-    /**
-     * user token
-     */
+    @TableField("user_id")
+    private int userId;
+    @TableField("token")
     private String token;
-    /**
-     * token expire time
-     */
+    @TableField(exist = false)
+    private String userName;
+    @TableField("expire_time")
     private Date expireTime;
-    /**
-     * create time
-     */
+    @TableField("create_time")
     private Date createTime;
-    /**
-     * update time
-     */
+    @TableField("update_time")
     private Date updateTime;

     public int getId() {
@@ -79,6 +74,14 @@ public class AccessToken {
         this.token = token;
     }

+    public String getUserName() {
+        return userName;
+    }
+
+    public void setUserName(String userName) {
+        this.userName = userName;
+    }
+
     public Date getExpireTime() {
         return expireTime;
     }
@@ -102,25 +105,4 @@ public class AccessToken {
     public void setUpdateTime(Date updateTime) {
         this.updateTime = updateTime;
     }
-
-    public String getUserName() {
-        return userName;
-    }
-
-    public void setUserName(String userName) {
-        this.userName = userName;
-    }
-
-    @Override
-    public String toString() {
-        return "AccessToken{" +
-                "id=" + id +
-                ", userId=" + userId +
-                ", userName='" + userName + '\'' +
-                ", token='" + token + '\'' +
-                ", expireTime=" + expireTime +
-                ", createTime=" + createTime +
-                ", updateTime=" + updateTime +
-                '}';
-    }
 }
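The @TableField(exist = false) marker on userName is what lets the entity carry a joined-in display field without MyBatis-Plus trying to read or write a user_name column on t_escheduler_access_token. A small sketch of the effect, assuming an AccessTokenMapper that extends BaseMapper<AccessToken> (the mapper itself is not part of this excerpt):

    import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
    import java.util.List;

    // Generic CRUD only touches mapped columns (id, user_id, token, expire_time, ...).
    List<AccessToken> tokens = accessTokenMapper.selectList(
            new QueryWrapper<AccessToken>().eq("user_id", 10));
    // userName stays null here: since it is exist = false, it has to be filled by a
    // hand-written join statement that selects the user's name alongside the token row.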

16
escheduler-dao/src/main/java/cn/escheduler/dao/model/Alert.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/Alert.java

@@ -14,11 +14,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

-import cn.escheduler.common.enums.AlertStatus;
-import cn.escheduler.common.enums.AlertType;
-import cn.escheduler.common.enums.ShowType;
+import cn.escheduler.common.enums.*;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;
 import java.util.HashMap;
@@ -27,11 +30,14 @@ import java.util.Map;
 /**
  * alert
  */
+@Data
+@TableName("t_escheduler_alert")
 public class Alert {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**
@@ -68,6 +74,7 @@ public class Alert {
     /**
      * alert group id
      */
+    @TableField("alertgroup_id")
     private int alertGroupId;
@@ -92,6 +99,7 @@ public class Alert {
     private Date updateTime;

+    @TableField(exist = false)
     private Map<String,Object> info = new HashMap<>();

     public Map<String, Object> getInfo() {

42
escheduler-dao/src/main/java/cn/escheduler/dao/model/AlertGroup.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/AlertGroup.java

@@ -14,25 +14,33 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.AlertType;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;

-/**
- * alert group
- */
+@Data
+@TableName("t_escheduler_alertgroup")
 public class AlertGroup {
     /**
      * primary key
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;
     /**
      * alert group name
      */
     private String groupName;
     /**
      * alert group type
      */
@@ -41,7 +49,7 @@ public class AlertGroup {
     /**
      * alert group description
      */
-    private String desc;
+    private String description;

     /**
      * create time
@@ -77,14 +85,6 @@ public class AlertGroup {
         this.groupType = groupType;
     }

-    public String getDesc() {
-        return desc;
-    }
-
-    public void setDesc(String desc) {
-        this.desc = desc;
-    }
-
     public Date getCreateTime() {
         return createTime;
     }
@@ -101,15 +101,11 @@ public class AlertGroup {
         this.updateTime = updateTime;
     }

-    @Override
-    public String toString() {
-        return "AlertGroup{" +
-                "id=" + id +
-                ", groupName='" + groupName + '\'' +
-                ", groupType=" + groupType +
-                ", desc='" + desc + '\'' +
-                ", createTime=" + createTime +
-                ", updateTime=" + updateTime +
-                '}';
-    }
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
 }

24
escheduler-dao/src/main/java/cn/escheduler/dao/model/Command.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/Command.java

@@ -14,86 +14,107 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.*;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;

 /**
  * command
  */
+@Data
+@TableName("t_escheduler_command")
 public class Command {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;
     /**
      * command type
      */
+    @TableField("command_type")
     private CommandType commandType;
     /**
      * process definition id
      */
+    @TableField("process_definition_id")
     private int processDefinitionId;
     /**
      * executor id
      */
+    @TableField("executor_id")
     private int executorId;
     /**
      * command parameter, format json
      */
+    @TableField("command_param")
     private String commandParam;
     /**
      * task depend type
      */
+    @TableField("task_depend_type")
     private TaskDependType taskDependType;
     /**
      * failure strategy
      */
+    @TableField("failure_strategy")
     private FailureStrategy failureStrategy;
     /**
      * warning type
      */
+    @TableField("warning_type")
     private WarningType warningType;
     /**
      * warning group id
      */
+    @TableField("warning_group_id")
     private Integer warningGroupId;
     /**
      * schedule time
      */
+    @TableField("schedule_time")
     private Date scheduleTime;
     /**
      * start time
      */
+    @TableField("start_time")
     private Date startTime;
     /**
      * process instance priority
      */
+    @TableField("process_instance_priority")
     private Priority processInstancePriority;
     /**
      * update time
      */
+    @TableField("update_time")
     private Date updateTime;
     /**
      *
      */
+    @TableField("worker_group_id")
     private int workerGroupId;

@@ -264,3 +285,4 @@ public class Command {
                 '}';
     }
 }
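Because id is declared @TableId(type = IdType.AUTO), a plain insert through the generic mapper lets the database assign the key and writes it back into the entity. A sketch, assuming a CommandMapper that extends BaseMapper<Command> and that the enum constant below exists upstream (neither is shown in this excerpt):

    // Inside a service/DAO method with an injected commandMapper:
    Command command = new Command();
    command.setCommandType(CommandType.START_PROCESS);   // assumed enum constant
    command.setProcessDefinitionId(processDefinitionId);
    command.setExecutorId(executorId);
    commandMapper.insert(command);                        // INSERT into t_escheduler_command
    int generatedId = command.getId();                    // populated from the auto-increment column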

58
escheduler-dao/src/main/java/cn/escheduler/dao/entity/CommandCount.java

@@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.entity;
import cn.escheduler.common.enums.CommandType;
public class CommandCount {
/**
* execution state
*/
private CommandType commandType;
/**
* count for state
*/
private int count;
@Override
public String toString(){
return "command count:" +
" commandType: "+ commandType.toString() +
" count: "+ count;
}
public CommandType getCommandType() {
return commandType;
}
public void setCommandType(CommandType commandType) {
this.commandType = commandType;
}
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
}

2
escheduler-dao/src/main/java/cn/escheduler/dao/model/CycleDependency.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/CycleDependency.java

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.CycleEnum;

12
escheduler-dao/src/main/java/cn/escheduler/dao/model/DataSource.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/DataSource.java

@@ -14,17 +14,26 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.DbType;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;

+@Data
+@TableName("t_escheduler_datasource")
 public class DataSource {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**
@@ -35,6 +44,7 @@ public class DataSource {
     /**
      * user name
      */
+    @TableField(exist = false)
     private String userName;

     /**

8
escheduler-dao/src/main/java/cn/escheduler/dao/model/DatasourceUser.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/DatasourceUser.java

@@ -14,18 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;

 import java.util.Date;

 /**
  * data source user relation
  */
+@TableName("t_escheduler_relation_datasource_user")
 public class DatasourceUser {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**

2
escheduler-dao/src/main/java/cn/escheduler/dao/model/DefinitionGroupByUser.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/DefinitionGroupByUser.java

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 /**
  * count definition number group by user

2
escheduler-dao/src/main/java/cn/escheduler/dao/model/Dependency.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/Dependency.java

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.SelfDependStrategy;

8
escheduler-dao/src/main/java/cn/escheduler/dao/model/ErrorCommand.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/ErrorCommand.java

@@ -14,20 +14,25 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.*;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;

 import java.util.Date;

 /**
  * command
  */
+@TableName("t_escheduler_error_command")
 public class ErrorCommand {
     /**
      * id
      */
+    @TableId(value="id", type = IdType.INPUT)
     private int id;

     /**
@@ -100,6 +105,7 @@ public class ErrorCommand {
      */
     private int workerGroupId;

+    public ErrorCommand(){}

     public ErrorCommand(Command command, String message){
         this.id = command.getId();

2
escheduler-dao/src/main/java/cn/escheduler/dao/model/ExecuteStatusCount.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/ExecuteStatusCount.java

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.ExecutionStatus;

2
escheduler-dao/src/main/java/cn/escheduler/dao/model/MonitorRecord.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/MonitorRecord.java

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import java.util.Date;

2
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessData.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessData.java

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.model.TaskNode;
 import cn.escheduler.common.process.Property;

41
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessDefinition.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessDefinition.java

@@ -14,26 +14,34 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

-import cn.escheduler.common.enums.Flag;
-import cn.escheduler.common.enums.ReleaseState;
+import cn.escheduler.common.enums.*;
 import cn.escheduler.common.process.Property;
 import com.alibaba.fastjson.JSONObject;
-import org.apache.commons.lang3.StringUtils;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import com.baomidou.mybatisplus.core.toolkit.StringUtils;
+import lombok.Data;

 import java.util.Date;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;

 /**
  * process definition
  */
+@Data
+@TableName("t_escheduler_process_definition")
 public class ProcessDefinition {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**
@@ -64,7 +72,7 @@ public class ProcessDefinition {
     /**
      * description
      */
-    private String desc;
+    private String description;

     /**
      * user defined parameters
@@ -74,11 +82,13 @@ public class ProcessDefinition {
     /**
      * user defined parameter list
      */
+    @TableField(exist=false)
     private List<Property> globalParamList;

     /**
      * user define parameter map
      */
+    @TableField(exist=false)
     private Map<String,String> globalParamMap;

     /**
@@ -104,11 +114,13 @@ public class ProcessDefinition {
     /**
      * user name
      */
+    @TableField(exist = false)
     private String userName;

     /**
      * project name
      */
+    @TableField(exist = false)
     private String projectName;

     /**
@@ -134,6 +146,7 @@ public class ProcessDefinition {
     /**
      * schedule release state : online/offline
      */
+    @TableField(exist=false)
     private ReleaseState scheduleReleaseState;

     /**
@@ -277,15 +290,6 @@ public class ProcessDefinition {
         this.globalParamMap = globalParamMap;
     }

-    public String getDesc() {
-        return desc;
-    }
-
-    public void setDesc(String desc) {
-        this.desc = desc;
-    }
-
     public String getLocations() {
         return locations;
     }
@@ -343,7 +347,6 @@ public class ProcessDefinition {
                 ", releaseState=" + releaseState +
                 ", projectId=" + projectId +
                 ", processDefinitionJson='" + processDefinitionJson + '\'' +
-                ", desc='" + desc + '\'' +
                 ", globalParams='" + globalParams + '\'' +
                 ", globalParamList=" + globalParamList +
                 ", globalParamMap=" + globalParamMap +
@@ -370,4 +373,12 @@ public class ProcessDefinition {
     public void setTenantId(int tenantId) {
         this.tenantId = tenantId;
     }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
 }
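A knock-on effect of renaming desc to description (presumably to avoid the SQL keyword collision that Project sidesteps with backticks around `desc`) is that callers and query wrappers now refer to the new property. A sketch of a typed lookup, assuming a ProcessDefinitionMapper that extends BaseMapper<ProcessDefinition> and an injected projectId (both assumptions, not shown here):

    import cn.escheduler.common.enums.ReleaseState;
    import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
    import java.util.List;

    // Look up the online definitions of one project; method references resolve to the
    // mapped column names, so a future property rename breaks at compile time, not in SQL.
    List<ProcessDefinition> definitions = processDefinitionMapper.selectList(
            new LambdaQueryWrapper<ProcessDefinition>()
                    .eq(ProcessDefinition::getProjectId, projectId)
                    .eq(ProcessDefinition::getReleaseState, ReleaseState.ONLINE));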

13
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessInstance.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstance.java

@@ -14,21 +14,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.*;
-import org.apache.commons.lang3.StringUtils;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import com.baomidou.mybatisplus.core.toolkit.StringUtils;
+import lombok.Data;

 import java.util.Date;

 /**
  * process instance
  */
+@Data
+@TableName("t_escheduler_process_instance")
 public class ProcessInstance {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**
      * process definition id
@@ -177,7 +184,6 @@ public class ProcessInstance {
      */
     private Priority processInstancePriority;
-
     /**
      * worker group id
      */
@@ -188,7 +194,6 @@ public class ProcessInstance {
      */
     private int timeout;
-
     /**
      * tenant id
      */

10
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessInstanceMap.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstanceMap.java

@@ -14,16 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 /**
  * process instance map
  */
+@Data
+@TableName("t_escheduler_relation_process_instance")
 public class ProcessInstanceMap {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**

20
escheduler-dao/src/main/java/cn/escheduler/dao/model/Project.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/Project.java

@@ -14,63 +14,81 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;

 /**
  * project
  */
+@Data
+@TableName("t_escheduler_project")
 public class Project {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;
     /**
      * user id
      */
+    @TableField("user_id")
     private int userId;
     /**
      * user name
      */
+    @TableField(exist=false)
     private String userName;
     /**
      * project name
      */
+    @TableField("name")
     private String name;
     /**
      * project description
      */
+    @TableField("`desc`")
     private String desc;
     /**
      * create time
      */
+    @TableField("`create_time`")
     private Date createTime;
     /**
      * update time
      */
+    @TableField("`update_time`")
     private Date updateTime;
     /**
      * permission
      */
+    @TableField(exist=false)
     private int perm;
     /**
      * process define count
      */
+    @TableField(exist=false)
     private int defCount;
     /**
      * process instance running count
      */
+    @TableField(exist=false)
     private int instRunningCount;

     public int getDefCount() {

141
escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProjectUser.java

@@ -0,0 +1,141 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.dao.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.util.Date;
@Data
@TableName("t_escheduler_relation_project_user")
public class ProjectUser {
/**
* id
*/
@TableId(value="id", type=IdType.AUTO)
private int id;
@TableField("user_id")
private int userId;
@TableField("project_id")
private int projectId;
/**
* project name
*/
@TableField(exist = false)
private String projectName;
/**
* user name
*/
@TableField(exist = false)
private String userName;
/**
* permission
*/
private int perm;
@TableField("create_time")
private Date createTime;
@TableField("update_time")
private Date updateTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
public int getProjectId() {
return projectId;
}
public void setProjectId(int projectId) {
this.projectId = projectId;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
public String getProjectName() {
return projectName;
}
public void setProjectName(String projectName) {
this.projectName = projectName;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public int getPerm() {
return perm;
}
public void setPerm(int perm) {
this.perm = perm;
}
@Override
public String toString() {
return "ProjectUser{" +
"id=" + id +
", projectId=" + projectId +
", projectName='" + projectName + '\'' +
", userId=" + userId +
", userName='" + userName + '\'' +
", perm=" + perm +
", createTime=" + createTime +
", updateTime=" + updateTime +
'}';
}
}

10
escheduler-dao/src/main/java/cn/escheduler/dao/model/Queue.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/Queue.java

@@ -14,18 +14,26 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;

 /**
  * queue
  */
+@Data
+@TableName("t_escheduler_queue")
 public class Queue {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;
     /**
      * queue name

10
escheduler-dao/src/main/java/cn/escheduler/dao/model/Resource.java → escheduler-dao/src/main/java/cn/escheduler/dao/entity/Resource.java

@@ -14,16 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package cn.escheduler.dao.model;
+package cn.escheduler.dao.entity;

 import cn.escheduler.common.enums.ResourceType;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;

 import java.util.Date;

+@Data
+@TableName("t_escheduler_resources")
 public class Resource {
     /**
      * id
      */
+    @TableId(value="id", type=IdType.AUTO)
     private int id;

     /**
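Across these entities the pattern is the same: @TableName binds the class to its table, @TableId marks the auto-increment key, and @TableField either renames a column or, with exist = false, excludes a transient property. List endpoints can then lean on MyBatis-Plus pagination instead of hand-written LIMIT clauses; a sketch, assuming a ResourceMapper that extends BaseMapper<Resource> and that a pagination interceptor/plugin is registered (neither is visible in this excerpt):

    import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
    import com.baomidou.mybatisplus.core.metadata.IPage;
    import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

    // Fetch page 1 (10 rows) of a user's resources, newest first.
    IPage<Resource> page = resourceMapper.selectPage(
            new Page<>(1, 10),
            new QueryWrapper<Resource>().eq("user_id", userId).orderByDesc("create_time"));
    long total = page.getTotal();   // total row count, filled in by the paging plugin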

Some files were not shown because too many files have changed in this diff.