
Merge remote-tracking branch 'remotes/upstream/dev-1.1.0' into dev-1.1.0

pull/2/head
lgcareer committed 6 years ago
commit 94cff3b7ff
  1. 271
      docs/zh_CN/EasyScheduler-FAQ.md
  2. BIN
      docs/zh_CN/images/cdh_hive_error.png
  3. BIN
      docs/zh_CN/images/master_worker_lack_res.png
  4. 4
      docs/zh_CN/前端部署文档.md
  5. 4
      docs/zh_CN/后端部署文档.md
  6. 2
      docs/zh_CN/系统使用手册.md
  7. 2
      escheduler-alert/pom.xml
  8. 1
      escheduler-alert/src/main/java/cn/escheduler/alert/utils/MailUtils.java
  9. 2
      escheduler-api/pom.xml
  10. 46
      escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java
  11. 4
      escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java
  12. 3
      escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java
  13. 40
      escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java
  14. 5
      escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java
  15. 7
      escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java
  16. 9
      escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java
  17. 116
      escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java
  18. 20
      escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java
  19. 12
      escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java
  20. 6
      escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java
  21. 14
      escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java
  22. 1
      escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java
  23. 24
      escheduler-api/src/test/java/cn/escheduler/api/controller/ResourcesControllerTest.java
  24. 2
      escheduler-common/pom.xml
  25. 26
      escheduler-common/src/main/java/cn/escheduler/common/Constants.java
  26. 29
      escheduler-common/src/main/java/cn/escheduler/common/enums/ResUploadType.java
  27. 35
      escheduler-common/src/main/java/cn/escheduler/common/enums/TaskRecordStatus.java
  28. 8
      escheduler-common/src/main/java/cn/escheduler/common/enums/TaskType.java
  29. 15
      escheduler-common/src/main/java/cn/escheduler/common/enums/ZKNodeType.java
  30. 20
      escheduler-common/src/main/java/cn/escheduler/common/job/db/BaseDataSource.java
  31. 13
      escheduler-common/src/main/java/cn/escheduler/common/job/db/HiveDataSource.java
  32. 3
      escheduler-common/src/main/java/cn/escheduler/common/job/db/SparkDataSource.java
  33. 15
      escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java
  34. 2
      escheduler-common/src/main/java/cn/escheduler/common/utils/OSUtils.java
  35. 14
      escheduler-common/src/main/java/cn/escheduler/common/utils/PropertyUtils.java
  36. 4
      escheduler-common/src/main/resources/common/common.properties
  37. 12
      escheduler-common/src/main/resources/common/hadoop/hadoop.properties
  38. 2
      escheduler-dao/pom.xml
  39. 47
      escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java
  40. 46
      escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java
  41. 5
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapper.java
  42. 4
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperProvider.java
  43. 11
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java
  44. 15
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java
  45. 1
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceMapperProvider.java
  46. 7
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapperProvider.java
  47. 2
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java
  48. 17
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerServerMapper.java
  49. 15
      escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerServerMapperProvider.java
  50. 10
      escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessData.java
  51. 13
      escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessDefinition.java
  52. 14
      escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessInstance.java
  53. 125
      escheduler-dao/src/main/java/cn/escheduler/dao/utils/DagHelper.java
  54. 6
      escheduler-dao/src/main/resources/dao/data_source.properties
  55. 2
      escheduler-rpc/pom.xml
  56. 2
      escheduler-server/pom.xml
  57. 51
      escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java
  58. 30
      escheduler-server/src/main/java/cn/escheduler/server/utils/ProcessUtils.java
  59. 12
      escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java
  60. 72
      escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java
  61. 22
      escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractCommandExecutor.java
  62. 30
      escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java
  63. 193
      escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java
  64. 42
      escheduler-server/src/test/java/cn/escheduler/server/master/MasterCommandTest.java
  65. 2
      escheduler-ui/src/js/conf/home/pages/dag/_source/config.js
  66. 20
      escheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue
  67. 2
      escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue
  68. 2
      escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue
  69. 2
      escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js
  70. 13
      escheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue
  71. 5
      escheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue
  72. 5
      escheduler-ui/src/js/conf/home/pages/dag/index.vue
  73. 5
      escheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue
  74. 41
      escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue
  75. 4
      escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue
  76. 2
      escheduler-ui/src/js/conf/home/pages/datasource/pages/list/index.vue
  77. 115
      escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/statistics.vue
  78. 4
      escheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue
  79. 8
      escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue
  80. 15
      escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue
  81. 2
      escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue
  82. 2
      escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue
  83. 24
      escheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue
  84. 8
      escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue
  85. 6
      escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue
  86. 2
      escheduler-ui/src/js/conf/home/pages/projects/pages/list/index.vue
  87. 2
      escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue
  88. 12
      escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue
  89. 4
      escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue
  90. 8
      escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue
  91. 2
      escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue
  92. 9
      escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue
  93. 2
      escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue
  94. 9
      escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue
  95. 10
      escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue
  96. 8
      escheduler-ui/src/js/conf/home/router/index.js
  97. 6
      escheduler-ui/src/js/conf/home/store/dag/actions.js
  98. 7
      escheduler-ui/src/js/conf/home/store/dag/mutations.js
  99. 2
      escheduler-ui/src/js/conf/home/store/dag/state.js
  100. 9
      escheduler-ui/src/js/conf/home/store/datasource/actions.js
Some files were not shown because too many files have changed in this diff.

271
docs/zh_CN/EasyScheduler-FAQ.md

@@ -1,96 +1,287 @@
## Q: What are the EasyScheduler services, and how much memory is recommended?

A: EasyScheduler consists of 5 services: MasterServer, WorkerServer, ApiServer, AlertServer, LoggerServer, plus the UI.

| Service | Description |
| ------------------------- | ------------------------------------------------------------ |
| MasterServer | Mainly responsible for splitting the **DAG** and monitoring task states |
| WorkerServer/LoggerServer | Mainly responsible for submitting and executing tasks and updating task states. The LoggerServer lets the Rest Api read logs via **RPC** |
| ApiServer | Provides the Rest Api consumed by the UI |
| AlertServer | Provides the alerting service |
| UI | Front-end pages |

Note: **since there are quite a few services, a single-machine deployment should have at least 4 cores and 16G of RAM**
---
## Q: Why can't the administrator create a project?

A: The administrator is currently a "**pure admin**" role. It has no tenant, i.e. no corresponding Linux user, and therefore no execution permission, **so it owns no projects, resources or data sources** and cannot create them. **It does have all view permissions.** For business operations such as creating a project, **use the administrator to create a tenant and an ordinary user, then log in as that ordinary user**. In version 1.1.0 we will open up create and execute permissions for the administrator, so it will have all permissions.

---

## Q: Which mail providers does the system support?

A: The vast majority: qq, 163, 126, 139, outlook, aliyun and so on are all supported. Both **TLS and SSL** are supported and can be configured selectively in alert.properties.

---

## Q: What are the common built-in time parameters, and how are they used?

A: See https://analysys.github.io/easyscheduler_docs_cn/%E7%B3%BB%E7%BB%9F%E4%BD%BF%E7%94%A8%E6%89%8B%E5%86%8C.html#%E7%B3%BB%E7%BB%9F%E5%8F%82%E6%95%B0

---

## Q: pip install kazoo fails during installation. Is it required?

A: Yes, it must be installed; Python needs it to connect to ZooKeeper.
---

## Q: How do I make a task run on specific machines?

A: Create Worker groups with an **administrator** account; you can then **specify a Worker group** when **starting a process definition**, or **on an individual task node**. If none is specified, the Default group is used: **Default randomly picks one of all the Workers in the cluster to submit and execute the task** (see the sketch below).
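A minimal illustrative sketch of that Default-group dispatch, picking one live worker uniformly at random (this is not the scheduler's actual dispatch code):

```
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

class DefaultGroupDispatch {
    /** Mirrors the described behaviour: any live worker may receive the task. */
    static String pickWorker(List<String> liveWorkers) {
        return liveWorkers.get(ThreadLocalRandom.current().nextInt(liveWorkers.size()));
    }
}
```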
---

## Q: Task priority

A: We support priorities for **both processes and tasks**. There are five levels: **HIGHEST, HIGH, MEDIUM, LOW and LOWEST**. **Priorities can be set between different process instances, and between task instances within the same process instance.** For details, see the priority design: https://analysys.github.io/easyscheduler_docs_cn/%E7%B3%BB%E7%BB%9F%E6%9E%B6%E6%9E%84%E8%AE%BE%E8%AE%A1.html#%E7%B3%BB%E7%BB%9F%E6%9E%B6%E6%9E%84%E8%AE%BE%E8%AE%A1

----
## Q: escheduler-grpc reports errors

A: Run mvn -U clean package assembly:assembly -Dmaven.test.skip=true in the root directory, then refresh the whole project.

----

## Q: Does EasyScheduler run on Windows?

A: In theory **only the Worker needs to run on Linux**; all the other services can run on Windows without problems. Deploying on Linux is still the recommendation, though.

-----

## Q: On Linux the UI build fails in node-sass with: Error: EACCESS: permission denied, mkdir xxxx

A: Install it separately with **npm install node-sass --unsafe-perm**, then run **npm install** again.

---

## Q: The UI cannot be reached or logged into

A: 1. If it was started with node, check that the API_BASE setting in .env under escheduler-ui is the Api Server address.
2. If it was started with nginx and installed through **install-escheduler-ui.sh**, check that the proxy_pass setting in **/etc/nginx/conf.d/escheduler.conf** is the Api Server address.
3. If both are correct, check whether the Api Server itself is healthy: curl http://192.168.xx.xx:12345/escheduler/users/get-user-info and look at the Api Server log. If it logs cn.escheduler.api.interceptor.LoginHandlerInterceptor:[76] - session info is null, the Api Server is working normally.
4. If none of the above turns anything up, check the **server.context-path and server.port settings** in **application.properties**.

---

## Q: After starting a process definition manually or by schedule, no process instance is created

A: 1. First check with **jps whether the MasterServer process exists**, or check directly in the service monitor whether a master service exists in zk.
2. If the master service exists, look at the **command status statistics**, or check whether new rows have appeared in **t_escheduler_error_command**; if they have, **inspect the message field to locate the cause of the startup failure**.

---

## Q: A task stays in the "submit success" state forever

A: 1. First check with **jps whether the WorkerServer process exists**, or check directly in the service monitor whether a worker service exists in zk.
2. If the **WorkerServer** service is normal, **check whether the MasterServer has put the task into the zk queue**; **inspect the MasterServer log and whether tasks are blocked in the zk queue**.
3. If neither shows a problem, check whether a Worker group was specified while **none of that Worker group's machines are online**.

---

## Q: Are a Docker image and a Dockerfile provided?

A: Yes.
Docker image: https://hub.docker.com/r/escheduler/escheduler_images
Dockerfile: https://github.com/qiaozhanwei/escheduler_dockerfile/tree/master/docker_escheduler

---

## Q: What needs attention in install.sh?

A: 1. If a replacement variable contains special characters, **escape them with the \ escape character**.
2. installPath="/data1_1T/escheduler": **this directory must not be the same directory as the one containing the install.sh you are running**.
3. deployUser="escheduler": **the deploy user must have sudo permission**, because the worker runs jobs via sudo -u tenant sh xxx.command.
4. monitorServerState="false": whether the service monitoring script is started. It is off by default. **If enabled, it checks every 5 minutes whether the master and worker services are down and restarts them automatically.**
5. hdfsStartupSate="false": whether HDFS resource upload is enabled. It is off by default; **while it is off, the resource center cannot be used**. If it is enabled, configure fs.defaultFS and the yarn settings in conf/common/hadoop/hadoop.properties; when using namenode HA, also copy core-site.xml and hdfs-site.xml into the conf root directory.
Note: **1.0.x does not create the hdfs root directory automatically; create it yourself, and the deploy user needs hdfs permission on it.**

---

## Q: Taking process definitions and process instances offline fails

A: For **versions before 1.0.4**, patch the code under the escheduler-api package cn.escheduler.api.quartz as follows:

```
public boolean deleteJob(String jobName, String jobGroupName) {
    lock.writeLock().lock();
    try {
        JobKey jobKey = new JobKey(jobName,jobGroupName);
        if(scheduler.checkExists(jobKey)){
            logger.info("try to delete job, job name: {}, job group name: {},", jobName, jobGroupName);
            // remove the job from the quartz scheduler
            return scheduler.deleteJob(jobKey);
        }else {
            // the job is not registered, nothing to delete
            return true;
        }
    } catch (SchedulerException e) {
        logger.error(String.format("delete job : %s failed",jobName), e);
    } finally {
        lock.writeLock().unlock();
    }
    return false;
}
```

---

## Q: Can a tenant that was created before HDFS was enabled use the resource center normally?

A: No. A tenant created while HDFS was not yet enabled has no tenant directory registered in HDFS, so uploading resources will fail.

## Q: With multiple Masters and multiple Workers, how is a dead service tolerated?

A: **Note: the Master monitors both Master and Worker services.**
1. If a Master service dies, another Master takes over the dead Master's processes and keeps monitoring the Worker task states.
2. If a Worker service dies, the Master notices; if Yarn tasks exist, they are killed and then retried.
For details see the fault-tolerance design: https://analysys.github.io/easyscheduler_docs_cn/%E7%B3%BB%E7%BB%9F%E6%9E%B6%E6%9E%84%E8%AE%BE%E8%AE%A1.html#%E7%B3%BB%E7%BB%9F%E6%9E%B6%E6%9E%84%E8%AE%BE%E8%AE%A1

---

## Q: What about fault tolerance when Master and Worker run pseudo-distributed on a single machine?

A: Version 1.0.3 only implements fault tolerance on Master startup, not Worker fault tolerance. That means if the Worker dies while no Master is alive, the process gets stuck. We will add Master and Worker startup self fault tolerance in **1.1.0** to fix this. To repair it manually: for a **process that was running across the restart**, **mark the running Worker tasks that died with it as failed**, **also set that running process to the failed state**, and then recover the process from the failed node.

---

## Q: It is easy to configure a schedule that fires every second

A: Take care when setting up the cron: if the first field of (* * * * * ? *) is set to \*, the job fires every second. **We will add a list of the upcoming schedule times in version 1.1.0.** You can use http://cron.qqe2.com/ to preview the next 5 run times online, or compute them offline as in the sketch below.
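For the offline preview, Quartz (the library the escheduler-api quartz package builds on) can evaluate a cron expression directly; a minimal sketch:

```
import org.quartz.CronExpression;
import java.text.ParseException;
import java.util.Date;

public class CronPreview {
    public static void main(String[] args) throws ParseException {
        // "0 0 1 * * ? *" fires daily at 01:00; a leading "*" would fire every second
        CronExpression cron = new CronExpression("0 0 1 * * ? *");
        Date next = new Date();
        for (int i = 0; i < 5; i++) {   // print the next 5 fire times
            next = cron.getNextValidTimeAfter(next);
            System.out.println(next);
        }
    }
}
```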
-----

## Q: Does a schedule have a valid time range?

A: Yes. **If the start time and end time of a schedule are the same moment, the schedule is invalid.** **If the end time is earlier than the current time, the schedule is very likely to be deleted automatically.**
## Q: What kinds of task dependencies are there?

A: 1. Dependencies between tasks in a **DAG**: the DAG is split starting from the nodes whose **in-degree is zero** (see the sketch after this answer).
2. **Dependent task nodes (DEPENDENT)**, which implement task or process dependencies across processes; see https://analysys.github.io/easyscheduler_docs_cn/%E7%B3%BB%E7%BB%9F%E4%BD%BF%E7%94%A8%E6%89%8B%E5%86%8C.html#%E4%BB%BB%E5%8A%A1%E8%8A%82%E7%82%B9%E7%B1%BB%E5%9E%8B%E5%92%8C%E5%8F%82%E6%95%B0%E8%AE%BE%E7%BD%AE
Note: **process or task dependencies across projects are not supported**
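To make "in-degree of zero" concrete, here is an illustrative sketch (not the actual DagHelper code touched by this commit) that peels a DAG into executable waves, each wave being the nodes with no unfinished upstream task:

```
import java.util.*;

public class DagWaves {
    /** Splits a DAG into waves; each wave holds the nodes whose in-degree has dropped to zero. */
    static List<List<String>> waves(Map<String, List<String>> edges, Set<String> nodes) {
        Map<String, Integer> inDegree = new HashMap<>();
        for (String n : nodes) inDegree.put(n, 0);
        for (List<String> targets : edges.values())
            for (String t : targets) inDegree.merge(t, 1, Integer::sum);

        List<List<String>> result = new ArrayList<>();
        Set<String> remaining = new HashSet<>(nodes);
        while (!remaining.isEmpty()) {
            List<String> wave = new ArrayList<>();
            for (String n : remaining)
                if (inDegree.get(n) == 0) wave.add(n);      // ready: no pending upstream task
            if (wave.isEmpty()) throw new IllegalStateException("cycle detected, not a DAG");
            for (String n : wave) {
                remaining.remove(n);
                for (String t : edges.getOrDefault(n, Collections.<String>emptyList()))
                    inDegree.merge(t, -1, Integer::sum);
            }
            result.add(wave);
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, List<String>> edges = new HashMap<>();
        edges.put("A", Arrays.asList("B", "C"));
        edges.put("B", Collections.singletonList("D"));
        edges.put("C", Collections.singletonList("D"));
        System.out.println(waves(edges, new HashSet<>(Arrays.asList("A", "B", "C", "D"))));
        // prints [[A], [B, C], [D]] (order inside a wave may vary)
    }
}
```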
## Q: In how many ways can a process definition be started?

A: 1. In the **process definition list**, click the **Start** button.
2. **Add a schedule to the process definition in the list**, so the scheduler starts it.
3. In the **view or edit** DAG page of a process definition, **right-click any task node** and start the process definition from there.
4. You can edit the DAG and set the run flag of certain tasks to **forbid execution**; when the process definition is started, that node's connections are removed from the DAG.

## Q: Setting the Python version for Python tasks

A: 1. For versions **after 1.0.3**, just change PYTHON_HOME in conf/env/.escheduler_env.sh:
```
export PYTHON_HOME=/bin/python
```
Note: **PYTHON_HOME** here is the absolute path of the python command, not the conventional PYTHON_HOME. Also note that when exporting PATH you should reference it directly:
```
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$PATH
```
2. For versions before 1.0.3, Python tasks can only use the system Python; a specific version cannot be chosen.

## Q: A Worker task started via sudo -u tenant sh xxx.command spawns child processes; are they killed when the task is killed?

A: In 1.0.4 we will kill, together with the task, all the child processes it spawned.

## Q: How are queues used in EasyScheduler, and what do user queue and tenant queue mean?

A: A queue can be set on a user or on a tenant; **the queue set on the user takes precedence over the tenant's queue**. For example, for an MR task the queue is specified via mapreduce.job.queuename.
Note: when specifying the queue for MR this way, pass the parameters like this:
```
Configuration conf = new Configuration();
GenericOptionsParser optionParser = new GenericOptionsParser(conf, args);
String[] remainingArgs = optionParser.getRemainingArgs();
```
For Spark tasks, specify the queue with --queue.
## Q: The Master or Worker raises the alert below

<p align="center">
   <img src="https://analysys.github.io/easyscheduler_docs_cn/images/master_worker_lack_res.png" width="60%" />
 </p>

A: Lower the value of **master.reserved.memory** in conf/master.properties, for example to 0.1, or
lower the value of **worker.reserved.memory** in conf/worker.properties, for example to 0.1.
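For example (a sketch; 0.1 is just the value the answer suggests, tune it to your machine):

```
# conf/master.properties
master.reserved.memory=0.1

# conf/worker.properties
worker.reserved.memory=0.1
```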
## Q: Hive is 1.1.0+cdh5.15.0 and SQL hive task connections fail

<p align="center">
   <img src="https://analysys.github.io/easyscheduler_docs_cn/images/cdh_hive_error.png" width="60%" />
</p>

A: Change the hive pom from

```
<dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>2.1.0</version>
</dependency>
```

to

```
<dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>1.1.0</version>
</dependency>
```

BIN
docs/zh_CN/images/cdh_hive_error.png

Binary file not shown (new image, 104 KiB).

BIN
docs/zh_CN/images/master_worker_lack_res.png

Binary file not shown (new image, 106 KiB).

4
docs/zh_CN/前端部署文档.md

@@ -5,9 +5,9 @@
 ## 1. Preparation
 #### Download the installation package
-The latest package version is currently 1.0.2; download it here: [Gitee download](https://gitee.com/easyscheduler/EasyScheduler/attach_files/)
+Please download the latest package version here: [Gitee download](https://gitee.com/easyscheduler/EasyScheduler/attach_files/)
-After downloading escheduler-ui-1.0.2.tar.gz, unpack it with `tar -zxvf escheduler-ui-1.0.2.tar.gz ./` and enter the `escheduler-ui` directory
+After downloading escheduler-ui-x.x.x.tar.gz, unpack it with `tar -zxvf escheduler-ui-x.x.x.tar.gz ./` and enter the `escheduler-ui` directory

4
docs/zh_CN/后端部署文档.md

@@ -4,7 +4,7 @@
 ## 1. Preparation
-The latest package version is currently 1.0.3; download it here: [Gitee download](https://gitee.com/easyscheduler/EasyScheduler/attach_files/), downloading escheduler-backend-1.0.3.tar.gz (back end, escheduler-backend for short) and escheduler-ui-1.0.3.tar.gz (front end, escheduler-ui for short)
+Please download the latest package version here: [Gitee download](https://gitee.com/easyscheduler/EasyScheduler/attach_files/), downloading escheduler-backend-x.x.x.tar.gz (back end, escheduler-backend for short) and escheduler-ui-x.x.x.tar.gz (front end, escheduler-ui for short)
 #### Preparation 1: base software installation (install the required items yourself)
@@ -149,7 +149,7 @@ install.sh : one-click deployment script
 ### 2.2 Deploying by building from source
-Download the 1.0.3 release of the source package, unpack it and enter the root directory
+Download the release of the source package, unpack it and enter the root directory
 * Run the build command:

2
docs/zh_CN/系统使用手册.md

@@ -311,7 +311,7 @@ conf/common/hadoop.properties
 ## Security center (permission system)
 - The security center is available to administrator accounts only. It offers queue management, tenant management, user management, alert group management, worker groups, token management and so on, and can also grant permissions on resources, data sources, projects, etc.
-- Administrator login, default username/password: admin/esheduler123
+- Administrator login, default username/password: admin/escheduler123
 ### Creating a queue
 - Queues are used when running spark, mapreduce and other programs that need a "queue" parameter.

2
escheduler-alert/pom.xml

@@ -4,7 +4,7 @@
     <parent>
         <groupId>cn.analysys</groupId>
         <artifactId>escheduler</artifactId>
-        <version>1.0.3-SNAPSHOT</version>
+        <version>1.0.4-SNAPSHOT</version>
     </parent>
     <artifactId>escheduler-alert</artifactId>
     <packaging>jar</packaging>

1
escheduler-alert/src/main/java/cn/escheduler/alert/utils/MailUtils.java

@@ -165,6 +165,7 @@ public class MailUtils {
                 return retMap;
             }catch (Exception e){
                 handleException(receivers, retMap, e);
+                return retMap;
             }
         }
         return retMap;

2
escheduler-api/pom.xml

@@ -4,7 +4,7 @@
     <parent>
         <groupId>cn.analysys</groupId>
         <artifactId>escheduler</artifactId>
-        <version>1.0.3-SNAPSHOT</version>
+        <version>1.0.4-SNAPSHOT</version>
     </parent>
     <artifactId>escheduler-api</artifactId>
     <packaging>jar</packaging>

46
escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java

@@ -18,10 +18,13 @@ package cn.escheduler.api.controller;

 import cn.escheduler.api.enums.Status;
 import cn.escheduler.api.service.DataSourceService;
+import cn.escheduler.api.utils.CheckUtils;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.DbType;
+import cn.escheduler.common.enums.ResUploadType;
 import cn.escheduler.common.utils.ParameterUtils;
+import cn.escheduler.common.utils.PropertyUtils;
 import cn.escheduler.dao.model.User;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiImplicitParam;
@@ -34,9 +37,11 @@ import org.springframework.http.HttpStatus;
 import org.springframework.web.bind.annotation.*;
 import springfox.documentation.annotations.ApiIgnore;

+import java.util.HashMap;
 import java.util.Map;

 import static cn.escheduler.api.enums.Status.*;
+import static cn.escheduler.common.utils.PropertyUtils.getBoolean;

 /**
@@ -54,12 +59,16 @@ public class DataSourceController extends BaseController {

     /**
      * create data source
+     * 创建数据源
+     *
      * @param loginUser
      * @param name
      * @param note
      * @param type
+     * @param host
+     * @param port
+     * @param database
+     * @param principal
+     * @param userName
+     * @param password
      * @param other
      * @return
      */
@@ -84,13 +93,14 @@ public class DataSourceController extends BaseController {
                                    @RequestParam(value = "host") String host,
                                    @RequestParam(value = "port") String port,
                                    @RequestParam(value = "database") String database,
+                                   @RequestParam(value = "principal") String principal,
                                    @RequestParam(value = "userName") String userName,
                                    @RequestParam(value = "password") String password,
                                    @RequestParam(value = "other") String other) {
-        logger.info("login user {} create datasource ame: {}, note: {}, type: {}, other: {}",
-                loginUser.getUserName(), name, note, type, other);
+        logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {},port: {},database : {},principal: {},userName : {} other: {}",
+                loginUser.getUserName(), name, note, type, host,port,database,principal,userName,other);
         try {
-            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, userName, password, other);
+            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other);
             Map<String, Object> result = dataSourceService.createDataSource(loginUser, name, note, type, parameter);
             return returnDataList(result);
@@ -134,13 +144,14 @@ public class DataSourceController extends BaseController {
                                    @RequestParam(value = "host") String host,
                                    @RequestParam(value = "port") String port,
                                    @RequestParam(value = "database") String database,
+                                   @RequestParam(value = "principal") String principal,
                                    @RequestParam(value = "userName") String userName,
                                    @RequestParam(value = "password") String password,
                                    @RequestParam(value = "other") String other) {
         logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, other: {}",
                 loginUser.getUserName(), name, note, type, other);
         try {
-            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, userName, password, other);
+            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal, userName, password, other);
             Map<String, Object> dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
             return returnDataList(dataSource);
         } catch (Exception e) {
@@ -269,13 +280,14 @@ public class DataSourceController extends BaseController {
                                      @RequestParam(value = "host") String host,
                                      @RequestParam(value = "port") String port,
                                      @RequestParam(value = "database") String database,
+                                     @RequestParam(value = "principal") String principal,
                                      @RequestParam(value = "userName") String userName,
                                      @RequestParam(value = "password") String password,
                                      @RequestParam(value = "other") String other) {
         logger.info("login user {}, connect datasource: {} failure, note: {}, type: {}, other: {}",
                 loginUser.getUserName(), name, note, type, other);
         try {
-            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, userName, password, other);
+            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other);
             Boolean isConnection = dataSourceService.checkConnection(type, parameter);
             Result result = new Result();
@@ -429,4 +441,24 @@ public class DataSourceController extends BaseController {
             return error(AUTHORIZED_DATA_SOURCE.getCode(), AUTHORIZED_DATA_SOURCE.getMsg());
         }
     }
+
+    /**
+     * get user info
+     *
+     * @param loginUser
+     * @return
+     */
+    @ApiOperation(value = "getKerberosStartupState", notes= "GET_USER_INFO_NOTES")
+    @GetMapping(value="/kerberos-startup-state")
+    @ResponseStatus(HttpStatus.OK)
+    public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){
+        logger.info("login user {},get kerberos startup state : {}", loginUser.getUserName());
+        try{
+            // if upload resource is HDFS and kerberos startup is true , else false
+            return success(Status.SUCCESS.getMsg(), CheckUtils.getKerberosStartupState());
+        }catch (Exception e){
+            logger.error(KERBEROS_STARTUP_STATE.getMsg(),e);
+            return error(Status.KERBEROS_STARTUP_STATE.getCode(), Status.KERBEROS_STARTUP_STATE.getMsg());
+        }
+    }
 }

4
escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java

@@ -236,9 +236,9 @@ public class ResourcesController extends BaseController{
     ) {
         try {
             logger.info("login user {}, verfiy resource alias: {},resource type: {}",
-                    loginUser.getUserName(), alias);
-            return resourceService.verifyResourceName(alias, type);
+                    loginUser.getUserName(), alias,type);
+            return resourceService.verifyResourceName(alias,type,loginUser);
         } catch (Exception e) {
             logger.error(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg(), e);
             return error(Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getCode(), Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg());

3
escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java

@@ -175,6 +175,8 @@ public enum Status {
     RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified"),
     UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"),
     HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail"),
+    RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!"),
+    RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!"),
@@ -231,6 +233,7 @@ public enum Status {

     QUEUE_COUNT_ERROR(90001,"queue count error"),

+    KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error"),
     ;

     private int code;

40
escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java

@@ -17,11 +17,15 @@
 package cn.escheduler.api.service;

 import cn.escheduler.api.enums.Status;
+import cn.escheduler.api.utils.CheckUtils;
 import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.PageInfo;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.DbType;
+import cn.escheduler.common.enums.ResUploadType;
+import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.job.db.*;
+import cn.escheduler.common.utils.PropertyUtils;
 import cn.escheduler.dao.mapper.DataSourceMapper;
 import cn.escheduler.dao.mapper.DatasourceUserMapper;
 import cn.escheduler.dao.mapper.ProjectMapper;
@@ -30,6 +34,8 @@ import cn.escheduler.dao.model.Resource;
 import cn.escheduler.dao.model.User;
 import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.TypeReference;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -41,6 +47,9 @@ import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.*;

+import static cn.escheduler.common.utils.PropertyUtils.getBoolean;
+import static cn.escheduler.common.utils.PropertyUtils.getString;
+
 /**
  * datasource service
  */
@@ -54,6 +63,7 @@ public class DataSourceService extends BaseService{
     public static final String TYPE = "type";
     public static final String HOST = "host";
     public static final String PORT = "port";
+    public static final String PRINCIPAL = "principal";
     public static final String DATABASE = "database";
     public static final String USER_NAME = "userName";
     public static final String PASSWORD = "password";
@@ -239,6 +249,7 @@ public class DataSourceService extends BaseService{
         map.put(TYPE, dataSourceType);
         map.put(HOST, host);
         map.put(PORT, port);
+        map.put(PRINCIPAL, datasourceForm.getPrincipal());
         map.put(DATABASE, database);
         map.put(USER_NAME, datasourceForm.getUser());
         map.put(PASSWORD, datasourceForm.getPassword());
@@ -362,11 +373,21 @@ public class DataSourceService extends BaseService{
                 Class.forName(Constants.COM_MYSQL_JDBC_DRIVER);
                 break;
             case HIVE:
-                datasource = JSONObject.parseObject(parameter, HiveDataSource.class);
-                Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
-                break;
             case SPARK:
+                if (CheckUtils.getKerberosStartupState()) {
+                    System.setProperty(cn.escheduler.common.Constants.JAVA_SECURITY_KRB5_CONF,
+                            getString(cn.escheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH));
+                    Configuration configuration = new Configuration();
+                    configuration.set(cn.escheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+                    UserGroupInformation.setConfiguration(configuration);
+                    UserGroupInformation.loginUserFromKeytab(getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME),
+                            getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
+                }
+                if (dbType == DbType.HIVE){
+                    datasource = JSONObject.parseObject(parameter, HiveDataSource.class);
+                }else if (dbType == DbType.SPARK){
                     datasource = JSONObject.parseObject(parameter, SparkDataSource.class);
+                }
                 Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
                 break;
             case CLICKHOUSE:
@@ -442,10 +463,18 @@ public class DataSourceService extends BaseService{
      * @param other
      * @return
      */
-    public String buildParameter(String name, String desc, DbType type, String host, String port, String database, String userName, String password, String other) {
+    public String buildParameter(String name, String desc, DbType type, String host,
+                                 String port, String database,String principal,String userName,
+                                 String password, String other) {

         String address = buildAddress(type, host, port);

         String jdbcUrl = address + "/" + database;
+        if (CheckUtils.getKerberosStartupState() &&
+                (type == DbType.HIVE || type == DbType.SPARK)){
+            jdbcUrl += ";principal=" + principal;
+        }
+
         String separator = "";
         if (Constants.MYSQL.equals(type.name())
                 || Constants.POSTGRESQL.equals(type.name())
@@ -464,6 +493,7 @@ public class DataSourceService extends BaseService{
         parameterMap.put(Constants.JDBC_URL, jdbcUrl);
         parameterMap.put(Constants.USER, userName);
         parameterMap.put(Constants.PASSWORD, password);
+        parameterMap.put(Constants.PRINCIPAL,principal);
         if (other != null && !"".equals(other)) {
             Map map = JSONObject.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
             });
@@ -537,7 +567,7 @@ public class DataSourceService extends BaseService{
             putMsg(result, Status.RESOURCE_NOT_EXIST);
             return result;
         }
-        if(loginUser.getId() != dataSource.getUserId()){
+        if(loginUser.getId() != dataSource.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER){
             putMsg(result, Status.USER_NO_OPERATION_PERM);
             return result;
         }
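The net effect of the buildParameter change above: with kerberos enabled, HIVE and SPARK JDBC URLs gain a ;principal= suffix. A standalone sketch of just that URL assembly (names follow the diff; the surrounding class is not reproduced here):

```
// Illustrative only: mirrors the jdbcUrl logic in the diff above.
static String buildJdbcUrl(String address, String database,
                           boolean kerberosEnabled, String principal) {
    String jdbcUrl = address + "/" + database;   // e.g. jdbc:hive2://host:10000/default
    if (kerberosEnabled && principal != null) {
        jdbcUrl += ";principal=" + principal;    // e.g. ;principal=hive/host@REALM.COM
    }
    return jdbcUrl;
}
```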

5
escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java

@@ -24,6 +24,7 @@ import cn.escheduler.api.utils.PageInfo;
 import cn.escheduler.common.enums.Flag;
 import cn.escheduler.common.enums.ReleaseState;
 import cn.escheduler.common.enums.TaskType;
+import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.graph.DAG;
 import cn.escheduler.common.model.TaskNode;
 import cn.escheduler.common.model.TaskNodeRelation;
@@ -127,6 +128,7 @@ public class ProcessDefinitionService extends BaseDAGService {
         processDefine.setLocations(locations);
         processDefine.setConnects(connects);
         processDefine.setTimeout(processData.getTimeout());
+        processDefine.setTenantId(processData.getTenantId());

         //custom global params
         List<Property> globalParamsList = processData.getGlobalParams();
@@ -291,6 +293,7 @@ public class ProcessDefinitionService extends BaseDAGService {
         processDefine.setLocations(locations);
         processDefine.setConnects(connects);
         processDefine.setTimeout(processData.getTimeout());
+        processDefine.setTenantId(processData.getTenantId());

         //custom global params
         List<Property> globalParamsList = new ArrayList<>();
@@ -365,7 +368,7 @@ public class ProcessDefinitionService extends BaseDAGService {
         }

         // Determine if the login user is the owner of the process definition
-        if (loginUser.getId() != processDefinition.getUserId()) {
+        if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
             putMsg(result, Status.USER_NO_OPERATION_PERM);
             return result;
         }

7
escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java

@@ -364,6 +364,7 @@ public class ProcessInstanceService extends BaseDAGService {
         String globalParams = null;
         String originDefParams = null;
         int timeout = processInstance.getTimeout();
+        ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
         if (StringUtils.isNotEmpty(processInstanceJson)) {
             ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class);
             //check workflow json is valid
@@ -379,6 +380,11 @@ public class ProcessInstanceService extends BaseDAGService {
                     processInstance.getCmdTypeIfComplement(), schedule);
             timeout = processData.getTimeout();
             processInstance.setTimeout(timeout);
+            Tenant tenant = processDao.getTenantForProcess(processData.getTenantId(),
+                    processDefinition.getUserId());
+            if(tenant != null){
+                processInstance.setTenantCode(tenant.getTenantCode());
+            }
             processInstance.setProcessInstanceJson(processInstanceJson);
             processInstance.setGlobalParams(globalParams);
         }
@@ -387,7 +393,6 @@ public class ProcessInstanceService extends BaseDAGService {
         int update = processDao.updateProcessInstance(processInstance);
         int updateDefine = 1;
         if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) {
-            ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
             processDefinition.setProcessDefinitionJson(processInstanceJson);
             processDefinition.setGlobalParams(originDefParams);
             processDefinition.setLocations(locations);

9
escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java

@@ -76,15 +76,6 @@ public class ProjectService extends BaseService{
             return descCheck;
         }

-        /**
-         * only general users can create projects. administrators have no corresponding tenants and can only view
-         * 管理员没有对应的租户,只能查看,只有普通用户才可以创建项目
-         */
-        if (!userService.isGeneral(loginUser)) {
-            putMsg(result, Status.USER_NO_OPERATION_PERM);
-            return result;
-        }
-
         Project project = projectMapper.queryByName(name);
         if (project != null) {
             putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);

116
escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java

@@ -21,6 +21,7 @@ import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.PageInfo;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.ResourceType;
+import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.utils.FileUtils;
 import cn.escheduler.common.utils.HadoopUtils;
 import cn.escheduler.common.utils.PropertyUtils;
@@ -85,8 +86,8 @@ public class ResourcesService extends BaseService {
         Result result = new Result();

         // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
@@ -184,9 +185,9 @@ public class ResourcesService extends BaseService {
                                  ResourceType type) {
         Result result = new Result();

-        // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        // if resource upload startup
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
@@ -385,9 +386,9 @@ public class ResourcesService extends BaseService {
     public Result delete(User loginUser, int resourceId) throws Exception {
         Result result = new Result();

-        // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        // if resource upload startup
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
@@ -399,7 +400,7 @@ public class ResourcesService extends BaseService {
             putMsg(result, Status.RESOURCE_NOT_EXIST);
             return result;
         }
-        if (loginUser.getId() != resource.getUserId()) {
+        if (loginUser.getId() != resource.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
             putMsg(result, Status.USER_NO_OPERATION_PERM);
             return result;
         }
@@ -420,6 +421,41 @@ public class ResourcesService extends BaseService {
         return result;
     }

+    /**
+     * verify resource by name and type
+     * @param name
+     * @param type
+     * @param loginUser
+     * @return
+     */
+    public Result verifyResourceName(String name, ResourceType type,User loginUser) {
+        Result result = new Result();
+        putMsg(result, Status.SUCCESS);
+
+        Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal());
+        if (resource != null) {
+            logger.error("resource type:{} name:{} has exist, can't create again.", type, name);
+            putMsg(result, Status.RESOURCE_EXIST);
+        } else {
+            // query tenant
+            String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
+
+            try {
+                String hdfsFilename = getHdfsFileName(type,tenantCode,name);
+                if(HadoopUtils.getInstance().exists(hdfsFilename)){
+                    logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, name,hdfsFilename);
+                    putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename);
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(),e);
+                putMsg(result,Status.HDFS_OPERATION_ERROR);
+            }
+        }
+        return result;
+    }
+
     /**
      * verify resource by name and type
      *
@@ -448,9 +484,9 @@ public class ResourcesService extends BaseService {
     public Result readResource(int resourceId, int skipLineNum, int limit) {
         Result result = new Result();

-        // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        // if resource upload startup
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
@@ -480,6 +516,7 @@ public class ResourcesService extends BaseService {
         String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias());
         logger.info("resource hdfs path is {} ", hdfsFileName);
         try {
+            if(HadoopUtils.getInstance().exists(hdfsFileName)){
                 List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit);

                 putMsg(result, Status.SUCCESS);
@@ -487,6 +524,11 @@ public class ResourcesService extends BaseService {
                 map.put(ALIAS, resource.getAlias());
                 map.put(CONTENT, StringUtils.join(content.toArray(), "\n"));
                 result.setData(map);
+            }else{
+                logger.error("read file {} not exist in hdfs", hdfsFileName);
+                putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName);
+            }
+
         } catch (Exception e) {
             logger.error(String.format("Resource %s read failed", hdfsFileName), e);
             putMsg(result, Status.HDFS_OPERATION_ERROR);
@@ -509,9 +551,9 @@ public class ResourcesService extends BaseService {
     @Transactional(value = "TransactionManager",rollbackFor = Exception.class)
     public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content) {
         Result result = new Result();
-        // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        // if resource upload startup
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
@@ -530,17 +572,14 @@ public class ResourcesService extends BaseService {

         String name = fileName.trim() + "." + nameSuffix;

-        //check file already exists
-        Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal());
-        if (resource != null) {
-            logger.error("resource {} has exist, can't recreate .", name);
-            putMsg(result, Status.RESOURCE_EXIST);
+        result = verifyResourceName(name,type,loginUser);
+        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
             return result;
         }

         // save data
         Date now = new Date();
-        resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now);
+        Resource resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now);

         resourcesMapper.insert(resource);
@@ -569,12 +608,13 @@ public class ResourcesService extends BaseService {
      * @param resourceId
      * @return
      */
+    @Transactional(value = "TransactionManager",rollbackFor = Exception.class)
     public Result updateResourceContent(int resourceId, String content) {
         Result result = new Result();

-        // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        // if resource upload startup
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
@@ -597,6 +637,10 @@ public class ResourcesService extends BaseService {
             }
         }

+        resource.setSize(content.getBytes().length);
+        resource.setUpdateTime(new Date());
+        resourcesMapper.update(resource);
+
         User user = userMapper.queryDetailsById(resource.getUserId());
         String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode();
@@ -643,6 +687,7 @@ public class ResourcesService extends BaseService {
                 logger.error("{} is not exist", resourcePath);
                 result.setCode(Status.HDFS_OPERATION_ERROR.getCode());
                 result.setMsg(String.format("%s is not exist", resourcePath));
+                return result;
             }
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
@@ -662,9 +707,9 @@ public class ResourcesService extends BaseService {
      * @return
      */
     public org.springframework.core.io.Resource downloadResource(int resourceId) throws Exception {
-        // if hdfs not startup
-        if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
-            logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+        // if resource upload startup
+        if (!PropertyUtils.getResUploadStartupState()){
+            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
             throw new RuntimeException("hdfs not startup");
         }
@@ -809,6 +854,23 @@ public class ResourcesService extends BaseService {
         return hdfsFileName;
     }

+    /**
+     * get hdfs file name
+     *
+     * @param resourceType
+     * @param tenantCode
+     * @param hdfsFileName
+     * @return
+     */
+    private String getHdfsFileName(ResourceType resourceType, String tenantCode, String hdfsFileName) {
+        if (resourceType.equals(ResourceType.FILE)) {
+            hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, hdfsFileName);
+        } else if (resourceType.equals(ResourceType.UDF)) {
+            hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, hdfsFileName);
+        }
+        return hdfsFileName;
+    }
+
     /**
      * get authorized resource list
      *
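The change repeated throughout this file replaces the HDFS_STARTUP_STATE boolean with PropertyUtils.getResUploadStartupState(), whose body is not shown in this diff. A plausible sketch, assuming it keys off the new ResUploadType enum added by this commit (the property key and the NONE/HDFS/S3 literals are guesses, not confirmed here):

```
// Hypothetical reconstruction; property key and enum literals are assumptions.
public static boolean getResUploadStartupState() {
    String type = getString("res.upload.startup.type");   // e.g. NONE | HDFS | S3
    ResUploadType uploadType = ResUploadType.valueOf(type == null ? "NONE" : type.toUpperCase());
    return uploadType != ResUploadType.NONE;
}
```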

20
escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java

@@ -96,7 +96,7 @@ public class TenantService extends BaseService{
       tenantMapper.insert(tenant);

       // if hdfs startup
-      if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
+      if (PropertyUtils.getResUploadStartupState()){
           String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources";
           String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
           /**
@@ -125,9 +125,9 @@ public class TenantService extends BaseService{
   public Map<String,Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {

       Map<String, Object> result = new HashMap<>(5);
-      if (checkAdmin(loginUser, result)) {
-          return result;
-      }
+//        if (checkAdmin(loginUser, result)) {
+//            return result;
+//        }

       Integer count = tenantMapper.countTenantPaging(searchVal);
@@ -166,7 +166,7 @@ public class TenantService extends BaseService{
       Tenant tenant = tenantMapper.queryById(id);

       if (tenant == null){
-          putMsg(result, Status.USER_NOT_EXIST, id);
+          putMsg(result, Status.TENANT_NOT_EXIST);
           return result;
       }
@@ -178,7 +178,7 @@ public class TenantService extends BaseService{
       Tenant newTenant = tenantMapper.queryByTenantCode(tenantCode);
       if (newTenant == null){
           // if hdfs startup
-          if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
+          if (PropertyUtils.getResUploadStartupState()){
               String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources";
               String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
               //init hdfs resource
@@ -230,6 +230,13 @@ public class TenantService extends BaseService{
       Tenant tenant = tenantMapper.queryById(id);

+      if (tenant == null){
+          putMsg(result, Status.TENANT_NOT_EXIST);
+          return result;
+      }
+
+      // if resource upload startup
+      if (PropertyUtils.getResUploadStartupState()){
           String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode();
           String resourcePath = HadoopUtils.getHdfsDir(tenant.getTenantCode());
@@ -245,6 +252,7 @@ public class TenantService extends BaseService{
           }

           HadoopUtils.getInstance().delete(tenantPath, true);
+      }

       tenantMapper.deleteById(id);
       putMsg(result, Status.SUCCESS);

12
escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java

@@ -80,9 +80,9 @@ public class UdfFuncService extends BaseService{
int resourceId) {
Result result = new Result();
- // if hdfs not startup
- if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
- logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+ // if resource upload startup
+ if (!PropertyUtils.getResUploadStartupState()){
+ logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
@@ -167,9 +167,9 @@ public class UdfFuncService extends BaseService{
// verify udfFunc is exist
UdfFunc udf = udfFuncMapper.queryUdfById(udfFuncId);
- // if hdfs not startup
- if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
- logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE));
+ // if resource upload startup
+ if (!PropertyUtils.getResUploadStartupState()){
+ logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}

6
escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java

@@ -125,7 +125,7 @@ public class UsersService extends BaseService {
Tenant tenant = tenantMapper.queryById(tenantId);
// if hdfs startup
- if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
+ if (PropertyUtils.getResUploadStartupState()){
String userPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode() + "/home/" + user.getId();
HadoopUtils.getInstance().mkdir(userPath);
@@ -245,7 +245,7 @@ public class UsersService extends BaseService {
Tenant newTenant = tenantMapper.queryById(tenantId);
if (newTenant != null) {
// if hdfs startup
- if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
+ if (PropertyUtils.getResUploadStartupState()){
String newTenantCode = newTenant.getTenantCode();
String oldResourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + oldTenant.getTenantCode() + "/resources";
String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode());
@@ -308,7 +308,7 @@ public class UsersService extends BaseService {
User user = userMapper.queryTenantCodeByUserId(id);
- if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){
+ if (PropertyUtils.getResUploadStartupState()){
String userPath = HadoopUtils.getHdfsDataBasePath() + "/" + user.getTenantCode() + "/home/" + id;
HadoopUtils.getInstance().delete(userPath, true);

14
escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java

@@ -18,8 +18,10 @@ package cn.escheduler.api.utils;
import cn.escheduler.api.enums.Status;
import cn.escheduler.common.enums.ResUploadType;
import cn.escheduler.common.task.AbstractParameters;
import cn.escheduler.common.utils.JSONUtils;
import cn.escheduler.common.utils.PropertyUtils;
import cn.escheduler.common.utils.TaskParametersUtils;
import org.apache.commons.lang.StringUtils;
@@ -28,6 +30,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
import static cn.escheduler.common.utils.PropertyUtils.getBoolean;
/**
@@ -158,4 +161,15 @@ public class CheckUtils {
return pattern.matcher(str).matches();
}
/**
* true if the resource upload type is HDFS and kerberos startup is enabled, otherwise false
* @return
*/
public static boolean getKerberosStartupState(){
String resUploadStartupType = PropertyUtils.getString(cn.escheduler.common.Constants.RES_UPLOAD_STARTUP_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
Boolean kerberosStartupState = getBoolean(cn.escheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE);
return resUploadType == ResUploadType.HDFS && kerberosStartupState;
}
}
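A hedged usage sketch of the new flag; the caller below is hypothetical, but the intent is that a Kerberos principal is only wired in when uploads go to HDFS with kerberos enabled:

    // Hypothetical caller (not part of this commit):
    if (CheckUtils.getKerberosStartupState()) {
        // "hive/host01@EXAMPLE.COM" is an illustrative principal value
        hiveDataSource.setPrincipal("hive/host01@EXAMPLE.COM");
    }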

1
escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java

@@ -111,6 +111,7 @@ public class Constants {
public static final String ADDRESS = "address";
public static final String DATABASE = "database";
public static final String JDBC_URL = "jdbcUrl";
public static final String PRINCIPAL = "principal";
public static final String USER = "user";
public static final String PASSWORD = "password";
public static final String OTHER = "other";

24
escheduler-api/src/test/java/cn/escheduler/api/controller/ResourcesControllerTest.java

@@ -34,6 +34,8 @@ import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.context.WebApplicationContext;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
@@ -43,7 +45,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@RunWith(SpringRunner.class)
@SpringBootTest
public class ResourcesControllerTest {
- private static Logger logger = LoggerFactory.getLogger(QueueControllerTest.class);
+ private static Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class);
private MockMvc mockMvc;
@@ -71,4 +73,24 @@ public class ResourcesControllerTest {
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void verifyResourceName() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("name","list_resources_1.sh");
paramsMap.add("type","FILE");
MvcResult mvcResult = mockMvc.perform(get("/resources/verify-name")
.header("sessionId", "c24ed9d9-1c20-48a0-bd9c-5cfca14a4dcb")
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
}

2
escheduler-common/pom.xml

@@ -4,7 +4,7 @@
<parent>
<artifactId>escheduler</artifactId>
<groupId>cn.analysys</groupId>
- <version>1.0.3-SNAPSHOT</version>
+ <version>1.0.4-SNAPSHOT</version>
</parent>
<artifactId>escheduler-common</artifactId>
<name>escheduler-common</name>

26
escheduler-common/src/main/java/cn/escheduler/common/Constants.java

@@ -60,6 +60,23 @@ public final class Constants {
 */
public static final String FS_DEFAULTFS = "fs.defaultFS";
/**
* fs s3a endpoint
*/
public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint";
/**
* fs s3a access key
*/
public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key";
/**
* fs s3a secret key
*/
public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key";
/**
 * yarn.resourcemanager.ha.rm.ids
 */
@@ -123,9 +140,9 @@ public final class Constants {
public static final String DEVELOPMENT_STATE = "development.state";
/**
- * hdfs.startup.state
+ * res.upload.startup.type
 */
- public static final String HDFS_STARTUP_STATE = "hdfs.startup.state";
+ public static final String RES_UPLOAD_STARTUP_TYPE = "res.upload.startup.type";
/**
 * zookeeper quorum
@@ -261,6 +278,11 @@ public final class Constants {
 */
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
/**
* date format of yyyyMMdd
*/
public static final String YYYYMMDD = "yyyyMMdd";
/**
 * date format of yyyyMMddHHmmss
 */

29
escheduler-common/src/main/java/cn/escheduler/common/enums/ResUploadType.java

@@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.enums;
/**
* resource upload types
*/
public enum ResUploadType {
/**
* 0 hdfs
* 1 s3
* 2 none
*/
HDFS,S3,NONE
}
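A minimal, self-contained sketch of how this enum is resolved from res.upload.startup.type; note that Enum.valueOf throws IllegalArgumentException for any value other than HDFS, S3 or NONE:

    public class ResUploadTypeSketch {
        enum ResUploadType { HDFS, S3, NONE } // local copy so the sketch runs standalone

        public static void main(String[] args) {
            String configured = "NONE"; // would be read from common.properties
            ResUploadType type = ResUploadType.valueOf(configured);
            boolean uploadEnabled = (type == ResUploadType.HDFS || type == ResUploadType.S3);
            System.out.println(type + " -> resource upload enabled: " + uploadEnabled);
        }
    }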

35
escheduler-common/src/main/java/cn/escheduler/common/enums/TaskRecordStatus.java

@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.enums;
/**
* task record status
*
*/
public enum TaskRecordStatus {
/**
* status
* 0 success
* 1 failure
* 2 exception
*/
SUCCESS,FAILURE,EXCEPTION
}

8
escheduler-common/src/main/java/cn/escheduler/common/enums/TaskType.java

@@ -30,5 +30,11 @@ public enum TaskType {
 * 6 PYTHON
 * 7 DEPENDENT
 */
- SHELL,SQL, SUB_PROCESS,PROCEDURE,MR,SPARK,PYTHON,DEPENDENT
+ SHELL,SQL, SUB_PROCESS,PROCEDURE,MR,SPARK,PYTHON,DEPENDENT;
public static boolean typeIsNormalTask(String typeName) {
TaskType taskType = TaskType.valueOf(typeName);
return !(taskType == TaskType.SUB_PROCESS || taskType == TaskType.DEPENDENT);
}
}
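Usage sketch: SUB_PROCESS and DEPENDENT are treated as control-flow tasks, everything else as a normal task; TaskType.valueOf throws IllegalArgumentException for names outside the enum, so callers must pass a valid type name:

    boolean normal = TaskType.typeIsNormalTask("SQL");             // true
    boolean subProcess = TaskType.typeIsNormalTask("SUB_PROCESS"); // false
    // TaskType.typeIsNormalTask("NO_SUCH_TYPE") would throw IllegalArgumentException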

15
escheduler-common/src/main/java/cn/escheduler/common/enums/ZKNodeType.java

@@ -0,0 +1,15 @@
package cn.escheduler.common.enums;
/**
* zk node type
*/
public enum ZKNodeType {
/**
 * 0 master node;
 * 1 worker node;
 * 2 dead server;
 * 3 task queue;
*/
MASTER, WORKER, DEAD_SERVER, TASK_QUEUE;
}

20
escheduler-common/src/main/java/cn/escheduler/common/job/db/BaseDataSource.java

@@ -45,6 +45,18 @@ public abstract class BaseDataSource {
 */
private String other;
/**
* principal
*/
private String principal;
public String getPrincipal() {
return principal;
}
public void setPrincipal(String principal) {
this.principal = principal;
}
/**
 * test whether the data source can be connected successfully
 * @throws Exception
@@ -73,14 +85,14 @@ public abstract class BaseDataSource {
this.password = password;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
- public String getAddress() {
-     return address;
- }
public String getDatabase() {
return database;
}

13
escheduler-common/src/main/java/cn/escheduler/common/job/db/HiveDataSource.java

@@ -17,12 +17,12 @@
package cn.escheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
- import java.sql.Connection;
- import java.sql.DriverManager;
- import java.sql.SQLException;
+ import java.sql.*;
/**
 * data source of hive
@@ -32,6 +32,8 @@ public class HiveDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(HiveDataSource.class);
/**
 * gets the JDBC url for the data source connection
 * @return
@@ -43,7 +45,7 @@ public class HiveDataSource extends BaseDataSource {
jdbcUrl += "/";
}
- jdbcUrl += getDatabase();
+ jdbcUrl += getDatabase() + ";principal=" + getPrincipal();
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += ";" + getOther();
@@ -67,11 +69,10 @@ public class HiveDataSource extends BaseDataSource {
try {
con.close();
} catch (SQLException e) {
- logger.error("Postgre datasource try conn close conn error", e);
+ logger.error("hive datasource try conn close conn error", e);
throw e;
}
}
}
}
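With the principal appended, the URL builder is expected to produce something of the following shape (all values illustrative; the builder method itself is only partially shown by the diff):

    HiveDataSource ds = new HiveDataSource();
    ds.setAddress("jdbc:hive2://host01:10000"); // illustrative address
    ds.setDatabase("default");
    ds.setPrincipal("hive/host01@EXAMPLE.COM"); // illustrative principal
    // expected: jdbc:hive2://host01:10000/default;principal=hive/host01@EXAMPLE.COM
    // plus ";" + other when the "other" field is non-empty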

3
escheduler-common/src/main/java/cn/escheduler/common/job/db/SparkDataSource.java

@@ -31,7 +31,6 @@ public class SparkDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(SparkDataSource.class);
/**
 * gets the JDBC url for the data source connection
 * @return
@@ -43,7 +42,7 @@ public class SparkDataSource extends BaseDataSource {
jdbcUrl += "/";
}
- jdbcUrl += getDatabase();
+ jdbcUrl += getDatabase() + ";principal=" + getPrincipal();
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += ";" + getOther();

15
escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java

@@ -18,6 +18,7 @@ package cn.escheduler.common.utils;
import cn.escheduler.common.Constants;
import cn.escheduler.common.enums.ExecutionStatus;
import cn.escheduler.common.enums.ResUploadType;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
@@ -40,6 +41,7 @@ import java.util.stream.Stream;
import static cn.escheduler.common.Constants.*;
import static cn.escheduler.common.utils.PropertyUtils.*;
import static cn.escheduler.common.utils.PropertyUtils.getString;
/**
 * hadoop utils
@@ -94,6 +96,10 @@ public class HadoopUtils implements Closeable {
try {
configuration = new Configuration();
String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
if (resUploadType == ResUploadType.HDFS){
if (getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)){
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
@@ -136,6 +142,15 @@ public class HadoopUtils implements Closeable {
fs = FileSystem.get(configuration);
}
}
}else if (resUploadType == ResUploadType.S3){
configuration.set(FS_DEFAULTFS,getString(FS_DEFAULTFS));
configuration.set(FS_S3A_ENDPOINT,getString(FS_S3A_ENDPOINT));
configuration.set(FS_S3A_ACCESS_KEY,getString(FS_S3A_ACCESS_KEY));
configuration.set(FS_S3A_SECRET_KEY,getString(FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
String rmHaIds = getString(YARN_RESOURCEMANAGER_HA_RM_IDS);
String appAddress = getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
if (!StringUtils.isEmpty(rmHaIds)) {
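A standalone sketch of what the S3 branch amounts to, with illustrative values; the real values are read from hadoop.properties via the new FS_S3A_* constants:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class S3FileSystemSketch {
        public static void main(String[] args) throws Exception {
            Configuration configuration = new Configuration();
            configuration.set("fs.defaultFS", "s3a://escheduler");              // illustrative
            configuration.set("fs.s3a.endpoint", "http://192.168.199.91:9010"); // illustrative
            configuration.set("fs.s3a.access.key", "ACCESS_KEY");               // illustrative
            configuration.set("fs.s3a.secret.key", "SECRET_KEY");               // illustrative
            FileSystem fs = FileSystem.get(configuration);
            System.out.println(fs.getUri());
        }
    }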

2
escheduler-common/src/main/java/cn/escheduler/common/utils/OSUtils.java

@@ -220,7 +220,7 @@ public class OSUtils {
 * @throws IOException
 */
public static String exeShell(String command) throws IOException {
- return ShellExecutor.execCommand("groups");
+ return ShellExecutor.execCommand(command);
}
/**
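This is a straight bug fix: exeShell previously ignored its argument and always executed "groups". For illustration:

    String output = OSUtils.exeShell("whoami"); // now actually runs "whoami"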

14
escheduler-common/src/main/java/cn/escheduler/common/utils/PropertyUtils.java

@@ -16,6 +16,8 @@
 */
package cn.escheduler.common.utils;
import cn.escheduler.common.Constants;
import cn.escheduler.common.enums.ResUploadType;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -65,11 +67,15 @@ public class PropertyUtils {
}
}
- /*
- public static PropertyUtils getInstance(){
-     return propertyUtils;
- }
- */
+ /**
+  * whether resource upload is enabled
+  * @return
+  */
public static Boolean getResUploadStartupState(){
String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3;
}
/**
 * get property value

4
escheduler-common/src/main/resources/common/common.properties

@@ -16,8 +16,8 @@ hdfs.root.user=hdfs
# data base dir, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions. "/escheduler" is recommended
data.store2hdfs.basepath=/escheduler
- # whether hdfs starts
- hdfs.startup.state=false
+ # resource upload startup type : HDFS,S3,NONE
+ res.upload.startup.type=NONE
# whether kerberos starts
hadoop.security.authentication.startup.state=false

12
escheduler-common/src/main/resources/common/hadoop/hadoop.properties

@@ -1,6 +1,16 @@
- # ha or single namenode,If namenode ha needs to copy core-site.xml and hdfs-site.xml to the conf directory
+ # ha or single namenode,If namenode ha needs to copy core-site.xml and hdfs-site.xml
+ # to the conf directory,support s3,for example : s3a://escheduler
fs.defaultFS=hdfs://mycluster:8020
# s3 need,s3 endpoint
fs.s3a.endpoint=http://192.168.199.91:9010
# s3 need,s3 access key
fs.s3a.access.key=A3DXS30FO22544RE
# s3 need,s3 secret key
fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK
#resourcemanager ha note this need ips , this empty if single
yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx

2
escheduler-dao/pom.xml

@@ -4,7 +4,7 @@
<parent>
<groupId>cn.analysys</groupId>
<artifactId>escheduler</artifactId>
- <version>1.0.3-SNAPSHOT</version>
+ <version>1.0.4-SNAPSHOT</version>
</parent>
<artifactId>escheduler-dao</artifactId>
<name>escheduler-dao</name>

47
escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java

@@ -58,6 +58,7 @@ public class ProcessDao extends AbstractBaseDao {
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
// ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
@@ -96,6 +97,12 @@ public class ProcessDao extends AbstractBaseDao {
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private WorkerServerMapper workerServerMapper;
@Autowired
private TenantMapper tenantMapper;
/**
 * task queue impl
 */
@@ -121,7 +128,9 @@ public class ProcessDao extends AbstractBaseDao {
udfFuncMapper = getMapper(UdfFuncMapper.class);
resourceMapper = getMapper(ResourceMapper.class);
workerGroupMapper = getMapper(WorkerGroupMapper.class);
workerServerMapper = getMapper(WorkerServerMapper.class);
taskQueue = TaskQueueFactory.getTaskQueueInstance();
tenantMapper = getMapper(TenantMapper.class);
}
@@ -485,9 +494,30 @@ public class ProcessDao extends AbstractBaseDao {
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
processInstance.setWorkerGroupId(command.getWorkerGroupId());
processInstance.setTimeout(processDefinition.getTimeout());
processInstance.setTenantId(processDefinition.getTenantId());
return processInstance;
}
/**
 * get process tenant
 * if there is a tenant id in the definition, use the definition's tenant;
 * if the definition has no tenant id, or that tenant no longer exists,
 * fall back to the definition creator's tenant.
 * @param tenantId
 * @param userId
 * @return
 */
public Tenant getTenantForProcess(int tenantId, int userId){
Tenant tenant = null;
if(tenantId >= 0){
tenant = tenantMapper.queryById(tenantId);
}
if(tenant == null){
User user = userMapper.queryById(userId);
tenant = tenantMapper.queryById(user.getTenantId());
}
return tenant;
}
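Behaviour sketch with illustrative ids, assuming an injected ProcessDao: a resolvable tenantId wins; -1 or a stale id falls back to the definition creator's tenant:

    Tenant tenant = processDao.getTenantForProcess(5, 10);    // tenant 5 if it exists
    Tenant fallback = processDao.getTenantForProcess(-1, 10); // user 10's tenant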
/**
 * check command parameters is valid
@@ -581,6 +611,8 @@
processInstance.setScheduleTime(command.getScheduleTime());
}
processInstance.setHost(host);
ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXEUTION;
int runTime = processInstance.getRunTimes();
switch (commandType){
case START_PROCESS:
@@ -621,6 +653,7 @@
case RECOVER_TOLERANCE_FAULT_PROCESS:
// recover tolerance fault process
processInstance.setRecovery(Flag.YES);
runStatus = processInstance.getState();
break;
case COMPLEMENT_DATA:
// delete all the valid tasks when complement data
@@ -652,7 +685,7 @@
default:
break;
}
- processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
+ processInstance.setState(runStatus);
return processInstance;
}
@@ -1566,7 +1599,6 @@
for (ProcessInstance processInstance:processInstanceList){
processNeedFailoverProcessInstances(processInstance);
}
}
@Transactional(value = "TransactionManager",rollbackFor = Exception.class)
@@ -1633,6 +1665,17 @@
return workerGroupMapper.queryById(workerGroupId);
}
/**
 * query worker server by host
 * @param host
 * @return
 */
public List<WorkerServer> queryWorkerServerByHost(String host){
return workerServerMapper.queryWorkerByHost(host);
}
}

46
escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java

@@ -17,6 +17,8 @@
package cn.escheduler.dao;
import cn.escheduler.common.Constants;
import cn.escheduler.common.enums.TaskRecordStatus;
import cn.escheduler.common.utils.CollectionUtils;
import cn.escheduler.common.utils.DateUtils;
import cn.escheduler.dao.model.TaskRecord;
import org.apache.commons.configuration.Configuration;
@@ -28,6 +30,7 @@ import org.slf4j.LoggerFactory;
import java.sql.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -43,7 +46,7 @@ public class TaskRecordDao {
/**
- * 加载配置文件
+ * load conf file
 */
private static Configuration conf;
@@ -56,6 +59,14 @@ public class TaskRecordDao {
}
}
/**
* get task record flag
* @return
*/
public static boolean getTaskRecordFlag(){
return conf.getBoolean(Constants.TASK_RECORD_FLAG);
}
/**
 * create connection
 * @return
@@ -253,4 +264,37 @@
}
return recordList;
}
/**
* query task record status by procName and procDate
* @param procName
* @param procDate
* @return
*/
public static TaskRecordStatus getTaskRecordState(String procName,String procDate){
String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'"
,procName,procDate + "%");
List<TaskRecord> taskRecordList = getQueryResult(sql);
// contains no record and sql exception
if (CollectionUtils.isEmpty(taskRecordList)){
// exception
return TaskRecordStatus.EXCEPTION;
}else if (taskRecordList.size() > 1){
return TaskRecordStatus.EXCEPTION;
}else {
TaskRecord taskRecord = taskRecordList.get(0);
if (taskRecord == null){
return TaskRecordStatus.EXCEPTION;
}
Long targetRowCount = taskRecord.getTargetRowCount();
if (targetRowCount <= 0){
return TaskRecordStatus.FAILURE;
}else {
return TaskRecordStatus.SUCCESS;
}
}
}
}
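The decision rules, summarized with an illustrative call (the procName/procDate values are made up):

    // no matching row, more than one row, or a null row -> EXCEPTION
    // one row with targetRowCount <= 0                  -> FAILURE
    // one row with targetRowCount > 0                   -> SUCCESS
    TaskRecordStatus status = TaskRecordDao.getTaskRecordState("etl_users_daily", "2019-05-01");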

5
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapper.java

@@ -95,6 +95,7 @@ public interface ProcessDefinitionMapper {
@Result(property = "connects", column = "connects", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "projectName", column = "project_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "receivers", column = "receivers", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "receiversCc", column = "receivers_cc", javaType = String.class, jdbcType = JdbcType.VARCHAR)
@@ -123,6 +124,7 @@ public interface ProcessDefinitionMapper {
@Result(property = "locations", column = "locations", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "connects", column = "connects", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "projectName", column = "project_name", javaType = String.class, jdbcType = JdbcType.VARCHAR)
})
@SelectProvider(type = ProcessDefinitionMapperProvider.class, method = "queryByDefineName")
@@ -160,6 +162,7 @@ public interface ProcessDefinitionMapper {
@Result(property = "flag", column = "flag", typeHandler = EnumOrdinalTypeHandler.class, javaType = Flag.class, jdbcType = JdbcType.TINYINT),
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "projectName", column = "project_name", javaType = String.class, jdbcType = JdbcType.VARCHAR)
})
@SelectProvider(type = ProcessDefinitionMapperProvider.class, method = "queryAllDefinitionList")
@@ -187,6 +190,7 @@ public interface ProcessDefinitionMapper {
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "scheduleReleaseState", column = "schedule_release_state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ReleaseState.class, jdbcType = JdbcType.TINYINT),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "projectName", column = "project_name", javaType = String.class, jdbcType = JdbcType.VARCHAR)
})
@SelectProvider(type = ProcessDefinitionMapperProvider.class, method = "queryDefineListPaging")
@@ -216,6 +220,7 @@ public interface ProcessDefinitionMapper {
@Result(property = "connects", column = "connects", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "projectName", column = "project_name", javaType = String.class, jdbcType = JdbcType.VARCHAR)
})
@SelectProvider(type = ProcessDefinitionMapperProvider.class, method = "queryDefinitionListByIdList")

4
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperProvider.java

@@ -56,6 +56,7 @@ public class ProcessDefinitionMapperProvider {
VALUES("`create_time`", "#{processDefinition.createTime}");
VALUES("`update_time`", "#{processDefinition.updateTime}");
VALUES("`timeout`", "#{processDefinition.timeout}");
VALUES("`tenant_id`", "#{processDefinition.tenantId}");
VALUES("`flag`", EnumFieldUtil.genFieldStr("processDefinition.flag", ReleaseState.class));
VALUES("`user_id`", "#{processDefinition.userId}");
@@ -102,6 +103,7 @@ public class ProcessDefinitionMapperProvider {
SET("`create_time`=#{processDefinition.createTime}");
SET("`update_time`=#{processDefinition.updateTime}");
SET("`timeout`=#{processDefinition.timeout}");
SET("`tenant_id`=#{processDefinition.tenantId}");
SET("`flag`="+EnumFieldUtil.genFieldStr("processDefinition.flag", Flag.class));
SET("`user_id`=#{processDefinition.userId}");
@@ -189,7 +191,7 @@ public class ProcessDefinitionMapperProvider {
if(userId != null && 0 != Integer.parseInt(userId.toString())){
WHERE("td.user_id = #{userId}");
}
- ORDER_BY(" td.update_time desc limit #{offset},#{pageSize} ");
+ ORDER_BY(" sc.schedule_release_state desc,td.update_time desc limit #{offset},#{pageSize} ");
}}.toString();
}
/**

11
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java

@@ -97,6 +97,7 @@ public interface ProcessInstanceMapper {
@Result(property = "queue", column = "queue", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryDetailById")
@@ -136,6 +137,7 @@ public interface ProcessInstanceMapper {
@Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryById")
@@ -175,6 +177,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -214,6 +217,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -262,6 +266,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -359,6 +364,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -452,6 +458,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -497,6 +504,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -542,6 +550,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@@ -585,6 +594,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryLastRunningProcess")
@@ -628,6 +638,7 @@ public interface ProcessInstanceMapper {
@Result(property = "processInstanceJson", column = "process_instance_json", javaType = String.class, jdbcType = JdbcType.VARCHAR),
@Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
@Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT)
})
@SelectProvider(type = ProcessInstanceMapperProvider.class, method = "queryLastManualProcess")

15
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java

@@ -69,6 +69,7 @@ public class ProcessInstanceMapperProvider {
VALUES("`executor_id`", "#{processInstance.executorId}");
VALUES("`worker_group_id`", "#{processInstance.workerGroupId}");
VALUES("`timeout`", "#{processInstance.timeout}");
VALUES("`tenant_id`", "#{processInstance.tenantId}");
VALUES("`process_instance_priority`", EnumFieldUtil.genFieldStr("processInstance.processInstancePriority", Priority.class));
}
}.toString();
@@ -141,6 +142,7 @@ public class ProcessInstanceMapperProvider {
SET("`dependence_schedule_times`=#{processInstance.dependenceScheduleTimes}");
SET("`is_sub_process`="+EnumFieldUtil.genFieldStr("processInstance.isSubProcess", Flag.class));
SET("`executor_id`=#{processInstance.executorId}");
SET("`tenant_id`=#{processInstance.tenantId}");
SET("`worker_group_id`=#{processInstance.workerGroupId}");
SET("`timeout`=#{processInstance.timeout}");
@@ -220,11 +222,11 @@ public class ProcessInstanceMapperProvider {
public String queryDetailById(Map<String, Object> parameter) {
return new SQL() {
{
- SELECT("inst.*,q.queue_name as queue,t.tenant_code,UNIX_TIMESTAMP(inst.end_time)-UNIX_TIMESTAMP(inst.start_time) as duration");
- FROM(TABLE_NAME + " inst, t_escheduler_user u,t_escheduler_tenant t,t_escheduler_queue q");
- WHERE("inst.executor_id = u.id AND u.tenant_id = t.id AND t.queue_id = q.id AND inst.id = #{processId}");
+ SELECT("inst.*,q.queue_name as queue,UNIX_TIMESTAMP(inst.end_time)-UNIX_TIMESTAMP(inst.start_time) as duration");
+ FROM(TABLE_NAME + " inst, t_escheduler_user u,t_escheduler_queue q");
+ WHERE("inst.executor_id = u.id AND t.queue_id = q.id AND inst.id = #{processId}");
}
}.toString();
}
@@ -402,7 +404,12 @@
FROM(TABLE_NAME);
- WHERE("`host` = #{host} and `state` in (" + strStates.toString() +")");
+ Object host = parameter.get("host");
+ if(host != null && StringUtils.isNotEmpty(host.toString())){
+     WHERE("`host` = #{host} ");
+ }
+ WHERE("`state` in (" + strStates.toString() +")");
ORDER_BY("`id` asc");
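A self-contained sketch of the conditional WHERE using MyBatis' SQL builder; when host is absent the host predicate is simply not added, so failover can match instances from any host (table name and state ordinals are illustrative):

    import org.apache.ibatis.jdbc.SQL;

    public class FailoverQuerySketch {
        public static void main(String[] args) {
            String host = null; // or e.g. "192.168.x.x"
            String sql = new SQL() {{
                SELECT("*");
                FROM("t_escheduler_process_instance");
                if (host != null && !host.isEmpty()) {
                    WHERE("`host` = #{host} ");
                }
                WHERE("`state` in (0,1,8)");
                ORDER_BY("`id` asc");
            }}.toString();
            System.out.println(sql); // host predicate only present when host is set
        }
    }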

1
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceMapperProvider.java

@@ -118,6 +118,7 @@ public class ResourceMapperProvider {
SET("`alias` = #{resource.alias}");
SET("`desc` = #{resource.desc}");
SET("`update_time` = #{resource.updateTime}");
SET("`size` = #{resource.size}");
WHERE("`id` = #{resource.id}");
}}.toString();
}

7
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapperProvider.java

@@ -228,7 +228,12 @@ public class TaskInstanceMapperProvider {
             SELECT("*, UNIX_TIMESTAMP(end_time)-UNIX_TIMESTAMP(start_time) as duration");
             FROM(TABLE_NAME);
-            WHERE("`host` = #{host} and `state` in (" + strStates.toString() +")");
+            Object host = parameter.get("host");
+            if(host != null && StringUtils.isNotEmpty(host.toString())){
+                WHERE("`host` = #{host} ");
+            }
+            WHERE("`state` in (" + strStates.toString() +")");
             ORDER_BY("`id` asc");
         }
     }.toString();

2
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java

@@ -187,7 +187,6 @@ public class UserMapperProvider {
         return new SQL() {{
             SELECT("count(0)");
             FROM(TABLE_NAME);
-            WHERE("user_type = 1");
             Object searchVal = parameter.get("searchVal");
             if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
                 WHERE( " user_name like concat('%', #{searchVal}, '%') ");
@@ -209,7 +208,6 @@ public class UserMapperProvider {
         FROM(TABLE_NAME + " u ");
         LEFT_OUTER_JOIN("t_escheduler_tenant t on u.tenant_id = t.id");
         LEFT_OUTER_JOIN("t_escheduler_queue q on t.queue_id = q.id");
-        WHERE("u.user_type = 1");
         Object searchVal = parameter.get("searchVal");
         if(searchVal != null && StringUtils.isNotEmpty(searchVal.toString())){
             WHERE( " u.user_name like concat('%', #{searchVal}, '%') ");

17
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerServerMapper.java

@@ -42,6 +42,23 @@ public interface WorkerServerMapper {
     @SelectProvider(type = WorkerServerMapperProvider.class, method = "queryAllWorker")
     List<WorkerServer> queryAllWorker();

+    /**
+     * query worker list
+     *
+     * @return
+     */
+    @Results(value = {
+        @Result(property = "id", column = "id", javaType = Integer.class, jdbcType = JdbcType.INTEGER),
+        @Result(property = "host", column = "host", javaType = String.class, jdbcType = JdbcType.VARCHAR),
+        @Result(property = "port", column = "port", javaType = int.class, jdbcType = JdbcType.INTEGER),
+        @Result(property = "zkDirectory", column = "zk_directory", javaType = String.class, jdbcType = JdbcType.VARCHAR),
+        @Result(property = "resInfo", column = "res_info", javaType = String.class, jdbcType = JdbcType.VARCHAR),
+        @Result(property = "createTime", column = "create_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP),
+        @Result(property = "lastHeartbeatTime", column = "last_heartbeat_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP)
+    })
+    @SelectProvider(type = WorkerServerMapperProvider.class, method = "queryWorkerByHost")
+    List<WorkerServer> queryWorkerByHost(@Param("host") String host);
+
     /**
      * insert worker server
      *

15
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerServerMapperProvider.java

@@ -37,6 +37,21 @@ public class WorkerServerMapperProvider {
         }}.toString();
     }

+    /**
+     * query worker list
+     * @return
+     */
+    public String queryWorkerByHost(Map<String, Object> parameter) {
+        return new SQL() {{
+            SELECT("*");
+            FROM(TABLE_NAME);
+            WHERE("host = #{host}");
+        }}.toString();
+    }
+
     /**
      * insert worker server
      * @param parameter

10
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessData.java

@@ -39,6 +39,8 @@ public class ProcessData {
     private int timeout;

+    private int tenantId;
+
     public ProcessData() {
     }
@@ -92,4 +94,12 @@ public class ProcessData {
     public void setTimeout(int timeout) {
         this.timeout = timeout;
     }
+
+    public int getTenantId() {
+        return tenantId;
+    }
+
+    public void setTenantId(int tenantId) {
+        this.tenantId = tenantId;
+    }
 }

13
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessDefinition.java

@@ -141,6 +141,11 @@ public class ProcessDefinition {
      */
     private int timeout;

+    /**
+     * tenant id
+     */
+    private int tenantId;
+
     public String getName() {
         return name;
@@ -354,7 +359,15 @@ public class ProcessDefinition {
                 ", receiversCc='" + receiversCc + '\'' +
                 ", scheduleReleaseState=" + scheduleReleaseState +
                 ", timeout=" + timeout +
+                ", tenantId=" + tenantId +
                 '}';
     }
+
+    public int getTenantId() {
+        return tenantId;
+    }
+
+    public void setTenantId(int tenantId) {
+        this.tenantId = tenantId;
+    }
 }

14
escheduler-dao/src/main/java/cn/escheduler/dao/model/ProcessInstance.java

@@ -188,6 +188,12 @@ public class ProcessInstance {
      */
     private int timeout;

+    /**
+     * tenant id
+     */
+    private int tenantId;
+
     public ProcessInstance(){
     }
@@ -534,6 +540,7 @@ public class ProcessInstance {
                 ", processInstanceJson='" + processInstanceJson + '\'' +
                 ", executorId=" + executorId +
                 ", tenantCode='" + tenantCode + '\'' +
+                ", tenantId='" + tenantId + '\'' +
                 ", queue='" + queue + '\'' +
                 ", isSubProcess=" + isSubProcess +
                 ", locations='" + locations + '\'' +
@@ -546,4 +553,11 @@ public class ProcessInstance {
                 '}';
     }
+
+    public void setTenantId(int tenantId) {
+        this.tenantId = tenantId;
+    }
+
+    public int getTenantId() {
+        return this.tenantId;
+    }
 }

125
escheduler-dao/src/main/java/cn/escheduler/dao/utils/DagHelper.java

@@ -18,16 +18,20 @@ package cn.escheduler.dao.utils;

 import cn.escheduler.common.enums.TaskDependType;
+import cn.escheduler.common.graph.DAG;
 import cn.escheduler.common.model.TaskNode;
 import cn.escheduler.common.model.TaskNodeRelation;
 import cn.escheduler.common.process.ProcessDag;
 import cn.escheduler.common.utils.JSONUtils;
 import cn.escheduler.dao.model.ProcessData;
+import cn.escheduler.dao.model.TaskInstance;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;

 /**
  * dag tools
@@ -105,8 +109,7 @@ public class DagHelper {
         }
         for (TaskNode taskNode : tmpTaskNodeList) {
-            if ( !taskNode.isForbidden()
-                    && null == findNodeByName(destTaskNodeList, taskNode.getName())) {
+            if (null == findNodeByName(destTaskNodeList, taskNode.getName())) {
                 destTaskNodeList.add(taskNode);
             }
         }
@@ -193,6 +196,24 @@ public class DagHelper {
         return processDag;
     }

+    /**
+     * parse the forbidden task nodes in process definition.
+     * @param processDefinitionJson
+     * @return
+     */
+    public static Map<String, TaskNode> getForbiddenTaskNodeMaps(String processDefinitionJson){
+        Map<String, TaskNode> forbidTaskNodeMap = new ConcurrentHashMap<>();
+        ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
+        List<TaskNode> taskNodeList = processData.getTasks();
+        for(TaskNode node : taskNodeList){
+            if(node.isForbidden()){
+                forbidTaskNodeMap.putIfAbsent(node.getName(), node);
+            }
+        }
+        return forbidTaskNodeMap;
+    }
+
     /**
      * find node by node name
@@ -210,4 +231,100 @@ public class DagHelper {
         }
         return null;
     }

+    /**
+     * get start vertex in one dag
+     * it would find the post node if the start vertex is forbidden running
+     * @param parentNodeName the previous node
+     * @param dag
+     * @param completeTaskList
+     * @return
+     */
+    public static Collection<String> getStartVertex(String parentNodeName, DAG<String, TaskNode, TaskNodeRelation> dag,
+                                                    Map<String, TaskInstance> completeTaskList){
+        if(completeTaskList == null){
+            completeTaskList = new HashMap<>();
+        }
+        Collection<String> startVertexs = null;
+        if(StringUtils.isNotEmpty(parentNodeName)){
+            startVertexs = dag.getSubsequentNodes(parentNodeName);
+        }else{
+            startVertexs = dag.getBeginNode();
+        }
+
+        List<String> tmpStartVertexs = new ArrayList<>();
+        if(startVertexs!= null){
+            tmpStartVertexs.addAll(startVertexs);
+        }
+
+        for(String start : startVertexs){
+            TaskNode startNode = dag.getNode(start);
+            if(!startNode.isForbidden() && !completeTaskList.containsKey(start)){
+                continue;
+            }
+            Collection<String> postNodes = getStartVertex(start, dag, completeTaskList);
+            for(String post : postNodes){
+                if(checkForbiddenPostCanSubmit(post, dag)){
+                    tmpStartVertexs.add(post);
+                }
+            }
+            tmpStartVertexs.remove(start);
+        }
+
+        return tmpStartVertexs;
+    }
+
+    /**
+     *
+     * @param postNodeName
+     * @param dag
+     * @return
+     */
+    private static boolean checkForbiddenPostCanSubmit(String postNodeName, DAG<String, TaskNode, TaskNodeRelation> dag){
+        TaskNode postNode = dag.getNode(postNodeName);
+        List<String> dependList = postNode.getDepList();
+
+        for(String dependNodeName : dependList){
+            TaskNode dependNode = dag.getNode(dependNodeName);
+            if(!dependNode.isForbidden()){
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /***
+     * generate dag graph
+     * @param processDag
+     * @return
+     */
+    public static DAG<String, TaskNode, TaskNodeRelation> buildDagGraph(ProcessDag processDag) {
+
+        DAG<String,TaskNode,TaskNodeRelation> dag = new DAG<>();
+
+        /**
+         * add vertex
+         */
+        if (CollectionUtils.isNotEmpty(processDag.getNodes())){
+            for (TaskNode node : processDag.getNodes()){
+                dag.addNode(node.getName(),node);
+            }
+        }
+
+        /**
+         * add edge
+         */
+        if (CollectionUtils.isNotEmpty(processDag.getEdges())){
+            for (TaskNodeRelation edge : processDag.getEdges()){
+                dag.addEdge(edge.getStartNode(),edge.getEndNode());
+            }
+        }
+        return dag;
+    }
 }
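
The start-vertex rule above can be summarized: a forbidden (or already complete) node is skipped and its successors are promoted to start nodes, provided each successor's remaining dependencies are all forbidden. A deliberately simplified standalone sketch with stand-in types, not the project's DAG/TaskNode classes, and without the recursion through chains of forbidden nodes that the real getStartVertex performs:

import java.util.*;

public class ForbiddenStartVertexSketch {

    // node -> successors
    static Map<String, List<String>> edges = new HashMap<>();
    static Set<String> forbidden = new HashSet<>();

    static List<String> startVertices(Set<String> complete) {
        // begin nodes: nodes that never appear as anyone's successor
        Set<String> begins = new HashSet<>(edges.keySet());
        for (List<String> succ : edges.values()) {
            begins.removeAll(succ);
        }
        List<String> result = new ArrayList<>();
        for (String n : begins) {
            if (!forbidden.contains(n) && !complete.contains(n)) {
                result.add(n);                      // normal runnable start node
            } else {
                // skip the forbidden/complete node and promote its successors
                result.addAll(edges.getOrDefault(n, Collections.emptyList()));
            }
        }
        return result;
    }

    public static void main(String[] args) {
        edges.put("A", Arrays.asList("B"));
        edges.put("B", Collections.emptyList());
        forbidden.add("A");
        // A is forbidden, so B is promoted to the start vertex: prints [B]
        System.out.println(startVertices(Collections.emptySet()));
    }
}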

6
escheduler-dao/src/main/resources/dao/data_source.properties

@@ -1,9 +1,9 @@
 # base spring data source configuration
 spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
 spring.datasource.driver-class-name=com.mysql.jdbc.Driver
-spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/escheduler?characterEncoding=UTF-8
-spring.datasource.username=xx
-spring.datasource.password=xx
+spring.datasource.url=jdbc:mysql://192.168.220.188:3306/escheduler_new?characterEncoding=UTF-8
+spring.datasource.username=root
+spring.datasource.password=root@123

 # connection configuration
 spring.datasource.initialSize=5

2
escheduler-rpc/pom.xml

@@ -4,7 +4,7 @@
     <parent>
         <artifactId>escheduler</artifactId>
         <groupId>cn.analysys</groupId>
-        <version>1.0.3-SNAPSHOT</version>
+        <version>1.0.4-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>

2
escheduler-server/pom.xml

@@ -3,7 +3,7 @@
     <parent>
         <artifactId>escheduler</artifactId>
         <groupId>cn.analysys</groupId>
-        <version>1.0.3-SNAPSHOT</version>
+        <version>1.0.4-SNAPSHOT</version>
     </parent>
     <artifactId>escheduler-server</artifactId>
     <name>escheduler-server</name>

51
escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java

@@ -79,6 +79,7 @@ public class MasterExecThread implements Runnable {
     private Map<String, TaskInstance> completeTaskList = new ConcurrentHashMap<>();
     private Map<String, TaskInstance> readyToSubmitTaskList = new ConcurrentHashMap<>();
     private Map<String, TaskInstance> dependFailedTask = new ConcurrentHashMap<>();
+    private Map<String, TaskNode> forbiddenTaskList = new ConcurrentHashMap<>();
     private List<TaskInstance> recoverToleranceFaultTaskList = new ArrayList<>();
     private AlertManager alertManager = new AlertManager();
@@ -269,6 +270,7 @@ public class MasterExecThread implements Runnable {
     private void buildFlowDag() throws Exception {
         recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam());
+        forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson());
         // generate process to get DAG info
         List<String> recoveryNameList = getRecoveryNodeNameList();
         List<String> startNodeNameList = parseStartNodeName(processInstance.getCommandParam());
@@ -279,7 +281,8 @@ public class MasterExecThread implements Runnable {
             return;
         }
         // generate process dag
-        dag = buildDagGraph(processDag);
+        dag = DagHelper.buildDagGraph(processDag);
     }

     private void initTaskQueue(){
@@ -411,6 +414,8 @@ public class MasterExecThread implements Runnable {
         return taskInstance;
     }
+
+
     /**
      * get post task instance by node
      *
@@ -421,14 +426,12 @@ public class MasterExecThread implements Runnable {
     private List<TaskInstance> getPostTaskInstanceByNode(DAG<String, TaskNode, TaskNodeRelation> dag, String parentNodeName){

         List<TaskInstance> postTaskList = new ArrayList<>();
-        Collection<String> startVertex = null;
-        if(StringUtils.isNotEmpty(parentNodeName)){
-            startVertex = dag.getSubsequentNodes(parentNodeName);
-        }else{
-            startVertex = dag.getBeginNode();
+        Collection<String> startVertex = DagHelper.getStartVertex(parentNodeName, dag, completeTaskList);
+        if(startVertex == null){
+            return postTaskList;
         }

         for (String nodeName : startVertex){
             // encapsulation task instance
             TaskInstance taskInstance = createTaskInstance(processInstance, nodeName ,
                     dag.getNode(nodeName),parentNodeName);
@@ -517,7 +520,10 @@ public class MasterExecThread implements Runnable {
         List<String> depsNameList = taskNode.getDepList();
         for(String depsNode : depsNameList ){
-            // dependencies must be all complete
+            if(forbiddenTaskList.containsKey(depsNode)){
+                continue;
+            }
+            // dependencies must be fully completed
             if(!completeTaskList.containsKey(depsNode)){
                 return DependResult.WAITING;
             }
@@ -904,35 +910,6 @@ public class MasterExecThread implements Runnable {
         }
     }

-    /***
-     * generate dag graph
-     * @param processDag
-     * @return
-     */
-    public DAG<String, TaskNode, TaskNodeRelation> buildDagGraph(ProcessDag processDag) {
-
-        DAG<String,TaskNode,TaskNodeRelation> dag = new DAG<>();
-
-        /**
-         * add vertex
-         */
-        if (CollectionUtils.isNotEmpty(processDag.getNodes())){
-            for (TaskNode node : processDag.getNodes()){
-                dag.addNode(node.getName(),node);
-            }
-        }
-
-        /**
-         * add edge
-         */
-        if (CollectionUtils.isNotEmpty(processDag.getEdges())){
-            for (TaskNodeRelation edge : processDag.getEdges()){
-                dag.addEdge(edge.getStartNode(),edge.getEndNode());
-            }
-        }
-        return dag;
-    }
-
     /**
      * whether the retry interval is timed out
      * @param taskInstance

30
escheduler-server/src/main/java/cn/escheduler/server/utils/ProcessUtils.java

@@ -18,6 +18,7 @@ package cn.escheduler.server.utils;

 import cn.escheduler.common.Constants;
 import cn.escheduler.common.utils.CommonUtils;
+import cn.escheduler.common.utils.OSUtils;
 import cn.escheduler.dao.model.TaskInstance;
 import cn.escheduler.server.rpc.LogClient;
 import org.apache.commons.io.FileUtils;
@@ -33,6 +34,7 @@ import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+
 /**
  * mainly used to get the start command line of a process
  */
@@ -139,6 +141,8 @@ public class ProcessUtils {
             {' ', '\t', '<', '>'}, {' ', '\t'}};

+    private static Matcher matcher;
+
     private static String createCommandLine(int verificationType, final String executablePath, final String[] cmd) {
         StringBuilder cmdbuf = new StringBuilder(80);
@@ -256,11 +260,11 @@ public class ProcessUtils {
             return ;
         }

-        String cmd = String.format("sudo kill -9 %d", processId);
+        String cmd = String.format("sudo kill -9 %s", getPidsStr(processId));
         logger.info("process id:{}, cmd:{}", processId, cmd);

-        Runtime.getRuntime().exec(cmd);
+        OSUtils.exeCmd(cmd);

         // find log and kill yarn job
         killYarnJob(taskInstance);
@@ -270,12 +274,28 @@ public class ProcessUtils {
         }
     }

+    /**
+     * get pids str
+     * @param processId
+     * @return
+     * @throws Exception
+     */
+    private static String getPidsStr(int processId)throws Exception{
+        StringBuilder sb = new StringBuilder();
+        // pstree -p pid : get sub pids
+        String pids = OSUtils.exeCmd("pstree -p " +processId+ "");
+        Matcher mat = Pattern.compile("(\\d+)").matcher(pids);
+        while (mat.find()){
+            sb.append(mat.group()+" ");
+        }
+        return sb.toString().trim();
+    }
+
     /**
      * find logs and kill yarn tasks
      * @param taskInstance
-     * @throws IOException
      */
-    public static void killYarnJob(TaskInstance taskInstance) throws Exception {
+    public static void killYarnJob(TaskInstance taskInstance) {
         try {
             Thread.sleep(Constants.SLEEP_TIME_MILLIS);
             LogClient logClient = new LogClient(taskInstance.getHost(), Constants.RPC_PORT);
@@ -295,7 +315,7 @@ public class ProcessUtils {
         } catch (Exception e) {
             logger.error("kill yarn job failed : " + e.getMessage(),e);
-            throw new RuntimeException("kill yarn job fail");
+            // throw new RuntimeException("kill yarn job fail");
         }
     }
 }
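
getPidsStr relies on pstree printing every descendant pid in parentheses (e.g. "bash(1234)---sleep(1235)"), so a single regex pass over the output yields the whole process tree for the kill command, reaching children that a plain kill on the parent pid would miss. A standalone sketch assuming a Linux host with pstree installed, using ProcessBuilder in place of OSUtils.exeCmd:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PstreeKillSketch {

    // collect the pid and all descendant pids as a space-separated string
    static String getPidsStr(int processId) throws Exception {
        Process p = new ProcessBuilder("pstree", "-p", String.valueOf(processId)).start();
        StringBuilder out = new StringBuilder();
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) {
                out.append(line);
            }
        }
        // every number in pstree's output is a pid
        StringBuilder sb = new StringBuilder();
        Matcher mat = Pattern.compile("(\\d+)").matcher(out.toString());
        while (mat.find()) {
            sb.append(mat.group()).append(" ");
        }
        return sb.toString().trim();
    }

    public static void main(String[] args) throws Exception {
        System.out.println("kill -9 " + getPidsStr(1));
    }
}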

12
escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java

@@ -23,10 +23,7 @@ import cn.escheduler.common.thread.ThreadUtils;
 import cn.escheduler.common.utils.FileUtils;
 import cn.escheduler.common.utils.OSUtils;
 import cn.escheduler.dao.ProcessDao;
-import cn.escheduler.dao.model.ProcessDefinition;
-import cn.escheduler.dao.model.ProcessInstance;
-import cn.escheduler.dao.model.TaskInstance;
-import cn.escheduler.dao.model.WorkerGroup;
+import cn.escheduler.dao.model.*;
 import cn.escheduler.server.zk.ZKWorkerClient;
 import com.cronutils.utils.StringUtils;
 import org.apache.commons.configuration.Configuration;
@@ -194,9 +191,16 @@ public class FetchTaskThread implements Runnable{

                         // get process instance
                         ProcessInstance processInstance = processDao.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
+
                         // get process define
                         ProcessDefinition processDefine = processDao.findProcessDefineById(taskInstance.getProcessDefinitionId());
+
+                        Tenant tenant = processDao.getTenantForProcess(processInstance.getTenantId(),
+                                processDefine.getUserId());
+                        if(tenant != null){
+                            processInstance.setTenantCode(tenant.getTenantCode());
+                        }
+
                         taskInstance.setProcessInstance(processInstance);
                         taskInstance.setProcessDefine(processDefine);

72
escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java

@@ -19,18 +19,25 @@ package cn.escheduler.server.worker.runner;

 import cn.escheduler.common.Constants;
 import cn.escheduler.common.enums.ExecutionStatus;
+import cn.escheduler.common.enums.TaskRecordStatus;
 import cn.escheduler.common.enums.TaskType;
 import cn.escheduler.common.model.TaskNode;
 import cn.escheduler.common.process.Property;
 import cn.escheduler.common.task.AbstractParameters;
 import cn.escheduler.common.task.TaskTimeoutParameter;
-import cn.escheduler.common.utils.CommonUtils;
-import cn.escheduler.common.utils.HadoopUtils;
-import cn.escheduler.common.utils.TaskParametersUtils;
+import cn.escheduler.common.task.mr.MapreduceParameters;
+import cn.escheduler.common.task.procedure.ProcedureParameters;
+import cn.escheduler.common.task.python.PythonParameters;
+import cn.escheduler.common.task.shell.ShellParameters;
+import cn.escheduler.common.task.spark.SparkParameters;
+import cn.escheduler.common.task.sql.SqlParameters;
+import cn.escheduler.common.utils.*;
 import cn.escheduler.dao.ProcessDao;
+import cn.escheduler.dao.TaskRecordDao;
 import cn.escheduler.dao.model.ProcessInstance;
 import cn.escheduler.dao.model.TaskInstance;
 import cn.escheduler.server.utils.LoggerUtils;
+import cn.escheduler.server.utils.ParamUtils;
 import cn.escheduler.server.worker.log.TaskLogger;
 import cn.escheduler.server.worker.task.AbstractTask;
 import cn.escheduler.server.worker.task.TaskManager;
@@ -141,6 +148,7 @@ public class TaskScheduleThread implements Callable<Boolean> {
             TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class);
+
             List<String> projectRes = createProjectResFiles(taskNode);

             // copy hdfs file to local
@@ -199,6 +207,31 @@ public class TaskScheduleThread implements Callable<Boolean> {
             if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){
                 status = ExecutionStatus.SUCCESS;
+                // task record flag : if true , start up qianfan
+                if (TaskRecordDao.getTaskRecordFlag()
+                        && TaskType.typeIsNormalTask(taskInstance.getTaskType())){
+
+                    AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskProps.getTaskParams(), getCurTaskParamsClass());
+
+                    // replace placeholder
+                    Map<String, Property> paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(),
+                            taskProps.getDefinedParams(),
+                            params.getLocalParametersMap(),
+                            processInstance.getCmdTypeIfComplement(),
+                            processInstance.getScheduleTime());
+                    if (paramsMap != null && !paramsMap.isEmpty()
+                            && paramsMap.containsKey("v_proc_date")){
+                        String vProcDate = paramsMap.get("v_proc_date").getValue();
+                        if (!StringUtils.isEmpty(vProcDate)){
+                            TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskInstance.getName(), vProcDate);
+                            logger.info("task record status : {}",taskRecordState);
+                            if (taskRecordState == TaskRecordStatus.FAILURE){
+                                status = ExecutionStatus.FAILURE;
+                            }
+                        }
+                    }
+                }
             }else if (task.getExitStatusCode() == Constants.EXIT_CODE_KILL){
                 status = ExecutionStatus.KILL;
             }else {
@@ -251,6 +284,39 @@ public class TaskScheduleThread implements Callable<Boolean> {
     }

+    /**
+     * get current task parameter class
+     * @return
+     */
+    private Class getCurTaskParamsClass(){
+        Class paramsClass = null;
+        TaskType taskType = TaskType.valueOf(taskInstance.getTaskType());
+        switch (taskType){
+            case SHELL:
+                paramsClass = ShellParameters.class;
+                break;
+            case SQL:
+                paramsClass = SqlParameters.class;
+                break;
+            case PROCEDURE:
+                paramsClass = ProcedureParameters.class;
+                break;
+            case MR:
+                paramsClass = MapreduceParameters.class;
+                break;
+            case SPARK:
+                paramsClass = SparkParameters.class;
+                break;
+            case PYTHON:
+                paramsClass = PythonParameters.class;
+                break;
+            default:
+                logger.error("not support this task type: {}", taskType);
+                throw new IllegalArgumentException("not support this task type");
+        }
+        return paramsClass;
+    }
+
     /**
      * kill task
      */
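
getCurTaskParamsClass is a plain type dispatch: task type in, parameter class out, then the task's params JSON is deserialized into that class. A standalone sketch of the same idea with simplified stand-in types (not the project's classes; requires jackson-databind); a static map is one alternative to the switch statement:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.HashMap;
import java.util.Map;

public class TaskParamsDispatchSketch {
    enum TaskType { SHELL, SQL }

    // simplified stand-ins for ShellParameters / SqlParameters
    static class ShellParams { public String rawScript; }
    static class SqlParams   { public String sql; }

    static final Map<TaskType, Class<?>> PARAMS_CLASS = new HashMap<>();
    static {
        PARAMS_CLASS.put(TaskType.SHELL, ShellParams.class);
        PARAMS_CLASS.put(TaskType.SQL, SqlParams.class);
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = "{\"rawScript\":\"echo hello\"}";
        // look up the concrete class for the task type, then bind the JSON to it
        Object params = mapper.readValue(json, PARAMS_CLASS.get(TaskType.SHELL));
        System.out.println(((ShellParams) params).rawScript);
    }
}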

22
escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractCommandExecutor.java

@@ -213,7 +213,7 @@ public abstract class AbstractCommandExecutor {
      */
     private int updateState(ProcessDao processDao, int exitStatusCode, int pid, int taskInstId) {
         //get yarn state by log
-        if (exitStatusCode != -1) {
+        if (exitStatusCode != 0) {
             TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId);
             logger.info("process id is {}", pid);
@@ -380,14 +380,22 @@ public abstract class AbstractCommandExecutor {
         boolean result = true;
         try {
             for (String appId : appIds) {
+                while(true){
                     ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId);
                     logger.info("appId:{}, final state:{}",appId,applicationStatus.name());
-                    if (!applicationStatus.equals(ExecutionStatus.SUCCESS)) {
-                        result = false;
+                    if (applicationStatus.equals(ExecutionStatus.FAILURE) ||
+                            applicationStatus.equals(ExecutionStatus.KILL)) {
+                        return false;
+                    }
+
+                    if (applicationStatus.equals(ExecutionStatus.SUCCESS)){
+                        break;
+                    }
+                    Thread.sleep(Constants.SLEEP_TIME_MILLIS);
                 }
             }
         } catch (Exception e) {
-            logger.error(String.format("mapreduce applications: %s status failed : " + e.getMessage(), appIds.toString()),e);
+            logger.error(String.format("yarn applications: %s status failed : " + e.getMessage(), appIds.toString()),e);
             result = false;
         }
         return result;
@@ -548,10 +556,4 @@ public abstract class AbstractCommandExecutor {
     protected abstract boolean checkShowLog(String line);
     protected abstract boolean checkFindApp(String line);
     protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException;
-
-    // if(line.contains(taskAppId) || !line.contains("cn.escheduler.server.worker.log.TaskLogger")){
-    //     logs.add(line);
-    // }
 }
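
The loop above turns a one-shot status sample into a poll-until-terminal wait: fail fast on FAILURE/KILL, break on SUCCESS, otherwise sleep and poll again. A standalone sketch of the pattern, with a stand-in enum and status lookup in place of ExecutionStatus and HadoopUtils.getInstance().getApplicationStatus(appId):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class YarnWaitSketch {
    enum Status { RUNNING, SUCCESS, FAILURE, KILL }

    // stand-in for the real YARN status lookup
    static Status fetchStatus(String appId) {
        return Status.SUCCESS;
    }

    static boolean waitForApplications(List<String> appIds) throws InterruptedException {
        for (String appId : appIds) {
            while (true) {
                Status s = fetchStatus(appId);
                if (s == Status.FAILURE || s == Status.KILL) {
                    return false;              // fail fast: one bad app fails the task
                }
                if (s == Status.SUCCESS) {
                    break;                     // this app is done, check the next one
                }
                TimeUnit.SECONDS.sleep(1);     // still running: poll again later
            }
        }
        return true;
    }

    public static void main(String[] args) throws InterruptedException {
        System.out.println(waitForApplications(Arrays.asList("application_1_0001")));
    }
}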

30
escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java

@@ -196,7 +196,7 @@ public class SqlTask extends AbstractTask {
         }

         // special characters need to be escaped, ${} needs to be escaped
-        String rgex = "'?\\$\\{(.*?)\\}'?";
+        String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
         setSqlParamsMap(sql,rgex,sqlParamsMap,paramsMap);

         // replace the ${} of the SQL statement with the Placeholder
@@ -310,6 +310,7 @@ public class SqlTask extends AbstractTask {
             }
         } catch (Exception e) {
             logger.error(e.getMessage(),e);
+            throw new RuntimeException(e.getMessage());
         }
         return connection;
     }
@@ -326,6 +327,7 @@ public class SqlTask extends AbstractTask {
                 ParameterUtils.setInParameter(key,stmt,prop.getType(),prop.getValue());
             }
         }
+        logger.info("prepare statement replace sql:{}",stmt.toString());
         return stmt;
     }
@@ -347,14 +349,14 @@ public class SqlTask extends AbstractTask {
         // receiving group list
         List<String> receviersList = new ArrayList<String>();
         for(User user:users){
-            receviersList.add(user.getEmail());
+            receviersList.add(user.getEmail().trim());
         }

         // custom receiver
         String receivers = sqlParameters.getReceivers();
         if (StringUtils.isNotEmpty(receivers)){
             String[] splits = receivers.split(Constants.COMMA);
             for (String receiver : splits){
-                receviersList.add(receiver);
+                receviersList.add(receiver.trim());
             }
         }
@@ -365,15 +367,19 @@ public class SqlTask extends AbstractTask {
         if (StringUtils.isNotEmpty(receiversCc)){
             String[] splits = receiversCc.split(Constants.COMMA);
             for (String receiverCc : splits){
-                receviersCcList.add(receiverCc);
+                receviersCcList.add(receiverCc.trim());
             }
         }

         String showTypeName = sqlParameters.getShowType().replace(Constants.COMMA,"").trim();
         if(EnumUtils.isValidEnum(ShowType.class,showTypeName)){
-            MailUtils.sendMails(receviersList,receviersCcList,title, content, ShowType.valueOf(showTypeName));
+            Map<String, Object> mailResult = MailUtils.sendMails(receviersList, receviersCcList, title, content, ShowType.valueOf(showTypeName));
+            if(!(Boolean) mailResult.get(Constants.STATUS)){
+                throw new RuntimeException("send mail failed!");
+            }
         }else{
             logger.error("showType: {} is not valid " ,showTypeName);
+            throw new RuntimeException(String.format("showType: %s is not valid ",showTypeName));
         }
     }
@@ -411,19 +417,5 @@ public class SqlTask extends AbstractTask {
             logPrint.append(sqlParamsMap.get(i).getValue()+"("+sqlParamsMap.get(i).getType()+")");
         }
         logger.info(logPrint.toString());
-
-        //direct print style
-        Pattern pattern = Pattern.compile(rgex);
-        Matcher m = pattern.matcher(content);
-        int index = 1;
-        StringBuffer sb = new StringBuffer("replaced sql , direct:");
-        while (m.find()) {
-            m.appendReplacement(sb, sqlParamsMap.get(index).getValue());
-            index ++;
-        }
-        m.appendTail(sb);
-        logger.info(sb.toString());
     }
 }
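
The widened pattern matters because the whole regex match is replaced with a JDBC placeholder: if surrounding quotes are not part of the match, they survive into the prepared statement. The old pattern only tolerated an optional single quote on each side; the new one also strips double quotes. A small standalone comparison of the two (the sample SQL is illustrative only):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PlaceholderRegexSketch {
    public static void main(String[] args) {
        String oldRgex = "'?\\$\\{(.*?)\\}'?";
        String newRgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
        String sql = "select * from t where dt = \"${v_proc_date}\" and id = '${id}'";

        for (String rgex : new String[]{oldRgex, newRgex}) {
            Matcher m = Pattern.compile(rgex).matcher(sql);
            StringBuffer sb = new StringBuffer();
            while (m.find()) {
                // group(1) is the parameter name; the whole match
                // (quotes included, if matched) becomes a JDBC '?'
                m.appendReplacement(sb, "?");
            }
            m.appendTail(sb);
            // old regex leaves: dt = "?" ; new regex yields: dt = ?
            System.out.println(rgex + " -> " + sb);
        }
    }
}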

193
escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java

@@ -18,6 +18,7 @@ package cn.escheduler.server.zk;

 import cn.escheduler.common.Constants;
 import cn.escheduler.common.enums.ExecutionStatus;
+import cn.escheduler.common.enums.ZKNodeType;
 import cn.escheduler.common.utils.CollectionUtils;
 import cn.escheduler.common.utils.DateUtils;
 import cn.escheduler.common.utils.OSUtils;
@@ -28,10 +29,11 @@ import cn.escheduler.dao.ProcessDao;
 import cn.escheduler.dao.ServerDao;
 import cn.escheduler.dao.model.ProcessInstance;
 import cn.escheduler.dao.model.TaskInstance;
+import cn.escheduler.dao.model.WorkerServer;
 import cn.escheduler.server.ResInfo;
 import cn.escheduler.server.utils.ProcessUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.imps.CuratorFrameworkState;
 import org.apache.curator.framework.recipes.cache.PathChildrenCache;
 import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
 import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
@@ -134,7 +136,9 @@ public class ZKMasterClient extends AbstractZKClient {
             // check if fault tolerance is required,failure and tolerance
             if (getActiveMasterNum() == 1) {
-                processDao.masterStartupFaultTolerant();
+                failoverWorker(null, true);
+//                processDao.masterStartupFaultTolerant();
+                failoverMaster(null);
             }

         }catch (Exception e){
@@ -190,31 +194,20 @@ public class ZKMasterClient extends AbstractZKClient {
         Date now = new Date();
         createTime = now ;
         try {
+            String osHost = OSUtils.getHost();

-            // encapsulation master znnode
-            masterZNode = masterZNodeParentPath + "/" + OSUtils.getHost() + "_";
-            List<String> masterZNodeList = zkClient.getChildren().forPath(masterZNodeParentPath);
-
-            if (CollectionUtils.isNotEmpty(masterZNodeList)){
-                boolean flag = false;
-                for (String masterZNode : masterZNodeList){
-                    if (masterZNode.startsWith(OSUtils.getHost())){
-                        flag = true;
-                        break;
-                    }
-                }
-                if (flag){
-                    logger.error("register failure , master already started on host : {}" , OSUtils.getHost());
-                    // exit system
-                    System.exit(-1);
-                }
-            }
+            // zookeeper node exists, cannot start a new one.
+            if(checkZKNodeExists(osHost, ZKNodeType.MASTER)){
+                logger.error("register failure , master already started on host : {}" , osHost);
+                // exit system
+                System.exit(-1);
+            }

             // specify the format of stored data in ZK nodes
             String heartbeatZKInfo = getOsInfo(now);
             // create temporary sequence nodes for master znode
-            masterZNode = zkClient.create().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).forPath(masterZNode, heartbeatZKInfo.getBytes());
+            masterZNode = zkClient.create().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).forPath(
+                    masterZNodeParentPath + "/" + OSUtils.getHost() + "_", heartbeatZKInfo.getBytes());

             logger.info("register master node {} success" , masterZNode);
@@ -238,6 +231,46 @@ public class ZKMasterClient extends AbstractZKClient {
         }
     }

+    /**
+     * check the zookeeper node already exists
+     * @param host
+     * @param zkNodeType
+     * @return
+     * @throws Exception
+     */
+    private boolean checkZKNodeExists(String host, ZKNodeType zkNodeType) throws Exception {
+        String path = null;
+        switch (zkNodeType){
+            case MASTER:
+                path = masterZNodeParentPath;
+                break;
+            case WORKER:
+                path = workerZNodeParentPath;
+                break;
+            case DEAD_SERVER:
+                path = deadServerZNodeParentPath;
+                break;
+            default:
+                break;
+        }
+        if(StringUtils.isEmpty(path)){
+            logger.error("check zk node exists error, host:{}, zk node type:{}", host, zkNodeType.toString());
+            return false;
+        }
+
+        List<String> masterZNodeList = null;
+        masterZNodeList = zkClient.getChildren().forPath(path);
+        if (CollectionUtils.isNotEmpty(masterZNodeList)){
+            for (String masterZNode : masterZNodeList){
+                if (masterZNode.startsWith(host)){
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
     /**
      * monitor master
      */
@@ -279,17 +312,9 @@ public class ZKMasterClient extends AbstractZKClient {
                 for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER;i++) {
                     alertDao.sendServerStopedAlert(1, masterHost, "Master-Server");
                 }
-                logger.info("start master failover ...");
-                List<ProcessInstance> needFailoverProcessInstanceList = processDao.queryNeedFailoverProcessInstances(masterHost);
-
-                //updateProcessInstance host is null and insert into command
-                for(ProcessInstance processInstance : needFailoverProcessInstanceList){
-                    processDao.processNeedFailoverProcessInstances(processInstance);
+                if(StringUtils.isNotEmpty(masterHost)){
+                    failoverMaster(masterHost);
                 }
-                logger.info("master failover end");
             }catch (Exception e){
                 logger.error("master failover failed : " + e.getMessage(),e);
             }finally {
@@ -331,6 +356,8 @@ public class ZKMasterClient extends AbstractZKClient {
         }
     }

+
+
     /**
      * monitor worker
      */
@@ -369,23 +396,9 @@ public class ZKMasterClient extends AbstractZKClient {
                     alertDao.sendServerStopedAlert(1, workerHost, "Worker-Server");
                 }
-                logger.info("start worker failover ...");
-
-                List<TaskInstance> needFailoverTaskInstanceList = processDao.queryNeedFailoverTaskInstances(workerHost);
-                for(TaskInstance taskInstance : needFailoverTaskInstanceList){
-                    ProcessInstance instance = processDao.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
-                    if(instance!=null){
-                        taskInstance.setProcessInstance(instance);
-                    }
-                    // only kill yarn job if exists , the local thread has exited
-                    ProcessUtils.killYarnJob(taskInstance);
-                }
-
-                //updateProcessInstance state value is NEED_FAULT_TOLERANCE
-                processDao.updateNeedFailoverTaskInstances(workerHost);
-                logger.info("worker failover end");
+                if(StringUtils.isNotEmpty(workerHost)){
+                    failoverWorker(workerHost, true);
+                }
             }catch (Exception e){
                 logger.error("worker failover failed : " + e.getMessage(),e);
             }
@@ -476,6 +489,95 @@ public class ZKMasterClient extends AbstractZKClient {
     }

+    /**
+     * task needs failover if task start before worker starts
+     *
+     * @param taskInstance
+     * @return
+     */
+    private boolean checkTaskInstanceNeedFailover(TaskInstance taskInstance) throws Exception {
+
+        boolean taskNeedFailover = true;
+
+        // if the worker node exists in zookeeper, we must check the task starts after the worker
+        if(checkZKNodeExists(taskInstance.getHost(), ZKNodeType.WORKER)){
+            //if task start after worker starts, there is no need to failover the task.
+            if(checkTaskAfterWorkerStart(taskInstance)){
+                taskNeedFailover = false;
+            }
+        }
+        return taskNeedFailover;
+    }
+
+    /**
+     * check task start after the worker server starts.
+     * @param taskInstance
+     * @return
+     */
+    private boolean checkTaskAfterWorkerStart(TaskInstance taskInstance) {
+        Date workerServerStartDate = null;
+        List<WorkerServer> workerServers = processDao.queryWorkerServerByHost(taskInstance.getHost());
+        if(workerServers.size() > 0){
+            workerServerStartDate = workerServers.get(0).getCreateTime();
+        }
+
+        if(workerServerStartDate != null){
+            return taskInstance.getStartTime().after(workerServerStartDate);
+        }else{
+            return false;
+        }
+    }
+
+    /**
+     * failover worker tasks
+     * 1. kill yarn job if there are yarn jobs in tasks.
+     * 2. change task state from running to need failover.
+     * @param workerHost
+     */
+    private void failoverWorker(String workerHost, boolean needCheckWorkerAlive) throws Exception {
+        logger.info("start worker[{}] failover ...", workerHost);
+
+        List<TaskInstance> needFailoverTaskInstanceList = processDao.queryNeedFailoverTaskInstances(workerHost);
+        for(TaskInstance taskInstance : needFailoverTaskInstanceList){
+            if(needCheckWorkerAlive){
+                if(!checkTaskInstanceNeedFailover(taskInstance)){
+                    continue;
+                }
+            }
+
+            ProcessInstance instance = processDao.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
+            if(instance!=null){
+                taskInstance.setProcessInstance(instance);
+            }
+            // only kill yarn job if exists , the local thread has exited
+            ProcessUtils.killYarnJob(taskInstance);
+
+            taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE);
+            processDao.saveTaskInstance(taskInstance);
+        }
+
+        //update task Instance state value is NEED_FAULT_TOLERANCE
+//        processDao.updateNeedFailoverTaskInstances(workerHost);
+        logger.info("end worker[{}] failover ...", workerHost);
+    }
+
+    /**
+     * failover master tasks
+     * @param masterHost
+     */
+    private void failoverMaster(String masterHost) {
+        logger.info("start master failover ...");
+
+        List<ProcessInstance> needFailoverProcessInstanceList = processDao.queryNeedFailoverProcessInstances(masterHost);
+
+        //updateProcessInstance host is null and insert into command
+        for(ProcessInstance processInstance : needFailoverProcessInstanceList){
+            processDao.processNeedFailoverProcessInstances(processInstance);
+        }
+        logger.info("master failover end");
+    }

     /**
      * get host ip
@@ -488,6 +590,7 @@ public class ZKMasterClient extends AbstractZKClient {
         if(startIndex >= endIndex){
             logger.error("parse ip error");
+            return "";
         }
         return path.substring(startIndex, endIndex);
     }
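
checkZKNodeExists boils down to listing the children of the master/worker parent path and testing whether any znode name starts with the host. A minimal Curator sketch of that check (requires curator-framework; the connect string and parent path are placeholders, not the project's configuration):

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import java.util.List;

public class ZkNodeExistsSketch {
    public static void main(String[] args) throws Exception {
        CuratorFramework client = CuratorFrameworkFactory.newClient(
                "localhost:2181", new ExponentialBackoffRetry(1000, 3));
        client.start();
        try {
            String parentPath = "/escheduler/masters";   // placeholder path
            String host = "192.168.xx.xx";
            // ephemeral-sequential registrations are named "<host>_<seq>",
            // so a prefix match on the child name detects a live registration
            List<String> children = client.getChildren().forPath(parentPath);
            boolean exists = children.stream().anyMatch(c -> c.startsWith(host));
            System.out.println("node for host already registered: " + exists);
        } finally {
            client.close();
        }
    }
}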

42
escheduler-server/src/test/java/cn/escheduler/server/master/MasterCommandTest.java

@@ -18,15 +18,27 @@ package cn.escheduler.server.master;

 import cn.escheduler.common.enums.CommandType;
 import cn.escheduler.common.enums.FailureStrategy;
+import cn.escheduler.common.enums.TaskDependType;
 import cn.escheduler.common.enums.WarningType;
+import cn.escheduler.common.graph.DAG;
+import cn.escheduler.common.model.TaskNode;
+import cn.escheduler.common.model.TaskNodeRelation;
+import cn.escheduler.common.process.ProcessDag;
 import cn.escheduler.dao.datasource.ConnectionFactory;
 import cn.escheduler.dao.mapper.CommandMapper;
+import cn.escheduler.dao.mapper.ProcessDefinitionMapper;
 import cn.escheduler.dao.model.Command;
+import cn.escheduler.dao.model.ProcessDefinition;
+import cn.escheduler.dao.utils.DagHelper;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Map;
+
 /**
  * master test
  */
@@ -36,9 +48,14 @@ public class MasterCommandTest {
     private CommandMapper commandMapper;

+    private ProcessDefinitionMapper processDefinitionMapper;
+
     @Before
     public void before(){
         commandMapper = ConnectionFactory.getSqlSession().getMapper(CommandMapper.class);
+        processDefinitionMapper = ConnectionFactory.getSqlSession().getMapper(ProcessDefinitionMapper.class);
     }
@@ -104,4 +121,29 @@ public class MasterCommandTest {
     }

+    @Test
+    public void testDagHelper(){
+        ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineId(19);
+        try {
+            ProcessDag processDag = DagHelper.generateFlowDag(processDefinition.getProcessDefinitionJson(),
+                    new ArrayList<>(), new ArrayList<>(), TaskDependType.TASK_POST);
+
+            DAG<String,TaskNode,TaskNodeRelation> dag = DagHelper.buildDagGraph(processDag);
+            Collection<String> start = DagHelper.getStartVertex("1", dag, null);
+
+            System.out.println(start.toString());
+
+            Map<String, TaskNode> forbidden = DagHelper.getForbiddenTaskNodeMaps(processDefinition.getProcessDefinitionJson());
+            System.out.println(forbidden);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
 }

2
escheduler-ui/src/js/conf/home/pages/dag/_source/config.js

@@ -26,7 +26,7 @@ import Permissions from '@/module/permissions'
 * @desc tooltip
 */
const toolOper = (dagThis) => {
-  let disabled = Permissions.getAuth() === false ? false : !dagThis.$store.state.dag.isDetails
+  let disabled = !dagThis.$store.state.dag.isDetails // Permissions.getAuth() === false ? false : !dagThis.$store.state.dag.isDetails
  return [
    {
      code: 'pointer',

20
escheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue

@@ -8,7 +8,7 @@
         :id="v"
         v-for="(item,v) in tasksTypeList"
         @mousedown="_getDagId(v)">
-        <div data-toggle="tooltip" :title="item.desc" :class="_isDetails">
+        <div data-toggle="tooltip" :title="item.desc">
          <div class="icos" :class="'icos-' + v" ></div>
        </div>
      </div>
@@ -68,10 +68,9 @@
              type="primary"
              size="xsmall"
              :loading="spinnerLoading"
-             v-ps="['GENERAL_USER']"
              @click="_saveChart"
              icon="fa fa-save"
-             :disabled="isDetails">
+             >
        {{spinnerLoading ? 'Loading...' : $t('Save')}}
      </x-button>
    </div>
@@ -205,9 +204,9 @@
       * @param item
       */
      _getDagId (v) {
-       if (this.isDetails) {
-         return
-       }
+       // if (this.isDetails) {
+       //   return
+       // }
        this.dagBarId = v
      },
      /**
@@ -239,11 +238,12 @@
        })
      },
      _operationClass (item) {
-       if (item.disable) {
-         return this.toolOperCode === item.code ? 'active' : ''
-       } else {
-         return 'disable'
-       }
+       return this.toolOperCode === item.code ? 'active' : ''
+       // if (item.disable) {
+       //   return this.toolOperCode === item.code ? 'active' : ''
+       // } else {
+       //   return 'disable'
+       // }
      },
      /**
       * Storage interface

2
escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue

@@ -165,7 +165,7 @@
    <div class="bottom-box">
      <div class="submit" style="background: #fff;">
        <x-button type="text" @click="close()"> {{$t('Cancel')}} </x-button>
-       <x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()" :disabled="isDetails" v-ps="['GENERAL_USER']">{{spinnerLoading ? 'Loading...' : $t('Confirm add')}} </x-button>
+       <x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()" :disabled="isDetails">{{spinnerLoading ? 'Loading...' : $t('Confirm add')}} </x-button>
      </div>
    </div>
  </div>

2
escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue

@@ -3,7 +3,7 @@
    <m-list-box>
      <div slot="text">{{$t('Program Type')}}</div>
      <div slot="content">
-       <x-select v-model="programType" :disabled="isDetails" style="width: 100px;">
+       <x-select v-model="programType" :disabled="isDetails" style="width: 110px;">
        <x-option
          v-for="city in programTypeList"
          :key="city.code"

2
escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js

@@ -71,7 +71,7 @@ JSP.prototype.init = function ({ dag, instance }) {
  this.setConfig({
    isDrag: !store.state.dag.isDetails,
    isAttachment: false,
-   isNewNodes: Permissions.getAuth() === false ? false : !store.state.dag.isDetails,
+   isNewNodes: !store.state.dag.isDetails, // Permissions.getAuth() === false ? false : !store.state.dag.isDetails,
    isDblclick: true,
    isContextmenu: true,
    isClick: false

13
escheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue

@@ -26,6 +26,10 @@
        </div>
      </template>
+     <div class="title" style="padding-top: 6px;">
+       <span class="text-b">{{$t('select tenant')}}</span>
+       <form-tenant v-model="tenantId"></form-tenant>
+     </div>
      <div class="title" style="padding-top: 6px;">
        <span>超时告警</span>
        <span style="padding-left: 6px;">
@@ -62,7 +66,7 @@
        </div>
      </template>
      <x-button type="text" @click="close()"> {{$t('Cancel')}} </x-button>
-     <x-button type="primary" shape="circle" @click="ok()" v-ps="['GENERAL_USER']" >{{$t('Add')}}</x-button>
+     <x-button type="primary" shape="circle" @click="ok()">{{$t('Add')}}</x-button>
    </div>
  </div>
</div>
@@ -73,6 +77,7 @@
  import mLocalParams from '../formModel/tasks/_source/localParams'
  import disabledState from '@/module/mixin/disabledState'
  import Affirm from '../jumpAffirm'
+ import FormTenant from "./_source/selectTenant";
  export default {
    name: 'udp',
@@ -90,6 +95,8 @@
      syncDefine: true,
      // Timeout alarm
      timeout: 0,
+     tenantId: -1,
      // checked Timeout alarm
      checkedTimeout: true
    }
@@ -116,6 +123,7 @@
      this.store.commit('dag/setGlobalParams', _.cloneDeep(this.udpList))
      this.store.commit('dag/setName', _.cloneDeep(this.name))
      this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout))
+     this.store.commit('dag/setTenantId', _.cloneDeep(this.tenantId))
      this.store.commit('dag/setDesc', _.cloneDeep(this.desc))
      this.store.commit('dag/setSyncDefine', this.syncDefine)
    },
@@ -181,9 +189,10 @@
      this.syncDefine = dag.syncDefine
      this.timeout = dag.timeout || 0
      this.checkedTimeout = this.timeout !== 0
+     this.tenantId = dag.tenantId || -1
    },
    mounted () {},
-   components: { mLocalParams }
+   components: { FormTenant, mLocalParams }
  }
</script>

5
escheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue

@@ -26,7 +26,7 @@
    methods: {
      ...mapMutations('dag', ['resetParams', 'setIsDetails']),
      ...mapActions('dag', ['getProcessList', 'getResourcesList', 'getProcessDetails']),
-     ...mapActions('security', ['getWorkerGroupsAll']),
+     ...mapActions('security', ['getTenantList','getWorkerGroupsAll']),
      /**
       * init
       */
@@ -43,7 +43,8 @@
        // get resource
        this.getResourcesList(),
        // get worker group list
-       this.getWorkerGroupsAll()
+       this.getWorkerGroupsAll(),
+       this.getTenantList()
      ]).then((data) => {
        let item = data[0]
        this.setIsDetails(item.releaseState === 'ONLINE')

5
escheduler-ui/src/js/conf/home/pages/dag/index.vue

@@ -25,7 +25,7 @@
    methods: {
      ...mapMutations('dag', ['resetParams']),
      ...mapActions('dag', ['getProcessList', 'getResourcesList']),
-     ...mapActions('security', ['getWorkerGroupsAll']),
+     ...mapActions('security', ['getTenantList','getWorkerGroupsAll']),
      /**
       * init
       */
@@ -40,7 +40,8 @@
        // get resource
        this.getResourcesList(),
        // get worker group list
-       this.getWorkerGroupsAll()
+       this.getWorkerGroupsAll(),
+       this.getTenantList()
      ]).then((data) => {
        this.isLoading = false
        // Whether to pop up the box?

5
escheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue

@@ -26,7 +26,7 @@
    methods: {
      ...mapMutations('dag', ['setIsDetails', 'resetParams']),
      ...mapActions('dag', ['getProcessList', 'getResourcesList', 'getInstancedetail']),
-     ...mapActions('security', ['getWorkerGroupsAll']),
+     ...mapActions('security', ['getTenantList','getWorkerGroupsAll']),
      /**
       * init
       */
@@ -43,7 +43,8 @@
        // get resources
        this.getResourcesList(),
        // get worker group list
-       this.getWorkerGroupsAll()
+       this.getWorkerGroupsAll(),
+       this.getTenantList()
      ]).then((data) => {
        let item = data[0]
        let flag = false

41
escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue

@@ -63,6 +63,17 @@
         </x-input>
       </template>
     </m-list-box-f>
+    <m-list-box-f :class="{hidden:showPrincipal}">
+      <template slot="name"><b>*</b>Principal</template>
+      <template slot="content">
+        <x-input
+          type="input"
+          v-model="principal"
+          :placeholder="$t('Please enter Principal')"
+          autocomplete="off">
+        </x-input>
+      </template>
+    </m-list-box-f>
     <m-list-box-f>
       <template slot="name"><b>*</b>{{$t('User Name')}}</template>
       <template slot="content">
@@ -143,6 +154,8 @@
       port: '',
       // data storage name
       database: '',
+      // principal
+      principal: '',
       // database username
       userName: '',
       // Database password
@@ -150,12 +163,15 @@
       // Jdbc connection parameter
       other: '',
       // btn test loading
-      testLoading: false
+      testLoading: false,
+      showPrincipal: true,
+      isShowPrincipal: true
     }
   },
   props: {
     item: Object
   },
   methods: {
     _rtOtherPlaceholder () {
       return `${i18n.$t('Please enter format')} {"key1":"value1","key2":"value2"...} ${i18n.$t('connection parameter')}`
@@ -187,6 +203,7 @@
       host: this.host,
       port: this.port,
       database: this.database,
+      principal: this.principal,
       userName: this.userName,
       password: this.password,
       other: this.other
@@ -289,6 +306,7 @@
       this.note = res.note
       this.host = res.host
       this.port = res.port
+      this.principal = res.principal
       this.database = res.database
       this.userName = res.userName
       this.password = res.password
@@ -298,12 +316,31 @@
       })
     }
   },
-  watch: {},
+  watch: {
+    'type' (value) {
+      if ((value === 'HIVE' || value === 'SPARK') && this.isShowPrincipal === true) {
+        this.showPrincipal = false
+      } else {
+        this.showPrincipal = true
+      }
+    }
+  },
   created () {
     // Backfill
     if (this.item.id) {
       this._getEditDatasource()
     }
+    return new Promise((resolve, reject) => {
+      this.store.dispatch('datasource/getKerberosStartupState').then(res => {
+        this.isShowPrincipal = res
+        resolve() // resolve on success; the original promise otherwise never settles
+      }).catch(e => {
+        this.$message.error(e.msg || '')
+        reject(e)
+      })
+    })
   },
   mounted () {
   },
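
Taken together, the watcher and the created() hook above gate the Principal field on two conditions: the backend must report Kerberos enabled (isShowPrincipal, fetched once via datasource/getKerberosStartupState) and the selected type must be HIVE or SPARK. Note the inverted naming: showPrincipal feeds the hidden class on the field's wrapper, so false means visible. A condensed sketch of the combined rule, assuming getKerberosStartupState resolves to a boolean:

    // true  -> wrapper gets the `hidden` class; false -> Principal input is shown
    function principalHidden (type, kerberosEnabled) {
      return !((type === 'HIVE' || type === 'SPARK') && kerberosEnabled)
    }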

4
escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue

@@ -56,7 +56,6 @@
           shape="circle"
           size="xsmall"
           data-toggle="tooltip"
-          v-ps="['GENERAL_USER']"
           :title="$t('Edit')"
           icon="iconfont icon-bianjixiugai"
           @click="_edit(item)">
@@ -77,8 +76,7 @@
           size="xsmall"
           icon="iconfont icon-shanchu"
           data-toggle="tooltip"
-          :title="$t('delete')"
-          v-ps="['GENERAL_USER']">
+          :title="$t('delete')">
         </x-button>
       </template>
     </x-poptip>
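
This file opens a long run of templates that simply drop the v-ps directive. v-ps appears to be escheduler-ui's role-gating directive: it suppressed the element unless the current user held one of the listed roles, so removing v-ps="['GENERAL_USER']" makes these buttons render for every user type and leaves enforcement to the backend. A hypothetical minimal sketch of such a directive (the real implementation lives elsewhere in escheduler-ui; currentUserType is a stand-in for however the app exposes the role):

    import Vue from 'vue'

    // Remove the host element unless the user's role is in the allow-list.
    Vue.directive('ps', {
      inserted (el, binding) {
        const allowed = binding.value || [] // e.g. ['GENERAL_USER']
        if (!allowed.includes(currentUserType())) {
          el.parentNode && el.parentNode.removeChild(el)
        }
      }
    })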

2
escheduler-ui/src/js/conf/home/pages/datasource/pages/list/index.vue

@@ -3,7 +3,7 @@
     <template slot="conditions">
       <m-conditions @on-conditions="_onConditions">
         <template slot="button-group">
-          <x-button type="ghost" size="small" @click="_create('')" v-ps="['GENERAL_USER']">{{$t('Create Datasource')}}</x-button>
+          <x-button type="ghost" size="small" @click="_create('')">{{$t('Create Datasource')}}</x-button>
         </template>
       </m-conditions>
     </template>

115
escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/statistics.vue

@@ -0,0 +1,115 @@
<template>
<m-list-construction :title="$t('statistics') + $t('Manage')">
<template slot="content">
<div class="servers-wrapper mysql-model" v-show="2">
<div class="row">
<div class="col-md-3">
<div class="text-num-model text">
<div class="title">
<span >{{$t('process number of waiting for running')}}</span>
</div>
<div class="value-p">
<b :style="{color:color[0]}"> {{commandCountData.normalCount}}</b>
</div>
</div>
</div>
<div class="col-md-3">
<div class="text-num-model text">
<div class="title">
<span >{{$t('failure command number')}}</span>
</div>
<div class="value-p">
<b :style="{color:color[1]}"> {{commandCountData.errorCount}}</b>
</div>
</div>
</div>
<div class="col-md-3">
<div class="text-num-model text">
<div class="title">
<span >{{$t('tasks number of waiting running')}}</span>
</div>
<div class="value-p">
<b :style="{color:color[0]}"> {{queueCount.taskQueue}}</b>
</div>
</div>
</div>
<div class="col-md-3">
<div class="text-num-model text">
<div class="title">
<span >{{$t('task number of ready to kill')}}</span>
</div>
<div class="value-p">
<b :style="{color:color[1]}">{{queueCount.taskKill}}</b>
</div>
</div>
</div>
</div>
</div>
<m-spin :is-spin="isLoading" ></m-spin>
</template>
</m-list-construction>
</template>
<script>
import _ from 'lodash' // _.forEach and _.keys are used in created() below; missing in the original
import { mapActions } from 'vuex'
import mSpin from '@/module/components/spin/spin'
import mNoData from '@/module/components/noData/noData'
import themeData from '@/module/echarts/themeData.json'
import mListConstruction from '@/module/components/listConstruction/listConstruction'
export default {
name: 'statistics',
data () {
return {
isLoading: false,
queueCount: {},
commandCountData: {},
color: themeData.color
}
},
props:{},
methods: {
    ...mapActions('projects', ['getQueueCount', 'getCommandStateCount'])
  },
watch: {},
created () {
this.isLoading = true
this.getQueueCount().then(res => {
this.queueCount = res.data
this.isLoading = false
}).catch(() => {
this.isLoading = false
})
this.getCommandStateCount().then(res => {
let normal = 0
let error = 0
_.forEach(res.data, (v, i) => {
let key = _.keys(v)
if(key[0] == 'errorCount') {
error = error + v.errorCount
}
if(key[1] == 'normalCount'){
normal = normal + v.normalCount
}
}
)
this.commandCountData = {
'normalCount': normal,
'errorCount' : error
}
}).catch( () => {
})
},
mounted () {
},
components: { mListConstruction, mSpin, mNoData }
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./servers";
</style>
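
One fragility worth noting in the new page: the created() hook sums errorCount/normalCount by positional key inspection (key[0], key[1]), which miscounts if the backend reorders or omits a property. A sketch of an order-independent equivalent:

    // Sum the two counters across rows without relying on key order.
    const aggregate = (rows) =>
      rows.reduce((acc, v) => ({
        normalCount: acc.normalCount + (v.normalCount || 0),
        errorCount: acc.errorCount + (v.errorCount || 0)
      }), { normalCount: 0, errorCount: 0 })

    // aggregate(res.data) -> e.g. { normalCount: 12, errorCount: 3 }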

4
escheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue

@@ -36,10 +36,10 @@
         </x-select>
       </div>
       <div class="list">
-        <x-input v-model="searchParams.host" style="width: 140px;" size="small" :placeholder="$t('host')"></x-input>
+        <x-input v-model="searchParams.host" @on-enterkey="_ckQuery" style="width: 140px;" size="small" :placeholder="$t('host')"></x-input>
       </div>
       <div class="list">
-        <x-input v-model="searchParams.searchVal" style="width: 200px;" size="small" :placeholder="$t('name')"></x-input>
+        <x-input v-model="searchParams.searchVal" @on-enterkey="_ckQuery" style="width: 200px;" size="small" :placeholder="$t('name')"></x-input>
       </div>
     </template>
   </m-conditions>

8
escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue

@@ -32,9 +32,12 @@
           v-model="email"
           :disabled="disabled"
           :placeholder="$t('Please enter email')"
+          @blur="_emailEnter"
           @keydown.tab="_emailTab"
           @keyup.delete="_emailDelete"
           @keyup.enter="_emailEnter"
+          @keyup.space="_emailEnter"
+          @keyup.186="_emailEnter"
           @keyup.up="_emailKeyup('up')"
           @keyup.down="_emailKeyup('down')">
       </span>
@@ -78,6 +81,11 @@
        * Manually add a mailbox
        */
       _manualEmail () {
+        if (this.email === '') {
+          return
+        }
+        this.email = _.trim(this.email).replace(/(;$)|(；$)/g, "")
         let email = this.email
         let is = (n) => {
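
The added guard and normalization make manual entry tolerant of blank input and of a trailing ASCII or full-width semicolon before validation runs. For example:

    // trailing ASCII semicolon
    _.trim(' user@example.com; ').replace(/(;$)|(；$)/g, '')  // -> 'user@example.com'
    // trailing full-width semicolon (；)
    _.trim(' user@example.com；').replace(/(;$)|(；$)/g, '')  // -> 'user@example.com'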

15
escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue

@@ -58,12 +58,12 @@
         <span v-if="!item.scheduleReleaseState">-</span>
       </td>
       <td>
-        <x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Edit')" @click="_edit(item)" :disabled="item.releaseState === 'ONLINE'" v-ps="['GENERAL_USER']" icon="iconfont icon-bianji"><!--{{$t('编辑')}}--></x-button>
+        <x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Edit')" @click="_edit(item)" :disabled="item.releaseState === 'ONLINE'" icon="iconfont icon-bianji"><!--{{$t('编辑')}}--></x-button>
-        <x-button type="success" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Start')" @click="_start(item)" :disabled="item.releaseState !== 'ONLINE'" v-ps="['GENERAL_USER']" icon="iconfont icon-qidong"><!--{{$t('启动')}}--></x-button>
+        <x-button type="success" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Start')" @click="_start(item)" :disabled="item.releaseState !== 'ONLINE'" icon="iconfont icon-qidong"><!--{{$t('启动')}}--></x-button>
-        <x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Timing')" @click="_timing(item)" :disabled="item.releaseState !== 'ONLINE' || item.scheduleReleaseState !== null" v-ps="['GENERAL_USER']" icon="iconfont icon-timer"><!--{{$t('定时')}}--></x-button>
+        <x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Timing')" @click="_timing(item)" :disabled="item.releaseState !== 'ONLINE' || item.scheduleReleaseState !== null" icon="iconfont icon-timer"><!--{{$t('定时')}}--></x-button>
-        <x-button type="warning" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('online')" @click="_poponline(item)" v-if="item.releaseState === 'OFFLINE'" v-ps="['GENERAL_USER']" icon="iconfont icon-erji-xiaxianjilu-copy"><!--{{$t('下线')}}--></x-button>
+        <x-button type="warning" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('online')" @click="_poponline(item)" v-if="item.releaseState === 'OFFLINE'" icon="iconfont icon-erji-xiaxianjilu-copy"><!--{{$t('下线')}}--></x-button>
-        <x-button type="error" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('offline')" @click="_downline(item)" v-if="item.releaseState === 'ONLINE'" v-ps="['GENERAL_USER']" icon="iconfont icon-erji-xiaxianjilu"><!--{{$t('上线')}}--></x-button>
+        <x-button type="error" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('offline')" @click="_downline(item)" v-if="item.releaseState === 'ONLINE'" icon="iconfont icon-erji-xiaxianjilu"><!--{{$t('上线')}}--></x-button>
-        <x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Cron Manage')" @click="_timingManage(item)" :disabled="item.releaseState !== 'ONLINE'" v-ps="['GENERAL_USER']" icon="iconfont icon-paibanguanli"><!--{{$t('定时管理')}}--></x-button>
+        <x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Cron Manage')" @click="_timingManage(item)" :disabled="item.releaseState !== 'ONLINE'" icon="iconfont icon-paibanguanli"><!--{{$t('定时管理')}}--></x-button>
         <x-poptip
           :ref="'poptip-delete-' + $index"
           placement="bottom-end"
@@ -81,8 +81,7 @@
           size="xsmall"
           :disabled="item.releaseState === 'ONLINE'"
           data-toggle="tooltip"
-          :title="$t('delete')"
-          v-ps="['GENERAL_USER']">
+          :title="$t('delete')">
         </x-button>
       </template>
     </x-poptip>

2
escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue

@@ -137,7 +137,7 @@
     </template>
     <div class="submit">
       <x-button type="text" @click="close()"> {{$t('Cancel')}} </x-button>
-      <x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()" v-ps="['GENERAL_USER']">{{spinnerLoading ? 'Loading...' : $t('Start')}} </x-button>
+      <x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()">{{spinnerLoading ? 'Loading...' : $t('Start')}} </x-button>
     </div>
   </div>
 </template>

2
escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue

@@ -3,7 +3,7 @@
     <template slot="conditions">
       <m-conditions @on-conditions="_onConditions">
         <template slot="button-group">
-          <x-button type="ghost" size="small" v-ps="['GENERAL_USER']" @click="() => this.$router.push({name: 'definition-create'})">{{$t('Create process')}}</x-button>
+          <x-button type="ghost" size="small" @click="() => this.$router.push({name: 'definition-create'})">{{$t('Create process')}}</x-button>
         </template>
       </m-conditions>
     </template>

24
escheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue

@@ -33,30 +33,6 @@
         </div>
       </div>
     </div>
-    <div class="row" style="padding-top: 20px;">
-      <div class="col-md-6">
-      </div>
-      <div class="col-md-6">
-        <div class="chart-title">
-          <span>{{$t('Queue statistics')}}</span>
-        </div>
-        <div class="row">
-          <m-queue-count :search-params="searchParams">
-          </m-queue-count>
-        </div>
-      </div>
-    </div>
-    <div class="row">
-      <div class="col-md-12">
-        <div class="chart-title" style="margin-bottom: 20px;margin-top: 30px">
-          <span>{{$t('Command status statistics')}}</span>
-        </div>
-        <div>
-          <m-command-state-count :search-params="searchParams">
-          </m-command-state-count>
-        </div>
-      </div>
-    </div>
     <div class="row">
       <div class="col-md-12">
         <div class="chart-title" style="margin-bottom: -20px;margin-top: 30px">

8
escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue

@@ -73,7 +73,6 @@
               data-toggle="tooltip"
               :title="$t('Edit')"
               @click="_reEdit(item)"
-              v-ps="['GENERAL_USER']"
               icon="iconfont icon-bianjixiugai"
               :disabled="item.state !== 'SUCCESS' && item.state !== 'PAUSE' && item.state !== 'FAILURE' && item.state !== 'STOP'"></x-button>
             <x-button type="info"
@@ -82,7 +81,6 @@
               data-toggle="tooltip"
               :title="$t('Rerun')"
               @click="_reRun(item,$index)"
-              v-ps="['GENERAL_USER']"
               icon="iconfont icon-shuaxin"
               :disabled="item.state !== 'SUCCESS' && item.state !== 'PAUSE' && item.state !== 'FAILURE' && item.state !== 'STOP'"></x-button>
             <x-button type="success"
@@ -91,7 +89,6 @@
               data-toggle="tooltip"
               :title="$t('Recovery Failed')"
               @click="_restore(item,$index)"
-              v-ps="['GENERAL_USER']"
               icon="iconfont icon-cuowuguanbishibai"
               :disabled="item.state !== 'FAILURE'"></x-button>
             <x-button type="error"
@@ -100,7 +97,6 @@
               data-toggle="tooltip"
               :title="$t('Stop')"
               @click="_stop(item)"
-              v-ps="['GENERAL_USER']"
               icon="iconfont icon-zanting1"
               :disabled="item.state !== 'RUNNING_EXEUTION'"></x-button>
             <x-button type="warning"
@@ -109,7 +105,6 @@
               data-toggle="tooltip"
               :title="item.state === 'PAUSE' ? $t('Recovery Suspend') : $t('Pause')"
               @click="_suspend(item,$index)"
-              v-ps="['GENERAL_USER']"
               :icon="item.state === 'PAUSE' ? 'iconfont icon-ai06' : 'iconfont icon-zanting'"
               :disabled="item.state !== 'RUNNING_EXEUTION' && item.state !== 'PAUSE'"></x-button>
             <x-poptip
@@ -128,8 +123,7 @@
               shape="circle"
               size="xsmall"
               data-toggle="tooltip"
-              :title="$t('delete')"
-              v-ps="['GENERAL_USER']">
+              :title="$t('delete')">
             </x-button>
           </template>
         </x-poptip>

6
escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue

@@ -63,8 +63,7 @@
           data-toggle="tooltip"
           :title="$t('Edit')"
           @click="_edit(item)"
-          icon="iconfont icon-bianjixiugai"
-          v-ps="['GENERAL_USER']">
+          icon="iconfont icon-bianjixiugai">
         </x-button>
         <x-poptip
           :ref="'poptip-' + $index"
@@ -82,8 +81,7 @@
           size="xsmall"
           data-toggle="tooltip"
           :title="$t('delete')"
-          icon="iconfont icon-shanchu"
-          v-ps="['GENERAL_USER']">
+          icon="iconfont icon-shanchu">
         </x-button>
       </template>
     </x-poptip>

2
escheduler-ui/src/js/conf/home/pages/projects/pages/list/index.vue

@@ -3,7 +3,7 @@
     <template slot="conditions">
       <m-conditions @on-conditions="_onConditions">
         <template slot="button-group">
-          <x-button type="ghost" size="small" @click="_create('')" v-ps="['GENERAL_USER']">{{$t('Create Project')}}</x-button>
+          <x-button type="ghost" size="small" @click="_create('')">{{$t('Create Project')}}</x-button>
         </template>
       </m-conditions>
     </template>

2
escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue

@@ -49,7 +49,7 @@
       <template slot="name">&nbsp;</template>
       <template slot="content">
         <div class="submit">
-          <x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()" v-ps="['GENERAL_USER']">{{spinnerLoading ? 'Loading...' : $t('Create')}} </x-button>
+          <x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()">{{spinnerLoading ? 'Loading...' : $t('Create')}} </x-button>
           <x-button type="text" @click="() => $router.push({name: 'file'})"> {{$t('Cancel')}} </x-button>
         </div>
       </template>

12
escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue

@@ -51,8 +51,7 @@
           :title="$t('Edit')"
           :disabled="_rtDisb(item)"
           @click="_edit(item,$index)"
-          icon="iconfont icon-bianjixiugai"
-          v-ps="['GENERAL_USER']">
+          icon="iconfont icon-bianjixiugai">
         </x-button>
         <x-button
           type="info"
@@ -61,8 +60,7 @@
           icon="iconfont icon-wendangxiugai"
           data-toggle="tooltip"
           :title="$t('Rename')"
-          @click="_rename(item,$index)"
-          v-ps="['GENERAL_USER']">
+          @click="_rename(item,$index)">
         </x-button>
         <x-button
@@ -72,8 +70,7 @@
           data-toggle="tooltip"
           :title="$t('Download')"
           @click="_downloadFile(item)"
-          icon="iconfont icon-download"
-          v-ps="['GENERAL_USER']">
+          icon="iconfont icon-download">
         </x-button>
         <x-poptip
@@ -92,8 +89,7 @@
           shape="circle"
           size="xsmall"
           data-toggle="tooltip"
-          :title="$t('delete')"
-          v-ps="['GENERAL_USER']">
+          :title="$t('delete')">
         </x-button>
       </template>
     </x-poptip>

4
escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue

@@ -4,8 +4,8 @@
     <m-conditions @on-conditions="_onConditions">
       <template slot="button-group">
         <x-button-group size="small" >
-          <x-button type="ghost" @click="() => $router.push({name: 'resource-file-create'})" v-ps="['GENERAL_USER']">{{$t('Create File')}}</x-button>
-          <x-button type="ghost" @click="_uploading" v-ps="['GENERAL_USER']">{{$t('Upload Files')}}</x-button>
+          <x-button type="ghost" @click="() => $router.push({name: 'resource-file-create'})">{{$t('Create File')}}</x-button>
+          <x-button type="ghost" @click="_uploading">{{$t('Upload Files')}}</x-button>
         </x-button-group>
       </template>
     </m-conditions>

8
escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue

@@ -1,4 +1,4 @@
-<template> v-ps
+<template>
   <div class="list-model">
     <div class="table-box">
       <table class="fixed">
@@ -70,8 +70,7 @@
           data-toggle="tooltip"
           :title="$t('Edit')"
           @click="_edit(item)"
-          icon="iconfont icon-bianjixiugai"
-          v-ps="['GENERAL_USER']">
+          icon="iconfont icon-bianjixiugai">
         </x-button>
         <x-poptip
           :ref="'poptip-' + $index"
@@ -89,8 +88,7 @@
           size="xsmall"
           icon="iconfont icon-shanchu"
           data-toggle="tooltip"
-          :title="$t('delete')"
-          v-ps="['GENERAL_USER']">
+          :title="$t('delete')">
         </x-button>
       </template>
     </x-poptip>

2
escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue

@@ -3,7 +3,7 @@
     <template slot="conditions">
       <m-conditions @on-conditions="_onConditions">
         <template slot="button-group">
-          <x-button type="ghost" @click="_create" v-ps="['GENERAL_USER']" size="small" >{{$t('Create UDF Function')}}</x-button>
+          <x-button type="ghost" @click="_create" size="small" >{{$t('Create UDF Function')}}</x-button>
         </template>
       </m-conditions>
     </template>

9
escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue

@@ -58,8 +58,7 @@
           icon="iconfont icon-wendangxiugai"
           data-toggle="tooltip"
           :title="$t('Rename')"
-          @click="_rename(item,$index)"
-          v-ps="['GENERAL_USER']">
+          @click="_rename(item,$index)">
         </x-button>
         <x-button
           type="info"
@@ -68,8 +67,7 @@
           data-toggle="tooltip"
           :title="$t('Download')"
           icon="iconfont icon-download"
-          @click="_downloadFile(item)"
-          v-ps="['GENERAL_USER']">
+          @click="_downloadFile(item)">
         </x-button>
         <x-poptip
           :ref="'poptip-' + $index"
@@ -87,8 +85,7 @@
           size="xsmall"
           data-toggle="tooltip"
           :title="$t('delete')"
-          icon="iconfont icon-shanchu"
-          v-ps="['GENERAL_USER']">
+          icon="iconfont icon-shanchu">
         </x-button>
       </template>
     </x-poptip>

2
escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue

@@ -3,7 +3,7 @@
     <template slot="conditions">
       <m-conditions @on-conditions="_onConditions">
         <template slot="button-group">
-          <x-button type="ghost" size="small" @click="_uploading" v-ps="['GENERAL_USER']">{{$t('Upload UDF Resources')}}</x-button>
+          <x-button type="ghost" size="small" @click="_uploading">{{$t('Upload UDF Resources')}}</x-button>
         </template>
       </m-conditions>
     </template>

9
escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue

@@ -98,7 +98,9 @@
       userName: '',
       userPassword: '',
       tenantId: {},
-      queueName: {},
+      queueName: {
+        id: ''
+      },
       email: '',
       phone: '',
       tenantList: [],
@@ -197,6 +199,7 @@
     },
     _submit () {
       this.$refs['popup'].spinnerLoading = true
+      console.log(this.tenantId.id)
       let param = {
         userName: this.userName,
         userPassword: this.userPassword,
@@ -205,9 +208,11 @@
         queue: this.queueName.code,
         phone: this.phone
       }
       if (this.item) {
         param.id = this.item.id
       }
       this.store.dispatch(`security/${this.item ? 'updateUser' : 'createUser'}`, param).then(res => {
         setTimeout(() => {
           this.$refs['popup'].spinnerLoading = false
@@ -232,7 +237,7 @@
       this.phone = this.item.phone
       this.tenantId = _.find(this.tenantList, ['id', this.item.tenantId])
       this.$nextTick(() => {
-        this.queueName = _.find(this.queueList, ['code', this.item.queue])
+        this.queueName = _.find(this.queueList, ['code', this.item.queue]) || { id: '' }
       })
     }
   })
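
The || { id: '' } fallback matters because _.find returns undefined when no queue matches item.queue, while the component reads queueName.id and queueName.code; the fallback turns a would-be TypeError during backfill into an empty selection. For example:

    import _ from 'lodash'

    const queueList = [{ id: 1, code: 'default' }]
    const miss = _.find(queueList, ['code', 'missing'])               // undefined
    const safe = _.find(queueList, ['code', 'missing']) || { id: '' }
    // miss.id would throw; safe.id is '' and renders as an empty selection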

10
escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue

@@ -9,6 +9,9 @@
         <th>
           <span>{{$t('User Name')}}</span>
         </th>
+        <th>
+          <span>用户类型</span>
+        </th>
         <th>
           <span>{{$t('Tenant')}}</span>
         </th>
@@ -21,6 +24,7 @@
         <th>
           <span>{{$t('Phone')}}</span>
         </th>
         <th>
           <span>{{$t('Create Time')}}</span>
         </th>
@@ -40,6 +44,9 @@
           <a href="javascript:" class="links">{{item.userName || '-'}}</a>
         </span>
       </td>
+      <td>
+        <span>{{item.userType === 'GENERAL_USER' ? `${$t('Ordinary users')}` : `${$t('Administrator')}`}}</span>
+      </td>
       <td><span>{{item.tenantName || '-'}}</span></td>
       <td><span>{{item.queue || '-'}}</span></td>
       <td>
@@ -62,7 +69,7 @@
         <a href="javascript:" @click="_authUdfFunc(item,$index)">{{$t('UDF Function')}}</a>
       </div>
       <template slot="reference">
-        <x-button type="warning" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Authorize')" icon="iconfont icon-yonghu1"></x-button>
+        <x-button type="warning" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Authorize')" icon="iconfont icon-yonghu1" :disabled="item.userType === 'ADMIN_USER'"></x-button>
       </template>
     </x-poptip>
@@ -84,6 +91,7 @@
           size="xsmall"
           data-toggle="tooltip"
           :title="$t('delete')"
+          :disabled="item.userType === 'ADMIN_USER'"
           icon="iconfont icon-shanchu">
         </x-button>
       </template>

8
escheduler-ui/src/js/conf/home/router/index.js

@@ -439,6 +439,14 @@ const router = new Router({
           meta: {
             title: `Mysql`
           }
+        },
+        {
+          path: '/monitor/servers/statistics',
+          name: 'statistics',
+          component: resolve => require(['../pages/monitor/pages/servers/statistics'], resolve),
+          meta: {
+            title: `statistics`
+          }
         }
       ]
     }
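
The new route follows the file's existing lazy-loading convention: resolve => require([...], resolve) is the webpack async-require form, so the statistics page is code-split into its own chunk and only fetched when /monitor/servers/statistics is first visited. The same split in both spellings (the second is the later dynamic-import form, shown for comparison):

    // As used throughout this router file:
    const Statistics = resolve => require(['../pages/monitor/pages/servers/statistics'], resolve)

    // Equivalent with dynamic import:
    // const Statistics = () => import('../pages/monitor/pages/servers/statistics')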

6
escheduler-ui/src/js/conf/home/store/dag/actions.js

@@ -115,6 +115,7 @@ export default {
         // timeout
         state.timeout = processDefinitionJson.timeout
+        state.tenantId = processDefinitionJson.tenantId
         resolve(res.data)
       }).catch(res => {
         reject(res)
@@ -146,6 +147,8 @@
         // timeout
         state.timeout = processInstanceJson.timeout
+        state.tenantId = processInstanceJson.tenantId
         resolve(res.data)
       }).catch(res => {
         reject(res)
@@ -160,6 +163,7 @@
     let data = {
       globalParams: state.globalParams,
       tasks: state.tasks,
+      tenantId: state.tenantId,
       timeout: state.timeout
     }
     io.post(`projects/${state.projectName}/process/save`, {
@@ -183,6 +187,7 @@
     let data = {
       globalParams: state.globalParams,
       tasks: state.tasks,
+      tenantId: state.tenantId,
       timeout: state.timeout
     }
     io.post(`projects/${state.projectName}/process/update`, {
@@ -207,6 +212,7 @@
     let data = {
       globalParams: state.globalParams,
       tasks: state.tasks,
+      tenantId: state.tenantId,
      timeout: state.timeout
     }
     io.post(`projects/${state.projectName}/instance/update`, {

7
escheduler-ui/src/js/conf/home/store/dag/mutations.js

@@ -58,6 +58,12 @@ export default {
   setTimeout (state, payload) {
     state.timeout = payload
   },
+  /**
+   * set tenantId
+   */
+  setTenantId (state, payload) {
+    state.tenantId = payload
+  },
   /**
    * set global params
    */
@@ -100,6 +106,7 @@
   state.name = payload && payload.name || ''
   state.desc = payload && payload.desc || ''
   state.timeout = payload && payload.timeout || 0
+  state.tenantId = payload && payload.tenantId || -1
   state.processListS = payload && payload.processListS || []
   state.resourcesListS = payload && payload.resourcesListS || []
   state.isDetails = payload && payload.isDetails || false
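
One subtlety in the backfill defaults above: && binds tighter than ||, so payload && payload.tenantId || -1 yields -1 not only when payload is absent but whenever tenantId is falsy, including a legitimate id of 0. If 0 were ever a valid tenant id, a null check would be needed:

    const payload = { tenantId: 0 }
    const a = payload && payload.tenantId || -1                              // -1 (0 is falsy)
    const b = (payload && payload.tenantId != null) ? payload.tenantId : -1  // 0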

2
escheduler-ui/src/js/conf/home/store/dag/state.js

@@ -31,6 +31,8 @@ export default {
   tasks: [],
   // Timeout alarm
   timeout: 0,
+  // tenant id
+  tenantId: -1,
   // Node location information
   locations: {},
   // Node-to-node connection

9
escheduler-ui/src/js/conf/home/store/datasource/actions.js

@@ -116,5 +116,14 @@ export default {
         reject(e)
       })
     })
+  },
+  getKerberosStartupState ({ state }, payload) {
+    return new Promise((resolve, reject) => {
+      io.get(`datasources/kerberos-startup-state`, payload, res => {
+        resolve(res.data)
+      }).catch(e => {
+        reject(e)
+      })
+    })
   }
 }
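
getKerberosStartupState is the action consumed by createDataSource.vue's created() hook earlier in this diff: it GETs datasources/kerberos-startup-state and resolves whatever res.data carries (a boolean flag, judging by the consumer's usage). A minimal consumer sketch (error handling elided):

    // In a component with the store injected, e.g. in created():
    this.store.dispatch('datasource/getKerberosStartupState').then(enabled => {
      this.isShowPrincipal = enabled // true when the server has Kerberos enabled
    })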

Some files were not shown because too many files have changed in this diff.
