
Merge pull request #4058 from apache/133-merge-dev

[Merge][133-dev] merge from 1.3.3-release to branch dev
dailidong committed 4 years ago via GitHub (commit 46091bb870)
1. 128  .gitignore
2. BIN  .mvn/wrapper/maven-wrapper.jar
3. 467  ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml
4. 6  ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json
5. 7  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml
6. 16  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml
7. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml
8. 206  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml
9. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml
10. 88  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml
11. 23  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml
12. 67  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml
13. 76  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml
14. 4  ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml
15. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py
16. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py
17. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py
18. 46  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py
19. 4  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py
20. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py
21. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py
22. 85  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py
23. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py
24. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py
25. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2
26. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2
27. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2
28. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2
29. 13  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2
30. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2
31. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2
32. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2
33. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2
34. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json
35. 84  ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json
36. 13  docker/kubernetes/dolphinscheduler/requirements.yaml
37. 2  dolphinscheduler-alert/pom.xml
38. 4  dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java
39. 2  dolphinscheduler-api/pom.xml
40. 6  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java
41. 47  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java
42. 2  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java
43. 7  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java
44. 19  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java
45. 165  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
46. 20  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java
47. 51  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java
48. 24  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java
49. 1  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java
50. 113  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java
51. 20  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java
52. 1  dolphinscheduler-api/src/main/resources/logback-api.xml
53. 16  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java
54. 10  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java
55. 22  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
56. 8  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java
57. 10  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java
58. 2  dolphinscheduler-common/pom.xml
59. 23  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java
60. 2  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java
61. 1  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java
62. 2  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java
63. 5  dolphinscheduler-dao/pom.xml
64. 2  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java
65. 14  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java
66. 7  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java
67. 2  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java
68. 69  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java
69. 190  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java
70. 226  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java
71. 13  dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml
72. 57  dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java
73. 250  dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java
74. 2  dolphinscheduler-dist/pom.xml
75. 2  dolphinscheduler-microbench/pom.xml
76. 2  dolphinscheduler-plugin-api/pom.xml
77. 2  dolphinscheduler-remote/pom.xml
78. 10  dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java
79. 74  dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java
80. 71  dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java
81. 2  dolphinscheduler-server/pom.xml
82. 39  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java
83. 73  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java
84. 28  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java
85. 30  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java
86. 36  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java
87. 36  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java
88. 3  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java
89. 4  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java
90. 101  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java
91. 191  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java
92. 32  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java
93. 7  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java
94. 4  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java
95. 4  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java
96. 6  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java
97. 11  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java
98. 94  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java
99. 56  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java
100. 58  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java
Some files were not shown because too many files have changed in this diff.

128
.gitignore vendored

@@ -4,25 +4,27 @@
 .zip
 .gz
 .DS_Store
+.idea
 .idea/
-dist/
-all-dependencies.txt
-self-modules.txt
-third-party-dependencies.txt
-**/target/
+.idea/*
+.target
+.target/
+**/**/target/**
+target/*
+*/target
+*/target/*
 .settings
 .nbproject
 .classpath
 .project
-**/*.iml
+*.iml
 *.ipr
 *.iws
 *.tgz
 .*.swp
-.factorypath
 .vim
 .tmp
-**/node_modules
+node_modules
 npm-debug.log
 .vscode
 logs/*
@@ -39,10 +41,110 @@ dolphinscheduler-alert/logs/
 dolphinscheduler-alert/src/main/resources/alert.properties_bak
 dolphinscheduler-alert/src/main/resources/logback.xml
 dolphinscheduler-server/src/main/resources/logback.xml
-dolphinscheduler-ui/dist/
+dolphinscheduler-ui/dist
 dolphinscheduler-ui/node
-dolphinscheduler-dao/src/main/resources/dao/data_source.properties
+dolphinscheduler-ui/dist/css/common.16ac5d9.css
+dolphinscheduler-ui/dist/css/home/index.b444b91.css
+dolphinscheduler-ui/dist/css/login/index.5866c64.css
+dolphinscheduler-ui/dist/js/0.ac94e5d.js
+dolphinscheduler-ui/dist/js/0.ac94e5d.js.map
+dolphinscheduler-ui/dist/js/1.0b043a3.js
+dolphinscheduler-ui/dist/js/1.0b043a3.js.map
+dolphinscheduler-ui/dist/js/10.1bce3dc.js
+dolphinscheduler-ui/dist/js/10.1bce3dc.js.map
+dolphinscheduler-ui/dist/js/11.79f04d8.js
+dolphinscheduler-ui/dist/js/11.79f04d8.js.map
+dolphinscheduler-ui/dist/js/12.420daa5.js
+dolphinscheduler-ui/dist/js/12.420daa5.js.map
+dolphinscheduler-ui/dist/js/13.e5bae1c.js
+dolphinscheduler-ui/dist/js/13.e5bae1c.js.map
+dolphinscheduler-ui/dist/js/14.f2a0dca.js
+dolphinscheduler-ui/dist/js/14.f2a0dca.js.map
+dolphinscheduler-ui/dist/js/15.45373e8.js
+dolphinscheduler-ui/dist/js/15.45373e8.js.map
+dolphinscheduler-ui/dist/js/16.fecb0fc.js
+dolphinscheduler-ui/dist/js/16.fecb0fc.js.map
+dolphinscheduler-ui/dist/js/17.84be279.js
+dolphinscheduler-ui/dist/js/17.84be279.js.map
+dolphinscheduler-ui/dist/js/18.307ea70.js
+dolphinscheduler-ui/dist/js/18.307ea70.js.map
+dolphinscheduler-ui/dist/js/19.144db9c.js
+dolphinscheduler-ui/dist/js/19.144db9c.js.map
+dolphinscheduler-ui/dist/js/2.8b4ef29.js
+dolphinscheduler-ui/dist/js/2.8b4ef29.js.map
+dolphinscheduler-ui/dist/js/20.4c527e9.js
+dolphinscheduler-ui/dist/js/20.4c527e9.js.map
+dolphinscheduler-ui/dist/js/21.831b2a2.js
+dolphinscheduler-ui/dist/js/21.831b2a2.js.map
+dolphinscheduler-ui/dist/js/22.2b4bb2a.js
+dolphinscheduler-ui/dist/js/22.2b4bb2a.js.map
+dolphinscheduler-ui/dist/js/23.81467ef.js
+dolphinscheduler-ui/dist/js/23.81467ef.js.map
+dolphinscheduler-ui/dist/js/24.54a00e4.js
+dolphinscheduler-ui/dist/js/24.54a00e4.js.map
+dolphinscheduler-ui/dist/js/25.8d7bd36.js
+dolphinscheduler-ui/dist/js/25.8d7bd36.js.map
+dolphinscheduler-ui/dist/js/26.2ec5e78.js
+dolphinscheduler-ui/dist/js/26.2ec5e78.js.map
+dolphinscheduler-ui/dist/js/27.3ab48c2.js
+dolphinscheduler-ui/dist/js/27.3ab48c2.js.map
+dolphinscheduler-ui/dist/js/28.363088a.js
+dolphinscheduler-ui/dist/js/28.363088a.js.map
+dolphinscheduler-ui/dist/js/29.6c5853a.js
+dolphinscheduler-ui/dist/js/29.6c5853a.js.map
+dolphinscheduler-ui/dist/js/3.a0edb5b.js
+dolphinscheduler-ui/dist/js/3.a0edb5b.js.map
+dolphinscheduler-ui/dist/js/30.940fdd3.js
+dolphinscheduler-ui/dist/js/30.940fdd3.js.map
+dolphinscheduler-ui/dist/js/31.168a460.js
+dolphinscheduler-ui/dist/js/31.168a460.js.map
+dolphinscheduler-ui/dist/js/32.8df6594.js
+dolphinscheduler-ui/dist/js/32.8df6594.js.map
+dolphinscheduler-ui/dist/js/33.4480bbe.js
+dolphinscheduler-ui/dist/js/33.4480bbe.js.map
+dolphinscheduler-ui/dist/js/34.b407fe1.js
+dolphinscheduler-ui/dist/js/34.b407fe1.js.map
+dolphinscheduler-ui/dist/js/35.f340b0a.js
+dolphinscheduler-ui/dist/js/35.f340b0a.js.map
+dolphinscheduler-ui/dist/js/36.8880c2d.js
+dolphinscheduler-ui/dist/js/36.8880c2d.js.map
+dolphinscheduler-ui/dist/js/37.ea2a25d.js
+dolphinscheduler-ui/dist/js/37.ea2a25d.js.map
+dolphinscheduler-ui/dist/js/38.98a59ee.js
+dolphinscheduler-ui/dist/js/38.98a59ee.js.map
+dolphinscheduler-ui/dist/js/39.a5e958a.js
+dolphinscheduler-ui/dist/js/39.a5e958a.js.map
+dolphinscheduler-ui/dist/js/4.4ca44db.js
+dolphinscheduler-ui/dist/js/4.4ca44db.js.map
+dolphinscheduler-ui/dist/js/40.e187b1e.js
+dolphinscheduler-ui/dist/js/40.e187b1e.js.map
+dolphinscheduler-ui/dist/js/41.0e89182.js
+dolphinscheduler-ui/dist/js/41.0e89182.js.map
+dolphinscheduler-ui/dist/js/42.341047c.js
+dolphinscheduler-ui/dist/js/42.341047c.js.map
+dolphinscheduler-ui/dist/js/43.27b8228.js
+dolphinscheduler-ui/dist/js/43.27b8228.js.map
+dolphinscheduler-ui/dist/js/44.e8869bc.js
+dolphinscheduler-ui/dist/js/44.e8869bc.js.map
+dolphinscheduler-ui/dist/js/45.8d54901.js
+dolphinscheduler-ui/dist/js/45.8d54901.js.map
+dolphinscheduler-ui/dist/js/5.e1ed7f3.js
+dolphinscheduler-ui/dist/js/5.e1ed7f3.js.map
+dolphinscheduler-ui/dist/js/6.241ba07.js
+dolphinscheduler-ui/dist/js/6.241ba07.js.map
+dolphinscheduler-ui/dist/js/7.ab2e297.js
+dolphinscheduler-ui/dist/js/7.ab2e297.js.map
+dolphinscheduler-ui/dist/js/8.83ff814.js
+dolphinscheduler-ui/dist/js/8.83ff814.js.map
+dolphinscheduler-ui/dist/js/9.39cb29f.js
+dolphinscheduler-ui/dist/js/9.39cb29f.js.map
+dolphinscheduler-ui/dist/js/common.733e342.js
+dolphinscheduler-ui/dist/js/common.733e342.js.map
+dolphinscheduler-ui/dist/js/home/index.78a5d12.js
+dolphinscheduler-ui/dist/js/home/index.78a5d12.js.map
+dolphinscheduler-ui/dist/js/login/index.291b8e3.js
+dolphinscheduler-ui/dist/js/login/index.291b8e3.js.map
+dolphinscheduler-ui/dist/lib/external/
+dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue
+/dolphinscheduler-dao/src/main/resources/dao/data_source.properties
+.mvn/wrapper/*.jar
+!/zookeeper_data/

BIN
.mvn/wrapper/maven-wrapper.jar vendored

Binary file not shown.

467
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml

@@ -1,467 +0,0 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>spring.datasource.initialSize</name>
<value>5</value>
<description>
Init connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minIdle</name>
<value>5</value>
<description>
Min connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxActive</name>
<value>50</value>
<description>
Max connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxWait</name>
<value>60000</value>
<description>
Max wait time for get a connection in milliseconds.
If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases.
If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenEvictionRunsMillis</name>
<value>60000</value>
<description>
Milliseconds for check to close free connections
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenConnectErrorMillis</name>
<value>60000</value>
<description>
The Destroy thread detects the connection interval and closes the physical connection in milliseconds
if the connection idle time is greater than or equal to minEvictableIdleTimeMillis.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minEvictableIdleTimeMillis</name>
<value>300000</value>
<description>
The longest time a connection remains idle without being evicted, in milliseconds
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQuery</name>
<value>SELECT 1</value>
<description>
The SQL used to check whether the connection is valid requires a query statement.
If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work.
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQueryTimeout</name>
<value>3</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
Check whether the connection is valid for timeout, in seconds
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testWhileIdle</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
When applying for a connection,
if it is detected that the connection is idle longer than time Between Eviction Runs Millis,
validation Query is performed to check whether the connection is valid
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnBorrow</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when applying for a connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnReturn</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when the connection is returned
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.defaultAutoCommit</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.keepAlive</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.poolPreparedStatements</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Open PSCache, specify count PSCache for every connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxPoolPreparedStatementPerConnectionSize</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.spring.datasource.filters</name>
<value>stat,wall,log4j</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.connectionProperties</name>
<value>druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.mapper-locations</name>
<value>classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.typeEnumsPackage</name>
<value>org.apache.dolphinscheduler.*.enums</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.typeAliasesPackage</name>
<value>org.apache.dolphinscheduler.dao.entity</value>
<description>
Entity scan, where multiple packages are separated by a comma or semicolon
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.id-type</name>
<value>AUTO</value>
<value-attributes>
<type>value-list</type>
<entries>
<entry>
<value>AUTO</value>
<label>AUTO</label>
</entry>
<entry>
<value>INPUT</value>
<label>INPUT</label>
</entry>
<entry>
<value>ID_WORKER</value>
<label>ID_WORKER</label>
</entry>
<entry>
<value>UUID</value>
<label>UUID</label>
</entry>
</entries>
<selection-cardinality>1</selection-cardinality>
</value-attributes>
<description>
Primary key type AUTO:" database ID AUTO ",
INPUT:" user INPUT ID",
ID_WORKER:" global unique ID (numeric type unique ID)",
UUID:" global unique ID UUID";
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.field-strategy</name>
<value>NOT_NULL</value>
<value-attributes>
<type>value-list</type>
<entries>
<entry>
<value>IGNORED</value>
<label>IGNORED</label>
</entry>
<entry>
<value>NOT_NULL</value>
<label>NOT_NULL</label>
</entry>
<entry>
<value>NOT_EMPTY</value>
<label>NOT_EMPTY</label>
</entry>
</entries>
<selection-cardinality>1</selection-cardinality>
</value-attributes>
<description>
Field policy IGNORED:" ignore judgment ",
NOT_NULL:" not NULL judgment "),
NOT_EMPTY:" not NULL judgment"
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.column-underline</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.logic-delete-value</name>
<value>1</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.logic-not-delete-value</name>
<value>0</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.banner</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.map-underscore-to-camel-case</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.cache-enabled</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.call-setters-on-nulls</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.jdbc-type-for-null</name>
<value>null</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.exec.task.num</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.retryTimes</name>
<value>5</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.interval</name>
<value>1000</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.reserved.memory</name>
<value>0.1</value>
<value-attributes>
<type>float</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.fetch.task.num</name>
<value>3</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.reserved.memory</name>
<value>0.1</value>
<value-attributes>
<type>float</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

6
ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json → ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json

@@ -65,7 +65,7 @@
 "enabled": true,
 "source": {
 "type": "SCRIPT",
-"path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py",
+"path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py",
 "parameters": [
 {
@@ -98,7 +98,7 @@
 "enabled": true,
 "source": {
 "type": "SCRIPT",
-"path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py",
+"path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py",
 "parameters": [
 {
@@ -131,7 +131,7 @@
 "enabled": true,
 "source": {
 "type": "SCRIPT",
-"path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py",
+"path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py",
 "parameters": [
 {

7
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml

@@ -90,13 +90,6 @@
 <on-ambari-upgrade add="true"/>
 </property>
-<property>
-<name>xls.file.path</name>
-<value>/tmp/xls</value>
-<description></description>
-<on-ambari-upgrade add="true"/>
-</property>
 <property>
 <name>enterprise.wechat.enable</name>
 <value>false</value>

16
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml

@@ -34,6 +34,12 @@
 <description>
 </description>
 </property>
+<property>
+<name>server.servlet.context-path</name>
+<value>/dolphinscheduler/</value>
+<description>
+</description>
+</property>
 <property>
 <name>spring.servlet.multipart.max-file-size</name>
 <value>1024</value>
@@ -68,4 +74,14 @@
 <value>UTF-8</value>
 <description></description>
 </property>
+<property>
+<name>spring.messages.basename</name>
+<value>i18n/messages</value>
+<description></description>
+</property>
+<property>
+<name>security.authentication.type</name>
+<value>PASSWORD</value>
+<description></description>
+</property>
 </configuration>

0
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml

206
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml

@@ -0,0 +1,206 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>spring.datasource.initialSize</name>
<value>5</value>
<description>
Init connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minIdle</name>
<value>5</value>
<description>
Min connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxActive</name>
<value>50</value>
<description>
Max connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxWait</name>
<value>60000</value>
<description>
Max wait time for get a connection in milliseconds.
If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases.
If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenEvictionRunsMillis</name>
<value>60000</value>
<description>
Milliseconds for check to close free connections
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenConnectErrorMillis</name>
<value>60000</value>
<description>
The Destroy thread detects the connection interval and closes the physical connection in milliseconds
if the connection idle time is greater than or equal to minEvictableIdleTimeMillis.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minEvictableIdleTimeMillis</name>
<value>300000</value>
<description>
The longest time a connection remains idle without being evicted, in milliseconds
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQuery</name>
<value>SELECT 1</value>
<description>
The SQL used to check whether the connection is valid requires a query statement.
If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work.
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQueryTimeout</name>
<value>3</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
Check whether the connection is valid for timeout, in seconds
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testWhileIdle</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
When applying for a connection,
if it is detected that the connection is idle longer than time Between Eviction Runs Millis,
validation Query is performed to check whether the connection is valid
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnBorrow</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when applying for a connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnReturn</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when the connection is returned
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.defaultAutoCommit</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.keepAlive</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.poolPreparedStatements</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Open PSCache, specify count PSCache for every connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxPoolPreparedStatementPerConnectionSize</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.spring.datasource.filters</name>
<value>stat,wall,log4j</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.connectionProperties</name>
<value>druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>
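
These Druid pool settings reach the servers as datasource.properties, rendered from the dolphin_datasource_map assembled in params.py further down. A minimal Python sketch of that key=value rendering step (the function name and output path here are illustrative, not the plugin's actual helpers):

    # Sketch only: write an Ambari-style configuration map out as a
    # .properties file, the way datasource.properties.j2 consumes
    # dolphin_datasource_map.
    def write_properties(conf_map, path):
        with open(path, "w") as fp:
            for key in sorted(conf_map):
                fp.write("%s=%s\n" % (key, conf_map[key]))

    write_properties({
        "spring.datasource.initialSize": 5,
        "spring.datasource.maxActive": 50,
        "spring.datasource.validationQuery": "SELECT 1",
    }, "/tmp/datasource.properties")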

0
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml

88
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml

@@ -0,0 +1,88 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>master.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master execute thread num</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.exec.task.num</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master execute task number in parallel</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master heartbeat interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.retryTimes</name>
<value>5</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master commit task retry times</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.interval</name>
<value>1000</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master commit task interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>only less than cpu avg load, master server can work. default value : the number of cpu cores * 2</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.reserved.memory</name>
<value>0.3</value>
<description>only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G.</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.listen.port</name>
<value>5678</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master listen port</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>
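
The two protection knobs above act as simple admission gates: the master keeps accepting work only while the one-minute load average stays below master.max.cpuload.avg and available memory stays above master.reserved.memory (in G). A hedged Python sketch of that check (the real gate lives in the Java server code, not in this plugin):

    import os

    MAX_CPULOAD_AVG = 100    # master.max.cpuload.avg
    RESERVED_MEMORY_G = 0.3  # master.reserved.memory

    def mem_available_g():
        # Linux-only sketch: MemAvailable from /proc/meminfo, kB -> G
        with open("/proc/meminfo") as fp:
            for line in fp:
                if line.startswith("MemAvailable"):
                    return int(line.split()[1]) / (1024.0 * 1024.0)
        return 0.0

    def master_can_work():
        return (os.getloadavg()[0] < MAX_CPULOAD_AVG
                and mem_available_g() > RESERVED_MEMORY_G)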

23
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml

@@ -106,26 +106,21 @@
 <description></description>
 </property>
 <property>
-<name>org.quartz.jobStore.dataSource</name>
-<value>myDs</value>
-<description></description>
-</property>
-<property>
-<name>org.quartz.dataSource.myDs.connectionProvider.class</name>
-<value>org.apache.dolphinscheduler.server.quartz.DruidConnectionProvider</value>
+<name>org.quartz.jobStore.acquireTriggersWithinLock</name>
+<value>true</value>
+<value-attributes>
+<type>boolean</type>
+</value-attributes>
 <description></description>
 </property>
 <property>
-<name>org.quartz.dataSource.myDs.maxConnections</name>
-<value>10</value>
-<value-attributes>
-<type>int</type>
-</value-attributes>
+<name>org.quartz.jobStore.dataSource</name>
+<value>myDs</value>
 <description></description>
 </property>
 <property>
-<name>org.quartz.dataSource.myDs.validationQuery</name>
-<value>select 1</value>
+<name>org.quartz.dataSource.myDs.connectionProvider.class</name>
+<value>org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider</value>
 <description></description>
 </property>
 </configuration>
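
Note the pattern params.py (further down) uses for this file and the others: seed a dict with values computed from the environment, then dict.update() with the Ambari-managed configuration so values edited in the UI win. A small worked example of that precedence:

    # Precedence sketch: computed defaults first, Ambari config last.
    dolphin_quartz_map = {}
    dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = \
        'org.quartz.impl.jdbcjobstore.StdJDBCDelegate'  # computed from DB type
    # update() overwrites any overlapping keys with the UI-managed values:
    dolphin_quartz_map.update({
        'org.quartz.jobStore.acquireTriggersWithinLock': 'true',
        'org.quartz.jobStore.dataSource': 'myDs',
    })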

67
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml

@@ -0,0 +1,67 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>worker.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker execute thread num</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker heartbeat interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.reserved.memory</name>
<value>0.3</value>
<description>only larger than reserved memory, worker server can work. default value : physical memory * 1/10, unit is G.</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.listen.port</name>
<value>1234</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker listen port</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.groups</name>
<value>default</value>
<description>default worker group</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

76
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml

@@ -0,0 +1,76 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>zookeeper.dolphinscheduler.root</name>
<value>/dolphinscheduler</value>
<description>
dolphinscheduler root directory
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.session.timeout</name>
<value>300</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.connection.timeout</name>
<value>300</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.retry.base.sleep</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.retry.max.sleep</name>
<value>30000</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.retry.maxtime</name>
<value>5</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>
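
The retry values above (base sleep 100 ms, max sleep 30000 ms, at most 5 retries) have the shape of an exponential-backoff policy such as Curator's ExponentialBackoffRetry; that mapping is an assumption here, not something this diff confirms. A Python sketch of such a schedule:

    import random

    def backoff_ms(retry_count, base_sleep_ms=100, max_sleep_ms=30000):
        # Exponential backoff with jitter, capped at max_sleep_ms
        # (assumed consumer: a Curator-style retry policy).
        sleep = base_sleep_ms * random.randint(1, 1 << (retry_count + 1))
        return min(sleep, max_sleep_ms)

    for attempt in range(5):  # zookeeper.retry.maxtime
        print(attempt, backoff_ms(attempt))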

4
ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml

@@ -22,7 +22,7 @@
 <name>DOLPHIN</name>
 <displayName>Dolphin Scheduler</displayName>
 <comment>A distributed, easily extensible visual DAG workflow task scheduling system</comment>
-<version>1.2.1</version>
+<version>1.3.3</version>
 <components>
 <component>
 <name>DOLPHIN_MASTER</name>
@@ -103,7 +103,7 @@
 <osFamily>any</osFamily>
 <packages>
 <package>
-<name>apache-dolphinscheduler-incubating-1.2.1*</name>
+<name>apache-dolphinscheduler-incubating*</name>
 </package>
 </packages>
 </osSpecific>

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py

@@ -26,7 +26,8 @@ class DolphinAlertService(Script):
 import params
 env.set_params(params)
 self.install_packages(env)
-Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True)
+Execute(('chmod', '-R', '777', params.dolphin_home))
+Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
 def configure(self, env):
 import params

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py

@@ -26,7 +26,8 @@ class DolphinApiService(Script):
 import params
 env.set_params(params)
 self.install_packages(env)
-Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True)
+Execute(('chmod', '-R', '777', params.dolphin_home))
+Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
 def configure(self, env):
 import params

46
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py

@@ -42,31 +42,12 @@ def dolphin_env():
 create_parents=True
 )
-Directory(params.dolphin_alert_map['xls.file.path'],
-mode=0777,
-owner=params.dolphin_user,
-group=params.dolphin_group,
-create_parents=True
-)
 Directory(params.dolphin_common_map['data.basedir.path'],
 mode=0777,
 owner=params.dolphin_user,
 group=params.dolphin_group,
 create_parents=True
 )
-Directory(params.dolphin_common_map['data.download.basedir.path'],
-mode=0777,
-owner=params.dolphin_user,
-group=params.dolphin_group,
-create_parents=True
-)
-Directory(params.dolphin_common_map['process.exec.basepath'],
-mode=0777,
-owner=params.dolphin_user,
-group=params.dolphin_group,
-create_parents=True
-)
 File(format(params.dolphin_env_path),
@@ -79,7 +60,21 @@ def dolphin_env():
 File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"),
 mode=0755,
-content=Template("dolphin-daemon.j2"),
+content=Template("dolphin-daemon.sh.j2"),
+owner=params.dolphin_user,
+group=params.dolphin_group
+)
+File(format(params.dolphin_conf_dir + "/master.properties"),
+mode=0755,
+content=Template("master.properties.j2"),
+owner=params.dolphin_user,
+group=params.dolphin_group
+)
+File(format(params.dolphin_conf_dir + "/worker.properties"),
+mode=0755,
+content=Template("worker.properties.j2"),
 owner=params.dolphin_user,
 group=params.dolphin_group
 )
@@ -92,9 +87,9 @@ def dolphin_env():
 group=params.dolphin_group
 )
-File(format(params.dolphin_conf_dir + "/application.properties"),
+File(format(params.dolphin_conf_dir + "/datasource.properties"),
 mode=0755,
-content=Template("application.properties.j2"),
+content=Template("datasource.properties.j2"),
 owner=params.dolphin_user,
 group=params.dolphin_group
 )
@@ -119,3 +114,10 @@ def dolphin_env():
 owner=params.dolphin_user,
 group=params.dolphin_group
 )
+File(format(params.dolphin_conf_dir + "/zookeeper.properties"),
+mode=0755,
+content=Template("zookeeper.properties.j2"),
+owner=params.dolphin_user,
+group=params.dolphin_group
+)
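
Each File(..., content=Template("*.j2")) call above renders a Jinja template with the maps from params.py in scope. Roughly what that does, reproduced with plain jinja2 outside Ambari (the template directory and variables here are stand-ins, not the plugin's API):

    # Requires: pip install jinja2. Not Ambari's API, just the idea.
    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader("package/templates"))
    template = env.get_template("master.properties.j2")
    print(template.render(dolphin_master_map={"master.listen.port": 5678}))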

4
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py

@@ -26,8 +26,8 @@ class DolphinLoggerService(Script):
 import params
 env.set_params(params)
 self.install_packages(env)
-Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True)
+Execute(('chmod', '-R', '777', params.dolphin_home))
+Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
 def configure(self, env):
 import params
 params.pika_slave = True

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py

@@ -27,7 +27,8 @@ class DolphinMasterService(Script):
 import params
 env.set_params(params)
 self.install_packages(env)
-Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True)
+Execute(('chmod', '-R', '777', params.dolphin_home))
+Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
 def configure(self, env):
 import params

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py

@@ -26,7 +26,8 @@ class DolphinWorkerService(Script):
 import params
 env.set_params(params)
 self.install_packages(env)
-Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True)
+Execute(('chmod', '-R', '777', params.dolphin_home))
+Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
 def configure(self, env):
 import params
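
All five service scripts above make the same change to install(): drop the sudo'd chmod and run a plain chmod -R 777 followed by an explicit chown -R to the dolphin user and group. Condensed, the shared pattern looks like this (a sketch; the real classes each inherit Script directly rather than a shared base):

    # resource_management is Ambari's agent library; params is the
    # plugin's params.py shown below.
    from resource_management import Script, Execute

    class DolphinServiceBase(Script):
        def install(self, env):
            import params
            env.set_params(params)
            self.install_packages(env)
            # open up the install dir, then hand it to the service account
            Execute(('chmod', '-R', '777', params.dolphin_home))
            Execute(('chown', '-R',
                     params.dolphin_user + ":" + params.dolphin_group,
                     params.dolphin_home))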

85
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py

@@ -54,11 +54,8 @@ dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content']
 # database config
 dolphin_database_config = {}
 dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type']
-dolphin_database_config['dolphin_database_host'] = dolphin_env_map['dolphin.database.host']
-dolphin_database_config['dolphin_database_port'] = dolphin_env_map['dolphin.database.port']
 dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username']
 dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password']
 if 'mysql' == dolphin_database_config['dolphin_database_type']:
     dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver'
     dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate'
@@ -72,6 +69,10 @@ else:
 + ':' + dolphin_env_map['dolphin.database.port'] \
 + '/dolphinscheduler'
 # application-alert.properties
 dolphin_alert_map = {}
 wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token'
@@ -79,27 +80,22 @@ wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&
 wechat_team_send_msg = '{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\",\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}'
 wechat_user_send_msg = '{\"touser\":\"{toUser}\",\"agentid\":\"{agentId}\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}'
-dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url
-dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url
-dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg
-dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg
-dolphin_alert_map.update(config['configurations']['dolphin-alert'])
+dolphin_alert_config_map = config['configurations']['dolphin-alert']
+if dolphin_alert_config_map['enterprise.wechat.enable']:
+    dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url
+    dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url
+    dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg
+    dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg
+dolphin_alert_map.update(dolphin_alert_config_map)
 # application-api.properties
 dolphin_app_api_map = {}
-dolphin_app_api_map['logging.config'] = 'classpath:apiserver_logback.xml'
-dolphin_app_api_map['spring.messages.basename'] = 'i18n/messages'
-dolphin_app_api_map['server.servlet.context-path'] = '/dolphinscheduler/'
 dolphin_app_api_map.update(config['configurations']['dolphin-application-api'])
-# application-dao.properties
-dolphin_application_map = {}
-dolphin_application_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource'
-dolphin_application_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver']
-dolphin_application_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url']
-dolphin_application_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username']
-dolphin_application_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password']
-dolphin_application_map.update(config['configurations']['dolphin-application'])
 # common.properties
 dolphin_common_map = {}
@@ -118,33 +114,42 @@ else:
 dolphin_common_map_tmp = config['configurations']['dolphin-common']
 data_basedir_path = dolphin_common_map_tmp['data.basedir.path']
-process_exec_basepath = data_basedir_path + '/exec'
-data_download_basedir_path = data_basedir_path + '/download'
-dolphin_common_map['process.exec.basepath'] = process_exec_basepath
-dolphin_common_map['data.download.basedir.path'] = data_download_basedir_path
 dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path
+dolphin_common_map.update(config['configurations']['dolphin-common'])
-zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", [])
-if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']:
-    clientPort = config['configurations']['zoo.cfg']['clientPort']
-    zookeeperPort = ":" + clientPort + ","
-    dolphin_common_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort
-dolphin_common_map.update(config['configurations']['dolphin-common'])
+# datasource.properties
+dolphin_datasource_map = {}
+dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource'
+dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver']
+dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url']
+dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username']
+dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password']
+dolphin_datasource_map.update(config['configurations']['dolphin-datasource'])
+# master.properties
+dolphin_master_map = config['configurations']['dolphin-master']
 # quartz.properties
 dolphin_quartz_map = {}
 dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass']
+dolphin_quartz_map['org.quartz.dataSource.myDs.driver'] = dolphin_database_config['dolphin_database_driver']
+dolphin_quartz_map['org.quartz.dataSource.myDs.URL'] = dolphin_database_config['dolphin_database_url']
+dolphin_quartz_map['org.quartz.dataSource.myDs.user'] = dolphin_database_config['dolphin_database_username']
+dolphin_quartz_map['org.quartz.dataSource.myDs.password'] = dolphin_database_config['dolphin_database_password']
 dolphin_quartz_map.update(config['configurations']['dolphin-quartz'])
-# if 'ganglia_server_host' in config['clusterHostInfo'] and \
-# len(config['clusterHostInfo']['ganglia_server_host'])>0:
-# ganglia_installed = True
-# ganglia_server = config['clusterHostInfo']['ganglia_server_host'][0]
-# ganglia_report_interval = 60
-# else:
-# ganglia_installed = False
+# worker.properties
+dolphin_worker_map = config['configurations']['dolphin-worker']
+# zookeeper.properties
+dolphin_zookeeper_map={}
+zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", [])
+if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']:
clientPort = config['configurations']['zoo.cfg']['clientPort']
zookeeperPort = ":" + clientPort + ","
dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort
dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper'])
if 'spring.servlet.multipart.max-file-size' in dolphin_app_api_map:
file_size = dolphin_app_api_map['spring.servlet.multipart.max-file-size']
dolphin_app_api_map['spring.servlet.multipart.max-file-size'] = file_size + "MB"
if 'spring.servlet.multipart.max-request-size' in dolphin_app_api_map:
request_size = dolphin_app_api_map['spring.servlet.multipart.max-request-size']
dolphin_app_api_map['spring.servlet.multipart.max-request-size'] = request_size + "MB"
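
Two details in the new params.py are easy to misread, so here is a minimal standalone sketch of what they produce (the host names and the size value are hypothetical, not from the stack):

# Sketch only; zookeeperHosts/clientPort values are made up for illustration.
zookeeperHosts = ['zk1.example.com', 'zk2.example.com', 'zk3.example.com']
clientPort = '2181'

# Using ":" + port + "," as the join separator puts the port after every host
# except the last; the trailing ":" + clientPort completes the quorum string.
zookeeperPort = ":" + clientPort + ","
quorum = zookeeperPort.join(zookeeperHosts) + ":" + clientPort
print(quorum)  # zk1.example.com:2181,zk2.example.com:2181,zk3.example.com:2181

# Ambari stores the multipart sizes as bare numbers; Spring Boot expects a
# unit, so the plugin appends "MB" before rendering application-api.properties.
dolphin_app_api_map = {'spring.servlet.multipart.max-file-size': '1024'}
if 'spring.servlet.multipart.max-file-size' in dolphin_app_api_map:
    file_size = dolphin_app_api_map['spring.servlet.multipart.max-file-size']
    dolphin_app_api_map['spring.servlet.multipart.max-file-size'] = file_size + "MB"
print(dolphin_app_api_map)  # {'spring.servlet.multipart.max-file-size': '1024MB'}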

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_datasource_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}
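
These *.properties.j2 templates are rendered by Ambari's Jinja2, which runs under Python 2, hence iteritems(). A sketch of the rendering, with a small shim so it also runs on Python 3 (the map contents are hypothetical):

from jinja2 import Template

class Py2Dict(dict):
    # Python 2 dicts have iteritems(); alias it to items() so the template
    # body above renders unchanged on Python 3.
    iteritems = dict.items

template = Template(
    "{% for key, value in dolphin_datasource_map.iteritems() -%}\n"
    "{{key}}={{value}}\n"
    "{% endfor %}"
)
print(template.render(dolphin_datasource_map=Py2Dict({
    'spring.datasource.url': 'jdbc:mysql://db:3306/dolphinscheduler',  # hypothetical
    'spring.datasource.username': 'dolphin',                           # hypothetical
})))
# spring.datasource.url=jdbc:mysql://db:3306/dolphinscheduler
# spring.datasource.username=dolphin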

13
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2

@ -48,22 +48,19 @@ pid={{dolphin_pidfile_dir}}/$command.pid
 cd $DOLPHINSCHEDULER_HOME

 if [ "$command" = "api-server" ]; then
-  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/apiserver_logback.xml -Dspring.profiles.active=api"
+  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api"
   CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer
 elif [ "$command" = "master-server" ]; then
-  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/master_logback.xml -Ddruid.mysql.usePingMethod=false"
+  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false"
   CLASS=org.apache.dolphinscheduler.server.master.MasterServer
 elif [ "$command" = "worker-server" ]; then
-  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/worker_logback.xml -Ddruid.mysql.usePingMethod=false"
+  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false"
   CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer
 elif [ "$command" = "alert-server" ]; then
-  LOG_FILE="-Dlogback.configurationFile={{dolphin_conf_dir}}/alert_logback.xml"
+  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml"
   CLASS=org.apache.dolphinscheduler.alert.AlertServer
 elif [ "$command" = "logger-server" ]; then
-  CLASS=org.apache.dolphinscheduler.server.rpc.LoggerServer
-elif [ "$command" = "combined-server" ]; then
-  LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/combined_logback.xml -Dspring.profiles.active=api -Dserver.is-combined-server=true"
-  CLASS=org.apache.dolphinscheduler.api.CombinedApplicationServer
+  CLASS=org.apache.dolphinscheduler.server.log.LoggerServer
 else
   echo "Error: No command named \`$command' was found."
   exit 1
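
Condensed, the new template dispatches each command to a main class plus logging flags. A rough Python table of the same mapping (class names and flags copied from the diff above; {conf} stands in for {{dolphin_conf_dir}}):

# Sketch of the dispatch, not the script itself.
SERVERS = {
    'api-server': ('org.apache.dolphinscheduler.api.ApiApplicationServer',
                   '-Dlogging.config={conf}/logback-api.xml -Dspring.profiles.active=api'),
    'master-server': ('org.apache.dolphinscheduler.server.master.MasterServer',
                      '-Dlogging.config={conf}/logback-master.xml -Ddruid.mysql.usePingMethod=false'),
    'worker-server': ('org.apache.dolphinscheduler.server.worker.WorkerServer',
                      '-Dlogging.config={conf}/logback-worker.xml -Ddruid.mysql.usePingMethod=false'),
    'alert-server': ('org.apache.dolphinscheduler.alert.AlertServer',
                     '-Dlogging.config={conf}/logback-alert.xml'),
    'logger-server': ('org.apache.dolphinscheduler.server.log.LoggerServer', ''),
}

def resolve(command):
    # Mirrors the final else branch of the script.
    if command not in SERVERS:
        raise SystemExit("Error: No command named `%s' was found." % command)
    return SERVERS[command]

print(resolve('master-server'))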

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_master_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_worker_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_zookeeper_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}

0
ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json → ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json

84
ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json → ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json

@ -151,18 +151,40 @@
           "subsection-name": "env-row1-col2"
         },
         {
-          "config": "dolphin-common/res.upload.startup.type",
+          "config": "dolphin-common/resource.storage.type",
           "subsection-name": "dynamic-row1-col1"
         },
+        {
+          "config": "dolphin-common/resource.upload.path",
+          "subsection-name": "dynamic-row1-col1",
+          "depends-on": [
+            {
+              "configs":[
+                "dolphin-common/resource.storage.type"
+              ],
+              "if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3",
+              "then": {
+                "property_value_attributes": {
+                  "visible": true
+                }
+              },
+              "else": {
+                "property_value_attributes": {
+                  "visible": false
+                }
+              }
+            }
+          ]
+        },
         {
           "config": "dolphin-common/hdfs.root.user",
           "subsection-name": "dynamic-row1-col1",
           "depends-on": [
             {
               "configs":[
-                "dolphin-common/res.upload.startup.type"
+                "dolphin-common/resource.storage.type"
               ],
-              "if": "${dolphin-common/res.upload.startup.type} === HDFS",
+              "if": "${dolphin-common/resource.storage.type} === HDFS",
               "then": {
                 "property_value_attributes": {
                   "visible": true
@ -182,9 +204,9 @@
           "depends-on": [
             {
               "configs":[
-                "dolphin-common/res.upload.startup.type"
+                "dolphin-common/resource.storage.type"
               ],
-              "if": "${dolphin-common/res.upload.startup.type} === HDFS",
+              "if": "${dolphin-common/resource.storage.type} === HDFS",
               "then": {
                 "property_value_attributes": {
                   "visible": true
@ -204,9 +226,9 @@
           "depends-on": [
             {
               "configs":[
-                "dolphin-common/res.upload.startup.type"
+                "dolphin-common/resource.storage.type"
               ],
-              "if": "${dolphin-common/res.upload.startup.type} === HDFS",
+              "if": "${dolphin-common/resource.storage.type} === HDFS",
               "then": {
                 "property_value_attributes": {
                   "visible": true
@ -226,9 +248,9 @@
           "depends-on": [
             {
               "configs":[
-                "dolphin-common/res.upload.startup.type"
+                "dolphin-common/resource.storage.type"
               ],
-              "if": "${dolphin-common/res.upload.startup.type} === S3",
+              "if": "${dolphin-common/resource.storage.type} === S3",
               "then": {
                 "property_value_attributes": {
                   "visible": true
@ -248,9 +270,9 @@
           "depends-on": [
             {
               "configs":[
-                "dolphin-common/res.upload.startup.type"
+                "dolphin-common/resource.storage.type"
              ],
-              "if": "${dolphin-common/res.upload.startup.type} === S3",
+              "if": "${dolphin-common/resource.storage.type} === S3",
               "then": {
                 "property_value_attributes": {
                   "visible": true
@ -270,9 +292,9 @@
           "depends-on": [
             {
               "configs":[
-                "dolphin-common/res.upload.startup.type"
+                "dolphin-common/resource.storage.type"
              ],
-              "if": "${dolphin-common/res.upload.startup.type} === S3",
+              "if": "${dolphin-common/resource.storage.type} === S3",
               "then": {
                 "property_value_attributes": {
                   "visible": true
@ -356,6 +378,28 @@
             }
           ]
         },
+        {
+          "config": "dolphin-common/kerberos.expire.time",
+          "subsection-name": "dynamic-row1-col2",
+          "depends-on": [
+            {
+              "configs":[
+                "dolphin-common/hadoop.security.authentication.startup.state"
+              ],
+              "if": "${dolphin-common/hadoop.security.authentication.startup.state}",
+              "then": {
+                "property_value_attributes": {
+                  "visible": true
+                }
+              },
+              "else": {
+                "property_value_attributes": {
+                  "visible": false
+                }
+              }
+            }
+          ]
+        },
         {
           "config": "dolphin-alert/enterprise.wechat.enable",
           "subsection-name": "dynamic-row1-col3"
@ -505,11 +549,17 @@
           }
         },
         {
-          "config": "dolphin-common/res.upload.startup.type",
+          "config": "dolphin-common/resource.storage.type",
           "widget": {
             "type": "combo"
           }
         },
+        {
+          "config": "dolphin-common/resource.upload.path",
+          "widget": {
+            "type": "text-field"
+          }
+        },
         {
           "config": "dolphin-common/hdfs.root.user",
           "widget": {
@ -570,6 +620,12 @@
             "type": "text-field"
           }
         },
+        {
+          "config": "dolphin-common/kerberos.expire.time",
+          "widget": {
+            "type": "text-field"
+          }
+        },
         {
           "config": "dolphin-alert/enterprise.wechat.enable",
           "widget": {

13
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 → docker/kubernetes/dolphinscheduler/requirements.yaml

@ -14,7 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-{% for key, value in dolphin_application_map.iteritems() -%}
-    {{key}}={{value}}
-{% endfor %}
+dependencies:
+- name: postgresql
+  version: 8.x.x
+  repository: https://charts.bitnami.com/bitnami
+  condition: postgresql.enabled
+- name: zookeeper
+  version: 5.x.x
+  repository: https://charts.bitnami.com/bitnami
+  condition: redis.enabled

2
dolphinscheduler-alert/pom.xml

@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.dolphinscheduler</groupId>
         <artifactId>dolphinscheduler</artifactId>
-        <version>1.3.2-SNAPSHOT</version>
+        <version>1.3.4-SNAPSHOT</version>
     </parent>
     <artifactId>dolphinscheduler-alert</artifactId>
     <name>${project.artifactId}</name>

4
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java

@ -74,7 +74,7 @@ public class EmailAlertPluginTest {
         alertInfo.setAlertData(alertData);
         List<String> list = new ArrayList<String>(){{ add("xx@xx.com"); }};
         alertInfo.addProp("receivers", list);
-        Map<String, Object> ret = plugin.process(alertInfo);
-        assertFalse(Boolean.parseBoolean(String.valueOf(ret.get(Constants.STATUS))));
+        // Map<String, Object> ret = plugin.process(alertInfo);
+        // assertFalse(Boolean.parseBoolean(String.valueOf(ret.get(Constants.STATUS))));
     }
 }

2
dolphinscheduler-api/pom.xml

@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.dolphinscheduler</groupId>
         <artifactId>dolphinscheduler</artifactId>
-        <version>1.3.2-SNAPSHOT</version>
+        <version>1.3.4-SNAPSHOT</version>
     </parent>
     <artifactId>dolphinscheduler-api</artifactId>
     <name>${project.artifactId}</name>

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java

@ -86,7 +86,7 @@ public class AccessTokenController extends BaseController {
logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(),
userId, expireTime, token); userId, expireTime, token);
Map<String, Object> result = accessTokenService.createToken(userId, expireTime, token); Map<String, Object> result = accessTokenService.createToken(loginUser, userId, expireTime, token);
return returnDataList(result); return returnDataList(result);
} }
@ -106,7 +106,7 @@ public class AccessTokenController extends BaseController {
@RequestParam(value = "userId") int userId, @RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime) { @RequestParam(value = "expireTime") String expireTime) {
logger.info("login user {}, generate token , userId : {} , token expire time : {}", loginUser, userId, expireTime); logger.info("login user {}, generate token , userId : {} , token expire time : {}", loginUser, userId, expireTime);
Map<String, Object> result = accessTokenService.generateToken(userId, expireTime); Map<String, Object> result = accessTokenService.generateToken(loginUser, userId, expireTime);
return returnDataList(result); return returnDataList(result);
} }
@ -185,7 +185,7 @@ public class AccessTokenController extends BaseController {
logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(),
userId, expireTime, token); userId, expireTime, token);
Map<String, Object> result = accessTokenService.updateToken(id, userId, expireTime, token); Map<String, Object> result = accessTokenService.updateToken(loginUser, id, userId, expireTime, token);
return returnDataList(result); return returnDataList(result);
} }

47
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java

@ -47,6 +47,7 @@ import org.apache.dolphinscheduler.api.service.ResourcesService;
 import org.apache.dolphinscheduler.api.service.UdfFuncService;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.enums.ProgramType;
 import org.apache.dolphinscheduler.common.enums.ResourceType;
 import org.apache.dolphinscheduler.common.enums.UdfType;
 import org.apache.dolphinscheduler.common.utils.ParameterUtils;
@ -95,6 +96,13 @@ public class ResourcesController extends BaseController {

     /**
      *
+     * @param loginUser login user
+     * @param type type
+     * @param alias alias
+     * @param description description
+     * @param pid parent id
+     * @param currentDir current directory
+     * @return create result code
      */
     @ApiOperation(value = "createDirctory", notes = "CREATE_RESOURCE_NOTES")
     @ApiImplicitParams({
@ -159,6 +167,7 @@
      * @param resourceId resource id
      * @param type resource type
      * @param description description
+     * @param file resource file
      * @return update result code
      */
     @ApiOperation(value = "updateResource", notes = "UPDATE_RESOURCE_NOTES")
@ -166,7 +175,8 @@
         @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
         @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
         @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
-        @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String")
+        @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
+        @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile")
     })
     @PostMapping(value = "/update")
     @ApiException(UPDATE_RESOURCE_ERROR)
@ -174,10 +184,11 @@
         @RequestParam(value = "id") int resourceId,
         @RequestParam(value = "type") ResourceType type,
         @RequestParam(value = "name") String alias,
-        @RequestParam(value = "description", required = false) String description) {
-        logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}",
-            loginUser.getUserName(), type, alias, description);
-        return resourceService.updateResource(loginUser, resourceId, alias, description, type);
+        @RequestParam(value = "description", required = false) String description,
+        @RequestParam(value = "file" ,required = false) MultipartFile file) {
+        logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}, file: {}",
+            loginUser.getUserName(), type, alias, description, file);
+        return resourceService.updateResource(loginUser, resourceId, alias, description, type, file);
     }

     /**
@ -299,7 +310,7 @@
      * @param type resource type
      * @return resource list
      */
-    @ApiOperation(value = "queryResourceJarList", notes = "QUERY_RESOURCE_LIST_NOTES")
+    @ApiOperation(value = "queryResourceByProgramType", notes = "QUERY_RESOURCE_LIST_NOTES")
     @ApiImplicitParams({
         @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType")
     })
@ -307,10 +318,14 @@
     @ResponseStatus(HttpStatus.OK)
     @ApiException(QUERY_RESOURCES_LIST_ERROR)
     public Result queryResourceJarList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
-        @RequestParam(value = "type") ResourceType type
+        @RequestParam(value = "type") ResourceType type,
+        @RequestParam(value = "programType",required = false) ProgramType programType
     ) {
-        logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString());
-        Map<String, Object> result = resourceService.queryResourceJarList(loginUser, type);
+        String programTypeName = programType == null ? "" : programType.name();
+        String userName = loginUser.getUserName();
+        userName = userName.replaceAll("[\n|\r|\t]", "_");
+        logger.info("query resource list, login user:{}, resource type:{}, program type:{}", userName, type, programTypeName);
+        Map<String, Object> result = resourceService.queryResourceByProgramType(loginUser, type, programType);
         return returnDataList(result);
     }
@ -594,7 +609,7 @@
     @GetMapping(value = "/udf-func/list-paging")
     @ResponseStatus(HttpStatus.OK)
     @ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR)
-    public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
+    public Result<Object> queryUdfFuncListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
         @RequestParam("pageNo") Integer pageNo,
         @RequestParam(value = "searchVal", required = false) String searchVal,
         @RequestParam("pageSize") Integer pageSize
@ -611,23 +626,25 @@
     }

     /**
-     * query resource list by type
+     * query udf func list
      *
      * @param loginUser login user
      * @param type resource type
      * @return resource list
      */
-    @ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES")
+    @ApiOperation(value = "queryUdfFuncList", notes = "QUERY_UDF_FUNC_LIST_NOTES")
     @ApiImplicitParams({
         @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType")
     })
     @GetMapping(value = "/udf-func/list")
     @ResponseStatus(HttpStatus.OK)
     @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR)
-    public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
+    public Result<Object> queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
         @RequestParam("type") UdfType type) {
-        logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type);
-        Map<String, Object> result = udfFuncService.queryResourceList(loginUser, type.ordinal());
+        String userName = loginUser.getUserName();
+        userName = userName.replaceAll("[\n|\r|\t]", "_");
+        logger.info("query udf func list, user:{}, type:{}", userName, type);
+        Map<String, Object> result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal());
         return returnDataList(result);
     }

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java

@ -166,7 +166,7 @@ public class UsersController extends BaseController {
@RequestParam(value = "state", required = false) int state) throws Exception { @RequestParam(value = "state", required = false) int state) throws Exception {
logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}, state: {}", logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}, state: {}",
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue, state); loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue, state);
Map<String, Object> result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue, state); Map<String, Object> result = usersService.updateUser(loginUser, id, userName, userPassword, email, tenantId, phone, queue, state);
return returnDataList(result); return returnDataList(result);
} }

7
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java

@ -44,7 +44,8 @@ public interface AccessTokenService {
      * @param token token string
      * @return create result code
      */
-    Map<String, Object> createToken(int userId, String expireTime, String token);
+    Map<String, Object> createToken(User loginUser, int userId, String expireTime, String token);

     /**
      * generate token
@ -53,7 +54,7 @@
      * @param expireTime token expire time
      * @return token string
      */
-    Map<String, Object> generateToken(int userId, String expireTime);
+    Map<String, Object> generateToken(User loginUser, int userId, String expireTime);

     /**
      * delete access token
@ -73,5 +74,5 @@
      * @param token token string
      * @return update result code
      */
-    Map<String, Object> updateToken(int id, int userId, String expireTime, String token);
+    Map<String, Object> updateToken(User loginUser, int id, int userId, String expireTime, String token);
 }

19
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java

@ -95,6 +95,25 @@ public class BaseService {
     }

+    /**
+     * check
+     *
+     * @param result result
+     * @param bool bool
+     * @param userNoOperationPerm status
+     * @return check result
+     */
+    protected boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
+        //only admin can operate
+        if (bool) {
+            result.put(Constants.STATUS, userNoOperationPerm);
+            result.put(Constants.MSG, userNoOperationPerm.getMsg());
+            return true;
+        }
+        return false;
+    }
+
     /**
      * get cookie info by name
      *

165
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java

@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.api.exceptions.ServiceException;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
+import org.apache.dolphinscheduler.common.enums.ProgramType;
 import org.apache.dolphinscheduler.common.enums.ResourceType;
 import org.apache.dolphinscheduler.common.utils.*;
 import org.apache.dolphinscheduler.dao.entity.*;
@ -87,7 +88,7 @@ public class ResourcesService extends BaseService {
      * @param currentDir current directory
      * @return create directory result
      */
-    @Transactional(rollbackFor = RuntimeException.class)
+    @Transactional(rollbackFor = Exception.class)
     public Result createDirectory(User loginUser,
                                   String name,
                                   String description,
@ -101,8 +102,11 @@
             putMsg(result, Status.HDFS_NOT_STARTUP);
             return result;
         }
-        String fullName = "/".equals(currentDir) ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name);
+        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name);
+        result = verifyResourceName(fullName,type,loginUser);
+        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
+            return result;
+        }

         if (pid != -1) {
             Resource parentResource = resourcesMapper.selectById(pid);
@ -165,7 +169,7 @@
      * @param currentDir current directory
      * @return create result code
      */
-    @Transactional(rollbackFor = RuntimeException.class)
+    @Transactional(rollbackFor = Exception.class)
     public Result createResource(User loginUser,
                                  String name,
                                  String desc,
@ -230,7 +234,7 @@
         }

         // check resoure name exists
-        String fullName = "/".equals(currentDir) ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name);
+        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name);
         if (checkResourceExists(fullName, 0, type.ordinal())) {
             logger.error("resource {} has exist, can't recreate", name);
             putMsg(result, Status.RESOURCE_EXIST);
@ -288,14 +292,16 @@
      * @param name name
      * @param desc description
      * @param type resource type
+     * @param file resource file
      * @return update result code
      */
-    @Transactional(rollbackFor = RuntimeException.class)
+    @Transactional(rollbackFor = Exception.class)
     public Result updateResource(User loginUser,
                                  int resourceId,
                                  String name,
                                  String desc,
-                                 ResourceType type) {
+                                 ResourceType type,
+                                 MultipartFile file) {
         Result result = new Result();

         // if resource upload startup
@ -315,7 +321,7 @@
             return result;
         }

-        if (name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
+        if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
             putMsg(result, Status.SUCCESS);
             return result;
         }
@ -331,6 +337,42 @@
             return result;
         }

+        if (file != null) {
+
+            // file is empty
+            if (file.isEmpty()) {
+                logger.error("file is empty: {}", file.getOriginalFilename());
+                putMsg(result, Status.RESOURCE_FILE_IS_EMPTY);
+                return result;
+            }
+
+            // file suffix
+            String fileSuffix = FileUtils.suffix(file.getOriginalFilename());
+            String nameSuffix = FileUtils.suffix(name);
+
+            // determine file suffix
+            if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
+                /**
+                 * rename file suffix and original suffix must be consistent
+                 */
+                logger.error("rename file suffix and original suffix must be consistent: {}", file.getOriginalFilename());
+                putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE);
+                return result;
+            }
+
+            //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar
+            if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(FileUtils.suffix(originFullName))) {
+                logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg());
+                putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR);
+                return result;
+            }
+
+            if (file.getSize() > Constants.MAX_FILE_SIZE) {
+                logger.error("file size is too large: {}", file.getOriginalFilename());
+                putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT);
+                return result;
+            }
+        }
+
         // query tenant by user id
         String tenantCode = getTenantCode(resource.getUserId(),result);
         if (StringUtils.isEmpty(tenantCode)){
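
The validation block above enforces, among other things, that a rename plus re-upload keeps the file extension. The same rule in a compact, hypothetical Python form (os.path.splitext standing in for FileUtils.suffix):

import os

def suffix(filename):
    # Rough stand-in for FileUtils.suffix: the extension without the dot.
    return os.path.splitext(filename)[1].lstrip('.')

def suffix_change_forbidden(upload_name, new_alias):
    # Mirrors the check above: the uploaded file's suffix must match the
    # (possibly renamed) resource alias, compared case-insensitively.
    file_suffix = suffix(upload_name)
    name_suffix = suffix(new_alias)
    return not (file_suffix and file_suffix.lower() == name_suffix.lower())

print(suffix_change_forbidden('wordcount.jar', 'wc.jar'))  # False: allowed
print(suffix_change_forbidden('wordcount.jar', 'wc.py'))   # True: rejected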
@ -380,31 +422,61 @@
         }

         // updateResource data
-        List<Integer> childrenResource = listAllChildren(resource,false);
         Date now = new Date();
+
         resource.setAlias(name);
         resource.setFullName(fullName);
         resource.setDescription(desc);
         resource.setUpdateTime(now);
+        if (file != null) {
+            resource.setFileName(file.getOriginalFilename());
+            resource.setSize(file.getSize());
+        }

         try {
             resourcesMapper.updateById(resource);
-            if (resource.isDirectory() && CollectionUtils.isNotEmpty(childrenResource)) {
-                String matcherFullName = Matcher.quoteReplacement(fullName);
-                List<Resource> childResourceList = new ArrayList<>();
-                List<Resource> resourceList = resourcesMapper.listResourceByIds(childrenResource.toArray(new Integer[childrenResource.size()]));
-                childResourceList = resourceList.stream().map(t -> {
-                    t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName));
-                    t.setUpdateTime(now);
-                    return t;
-                }).collect(Collectors.toList());
-                resourcesMapper.batchUpdateResource(childResourceList);
+            if (resource.isDirectory()) {
+                List<Integer> childrenResource = listAllChildren(resource,false);
+                if (CollectionUtils.isNotEmpty(childrenResource)) {
+                    String matcherFullName = Matcher.quoteReplacement(fullName);
+                    List<Resource> childResourceList = new ArrayList<>();
+                    Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]);
+                    List<Resource> resourceList = resourcesMapper.listResourceByIds(childResIdArray);
+                    childResourceList = resourceList.stream().map(t -> {
+                        t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName));
+                        t.setUpdateTime(now);
+                        return t;
+                    }).collect(Collectors.toList());
+                    resourcesMapper.batchUpdateResource(childResourceList);
+
+                    if (ResourceType.UDF.equals(resource.getType())) {
+                        List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray);
+                        if (CollectionUtils.isNotEmpty(udfFuncs)) {
+                            udfFuncs = udfFuncs.stream().map(t -> {
+                                t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName));
+                                t.setUpdateTime(now);
+                                return t;
+                            }).collect(Collectors.toList());
+                            udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
+                        }
+                    }
+                }
+            } else if (ResourceType.UDF.equals(resource.getType())) {
+                List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId});
+                if (CollectionUtils.isNotEmpty(udfFuncs)) {
+                    udfFuncs = udfFuncs.stream().map(t -> {
+                        t.setResourceName(fullName);
+                        t.setUpdateTime(now);
+                        return t;
+                    }).collect(Collectors.toList());
+                    udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
+                }
             }

             putMsg(result, Status.SUCCESS);
             Map<Object, Object> dataMap = new BeanMap(resource);
-            Map<String, Object> resultMap = new HashMap<>();
+            Map<String, Object> resultMap = new HashMap<>(5);
             for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
                 if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
                     resultMap.put(entry.getKey().toString(), entry.getValue());
@ -415,11 +487,31 @@
             logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e);
             throw new ServiceException(Status.UPDATE_RESOURCE_ERROR);
         }

         // if name unchanged, return directly without moving on HDFS
-        if (originResourceName.equals(name)) {
+        if (originResourceName.equals(name) && file == null) {
+            return result;
+        }
+
+        if (file != null) {
+            // fail upload
+            if (!upload(loginUser, fullName, file, type)) {
+                logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename());
+                putMsg(result, Status.HDFS_OPERATION_ERROR);
+                throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
+            }
+            if (!fullName.equals(originFullName)) {
+                try {
+                    HadoopUtils.getInstance().delete(originHdfsFileName,false);
+                } catch (IOException e) {
+                    logger.error(e.getMessage(),e);
+                    throw new RuntimeException(String.format("delete resource: %s failed.", originFullName));
+                }
+            }
             return result;
         }

         // get the path of dest file in hdfs
         String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName);
@ -449,7 +541,7 @@
      */
     public Map<String, Object> queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {

-        HashMap<String, Object> result = new HashMap<>();
+        HashMap<String, Object> result = new HashMap<>(5);
         Page<Resource> page = new Page(pageNo, pageSize);
         int userId = loginUser.getId();
         if (isAdmin(loginUser)) {
@ -550,7 +642,7 @@
      */
     public Map<String, Object> queryResourceList(User loginUser, ResourceType type) {

-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         int userId = loginUser.getId();
         if(isAdmin(loginUser)){
@ -565,21 +657,33 @@
     }

     /**
-     * query resource list
+     * query resource list by program type
      *
      * @param loginUser login user
      * @param type resource type
      * @return resource list
      */
-    public Map<String, Object> queryResourceJarList(User loginUser, ResourceType type) {
-
-        Map<String, Object> result = new HashMap<>();
+    public Map<String, Object> queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) {
+        Map<String, Object> result = new HashMap<>(5);
+        String suffix = ".jar";
         int userId = loginUser.getId();
         if(isAdmin(loginUser)){
             userId = 0;
         }
+        if (programType != null) {
+            switch (programType) {
+                case JAVA:
+                    break;
+                case SCALA:
+                    break;
+                case PYTHON:
+                    suffix = ".py";
+                    break;
+            }
+        }
         List<Resource> allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0);
-        List<Resource> resources = new ResourceFilter(".jar",new ArrayList<>(allResourceList)).filter();
+        List<Resource> resources = new ResourceFilter(suffix,new ArrayList<>(allResourceList)).filter();
         Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources);
         result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
         putMsg(result,Status.SUCCESS);
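
The programType switch reduces to a choice of file suffix that ResourceFilter then applies. A hypothetical Python rendering of the same selection rule:

def resource_suffix(program_type):
    # JAVA and SCALA tasks pick jar resources; PYTHON picks .py files,
    # mirroring the switch in queryResourceByProgramType.
    return '.py' if program_type == 'PYTHON' else '.jar'

resources = ['wordcount.jar', 'etl.py', 'udf.jar']  # hypothetical resource names
for ptype in ('JAVA', 'SCALA', 'PYTHON'):
    wanted = [r for r in resources if r.endswith(resource_suffix(ptype))]
    print(ptype, wanted)
# JAVA ['wordcount.jar', 'udf.jar']
# SCALA ['wordcount.jar', 'udf.jar']
# PYTHON ['etl.py']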
@ -829,7 +933,7 @@
      * @param content content
      * @return create result code
      */
-    @Transactional(rollbackFor = RuntimeException.class)
+    @Transactional(rollbackFor = Exception.class)
     public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDirectory) {
         Result result = new Result();
         // if resource upload startup
@ -852,12 +956,25 @@
         }

         String name = fileName.trim() + "." + nameSuffix;
-        String fullName = "/".equals(currentDirectory) ? String.format("%s%s",currentDirectory,name):String.format("%s/%s",currentDirectory,name);
+        String fullName = currentDirectory.equals("/") ? String.format("%s%s",currentDirectory,name):String.format("%s/%s",currentDirectory,name);

         result = verifyResourceName(fullName,type,loginUser);
         if (!result.getCode().equals(Status.SUCCESS.getCode())) {
             return result;
         }
+        if (pid != -1) {
+            Resource parentResource = resourcesMapper.selectById(pid);
+
+            if (parentResource == null) {
+                putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
+                return result;
+            }
+
+            if (!hasPerm(loginUser, parentResource.getUserId())) {
+                putMsg(result, Status.USER_NO_OPERATION_PERM);
+                return result;
+            }
+        }

         // save data
         Date now = new Date();
@ -891,7 +1008,7 @@
      * @param content content
      * @return update result cod
      */
-    @Transactional(rollbackFor = RuntimeException.class)
+    @Transactional(rollbackFor = Exception.class)
     public Result updateResourceContent(int resourceId, String content) {
         Result result = new Result();
@ -1096,7 +1213,7 @@
      * @return unauthorized result code
      */
     public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (checkAdmin(loginUser, result)) {
             return result;
@ -1148,7 +1265,7 @@
      * @return authorized result
      */
     public Map<String, Object> authorizedFile(User loginUser, Integer userId) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         if (checkAdmin(loginUser, result)){
             return result;
         }

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java

@ -148,7 +148,7 @@ public class UdfFuncService extends BaseService{
      */
     public Map<String, Object> queryUdfFuncDetail(int id) {

-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         UdfFunc udfFunc = udfFuncMapper.selectById(id);
         if (udfFunc == null) {
             putMsg(result, Status.RESOURCE_NOT_EXIST);
@ -244,7 +244,7 @@
      * @return udf function list page
      */
     public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);

         PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
@ -276,15 +276,19 @@
     }

     /**
-     * query data resource by type
+     * query udf list
      *
      * @param loginUser login user
-     * @param type resource type
-     * @return resource list
+     * @param type udf type
+     * @return udf func list
      */
-    public Map<String, Object> queryResourceList(User loginUser, Integer type) {
-        Map<String, Object> result = new HashMap<>();
-        List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type);
+    public Map<String, Object> queryUdfFuncList(User loginUser, Integer type) {
+        Map<String, Object> result = new HashMap<>(5);
+        int userId = loginUser.getId();
+        if (isAdmin(loginUser)) {
+            userId = 0;
+        }
+        List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type);

         result.put(Constants.DATA_LIST, udfFuncList);
         putMsg(result, Status.SUCCESS);

51
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java

@ -103,7 +103,7 @@ public class UsersService extends BaseService {
String queue, String queue,
int state) throws Exception { int state) throws Exception {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>(5);
//check all user params //check all user params
String msg = this.checkUserParams(userName, userPassword, email, phone); String msg = this.checkUserParams(userName, userPassword, email, phone);
@ -231,7 +231,7 @@ public class UsersService extends BaseService {
* @return user list page * @return user list page
*/ */
public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>(5);
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result; return result;
@ -253,6 +253,8 @@ public class UsersService extends BaseService {
/** /**
* updateProcessInstance user * updateProcessInstance user
* *
*
* @param loginUser
* @param userId user id * @param userId user id
* @param userName user name * @param userName user name
* @param userPassword user password * @param userPassword user password
@ -263,7 +265,7 @@ public class UsersService extends BaseService {
* @return update result code * @return update result code
* @throws Exception exception * @throws Exception exception
*/ */
-    public Map<String, Object> updateUser(int userId,
+    public Map<String, Object> updateUser(User loginUser, int userId,
                                           String userName,
                                           String userPassword,
                                           String email,
@@ -271,16 +273,17 @@ public class UsersService extends BaseService {
                                           String phone,
                                           String queue,
                                           int state) throws Exception {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         result.put(Constants.STATUS, false);
+        if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) {
+            return result;
+        }
         User user = userMapper.selectById(userId);
         if (user == null) {
             putMsg(result, Status.USER_NOT_EXIST, userId);
             return result;
         }
         if (StringUtils.isNotEmpty(userName)) {
             if (!CheckUtils.checkUserName(userName)){
@@ -394,7 +397,7 @@ public class UsersService extends BaseService {
      * @throws Exception exception when operate hdfs
      */
     public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (!isAdmin(loginUser)) {
             putMsg(result, Status.USER_NO_OPERATION_PERM, id);
@@ -434,7 +437,7 @@ public class UsersService extends BaseService {
      */
     @Transactional(rollbackFor = RuntimeException.class)
     public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         result.put(Constants.STATUS, false);
         //only admin can operate
@@ -484,7 +487,7 @@ public class UsersService extends BaseService {
      */
     @Transactional(rollbackFor = RuntimeException.class)
     public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
             return result;
@@ -581,7 +584,7 @@ public class UsersService extends BaseService {
      */
     @Transactional(rollbackFor = RuntimeException.class)
     public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
@@ -628,7 +631,7 @@ public class UsersService extends BaseService {
      */
     @Transactional(rollbackFor = RuntimeException.class)
     public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         result.put(Constants.STATUS, false);
         //only admin can operate
@@ -708,7 +711,7 @@ public class UsersService extends BaseService {
      * @return user list
      */
    public Map<String, Object> queryAllGeneralUsers(User loginUser) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
             return result;
@@ -729,7 +732,7 @@ public class UsersService extends BaseService {
      * @return user list
      */
     public Map<String, Object> queryUserList(User loginUser) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
             return result;
@@ -773,7 +776,7 @@ public class UsersService extends BaseService {
      */
     public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
             return result;
@@ -809,7 +812,7 @@ public class UsersService extends BaseService {
      * @return authorized result code
      */
     public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         //only admin can operate
         if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
             return result;
@@ -821,24 +824,6 @@ public class UsersService extends BaseService {
         return result;
     }
 
-    /**
-     * check
-     *
-     * @param result result
-     * @param bool bool
-     * @param userNoOperationPerm status
-     * @return check result
-     */
-    private boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
-        //only admin can operate
-        if (bool) {
-            result.put(Constants.STATUS, userNoOperationPerm);
-            result.put(Constants.MSG, userNoOperationPerm.getMsg());
-            return true;
-        }
-        return false;
-    }
-
     /**
      * @param tenantId tenant id
      * @return true if tenant exists, otherwise return false
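Note that the private check(...) helper is deleted from UsersService in the last hunk even though the new updateUser guard still calls check(...); after the merge the helper presumably lives in the shared BaseService. A minimal, self-contained sketch of the guard pattern, with simplified stand-in names rather than the project's exact code:

// Sketch only: the early-return permission guard the diff relies on,
// assuming the removed check(...) helper now comes from a base class and
// that hasPerm(...) means "admin, or operating on one's own user id".
import java.util.HashMap;
import java.util.Map;

class PermissionGuardSketch {

    enum Status { USER_NO_OPERATION_PERM }

    static final String STATUS_KEY = "status";
    static final String MSG_KEY = "msg";

    // mirrors check(result, bool, status): record the failure and report it
    static boolean check(Map<String, Object> result, boolean bool, Status status) {
        if (bool) {
            result.put(STATUS_KEY, status);
            result.put(MSG_KEY, status.name());
            return true;
        }
        return false;
    }

    // hypothetical service method showing the usage above
    static Map<String, Object> updateUser(boolean callerHasPerm) {
        Map<String, Object> result = new HashMap<>();
        if (check(result, !callerHasPerm, Status.USER_NO_OPERATION_PERM)) {
            return result; // permission denied, nothing else runs
        }
        // ... perform the update ...
        return result;
    }
}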

24
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java

@@ -86,9 +86,14 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
      * @param token token string
      * @return create result code
      */
-    public Map<String, Object> createToken(int userId, String expireTime, String token) {
+    public Map<String, Object> createToken(User loginUser, int userId, String expireTime, String token) {
         Map<String, Object> result = new HashMap<>(5);
+        if (!hasPerm(loginUser, userId)) {
+            putMsg(result, Status.USER_NO_OPERATION_PERM);
+            return result;
+        }
         if (userId <= 0) {
             throw new IllegalArgumentException("User id should not less than or equals to 0.");
         }
@@ -118,8 +123,12 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
      * @param expireTime token expire time
      * @return token string
      */
-    public Map<String, Object> generateToken(int userId, String expireTime) {
+    public Map<String, Object> generateToken(User loginUser, int userId, String expireTime) {
         Map<String, Object> result = new HashMap<>(5);
+        if (!hasPerm(loginUser, userId)) {
+            putMsg(result, Status.USER_NO_OPERATION_PERM);
+            return result;
+        }
         String token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis());
         result.put(Constants.DATA_LIST, token);
         putMsg(result, Status.SUCCESS);
@@ -144,8 +153,8 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
             return result;
         }
 
-        if (loginUser.getId() != accessToken.getUserId() &&
-                loginUser.getUserType() != UserType.ADMIN_USER) {
+        if (!hasPerm(loginUser, accessToken.getUserId())) {
             putMsg(result, Status.USER_NO_OPERATION_PERM);
             return result;
         }
@@ -164,9 +173,12 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
      * @param token token string
      * @return update result code
      */
-    public Map<String, Object> updateToken(int id, int userId, String expireTime, String token) {
+    public Map<String, Object> updateToken(User loginUser, int id, int userId, String expireTime, String token) {
         Map<String, Object> result = new HashMap<>(5);
+        if (!hasPerm(loginUser, userId)) {
+            putMsg(result, Status.USER_NO_OPERATION_PERM);
+            return result;
+        }
         AccessToken accessToken = accessTokenMapper.selectById(id);
         if (accessToken == null) {
             logger.error("access token not exist, access token id {}", id);

1
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java

@@ -130,7 +130,6 @@
                 logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()));
     }
-
     /**
      * get host
      *

113
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@@ -49,6 +49,7 @@ import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
 import org.apache.dolphinscheduler.common.process.ProcessDag;
 import org.apache.dolphinscheduler.common.process.Property;
 import org.apache.dolphinscheduler.common.process.ResourceInfo;
+import org.apache.dolphinscheduler.common.task.AbstractParameters;
 import org.apache.dolphinscheduler.common.thread.Stopper;
 import org.apache.dolphinscheduler.common.utils.CollectionUtils;
 import org.apache.dolphinscheduler.common.utils.DateUtils;
@@ -166,7 +167,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
                                                        String processDefinitionJson,
                                                        String desc,
                                                        String locations,
-                                                       String connects) throws JsonProcessingException {
+                                                       String connects) {
         Map<String, Object> result = new HashMap<>();
         Project project = projectMapper.queryByName(projectName);
@@ -229,23 +230,40 @@
     /**
      * get resource ids
+     *
      * @param processData process data
      * @return resource ids
      */
     private String getResourceIds(ProcessData processData) {
-        return Optional.ofNullable(processData.getTasks())
-                .orElse(Collections.emptyList())
-                .stream()
-                .map(taskNode -> TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()))
-                .filter(Objects::nonNull)
-                .flatMap(parameters -> parameters.getResourceFilesList().stream())
-                .map(ResourceInfo::getId)
-                .distinct()
-                .map(Objects::toString)
-                .collect(Collectors.joining(","));
+        List<TaskNode> tasks = processData.getTasks();
+        Set<Integer> resourceIds = new HashSet<>();
+        StringBuilder sb = new StringBuilder();
+        if (CollectionUtils.isEmpty(tasks)) {
+            return sb.toString();
+        }
+        for (TaskNode taskNode : tasks) {
+            String taskParameter = taskNode.getParams();
+            AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter);
+            if (params == null) {
+                continue;
+            }
+            if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) {
+                Set<Integer> tempSet = params.getResourceFilesList()
+                        .stream()
+                        .filter(t -> t.getId() != 0)
+                        .map(ResourceInfo::getId)
+                        .collect(Collectors.toSet());
+                resourceIds.addAll(tempSet);
+            }
+        }
+        for (int i : resourceIds) {
+            if (sb.length() > 0) {
+                sb.append(",");
+            }
+            sb.append(i);
+        }
+        return sb.toString();
     }
 
     /**
      * query process definition list
      *
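The rewritten getResourceIds now skips resources with id 0 (references that have no database row) and deduplicates the rest before joining with commas; the ProcessDefinitionServiceTest changes later in this diff adjust their expectations accordingly. A standalone sketch of the same contract over plain integers, with simplified types standing in for ProcessData/TaskNode:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class ResourceIdsSketch {

    static String getResourceIds(List<List<Integer>> perTaskResourceIds) {
        Set<Integer> resourceIds = new LinkedHashSet<>();
        for (List<Integer> ids : perTaskResourceIds) {
            for (int id : ids) {
                if (id != 0) {           // id 0 marks a resource with no db row; skip it
                    resourceIds.add(id); // the set deduplicates repeats
                }
            }
        }
        StringBuilder sb = new StringBuilder();
        for (int id : resourceIds) {
            if (sb.length() > 0) {
                sb.append(",");
            }
            sb.append(id);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        // resource ids 0,1 in one task and 1,2 in another -> "1,2"
        System.out.println(getResourceIds(Arrays.asList(
                Arrays.asList(0, 1), Arrays.asList(1, 2))));
    }
}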
@@ -255,7 +273,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
      */
     public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
-        HashMap<String, Object> result = new HashMap<>();
+        HashMap<String, Object> result = new HashMap<>(5);
         Project project = projectMapper.queryByName(projectName);
 
         Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@@ -348,10 +366,15 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
      * @param connects connects for nodes
      * @return update result code
      */
-    public Map<String, Object> updateProcessDefinition(User loginUser, String projectName, int id, String name,
-                                                       String processDefinitionJson, String desc,
-                                                       String locations, String connects) {
-        Map<String, Object> result = new HashMap<>();
+    public Map<String, Object> updateProcessDefinition(User loginUser,
+                                                       String projectName,
+                                                       int id,
+                                                       String name,
+                                                       String processDefinitionJson,
+                                                       String desc,
+                                                       String locations,
+                                                       String connects) {
+        Map<String, Object> result = new HashMap<>(5);
         Project project = projectMapper.queryByName(projectName);
 
         Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@@ -462,7 +485,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
     @Transactional(rollbackFor = RuntimeException.class)
     public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         Project project = projectMapper.queryByName(projectName);
 
         Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@@ -674,6 +697,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
         }
     }
 
+    /**
+     * get export process metadata string
+     * @param processDefinitionId process definition id
+     * @param processDefinition process definition
+     * @return export process metadata string
+     */
+    public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
+        //create workflow json file
+        return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition));
+    }
+
     /**
      * get export process metadata string
      *
@@ -758,7 +792,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
      */
     @Transactional(rollbackFor = RuntimeException.class)
     public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
         String processMetaJson = FileUtils.file2String(file);
         List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
@@ -853,7 +887,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
                     processMeta.getProcessDefinitionLocations(),
                     processMeta.getProcessDefinitionConnects());
             putMsg(result, Status.SUCCESS);
-        } catch (JsonProcessingException e) {
+        } catch (Exception e) {
             logger.error("import process meta json data: {}", e.getMessage(), e);
             putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
         }
@@ -927,7 +961,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
         }
 
         //recursive sub-process parameter correction map key for old process id value for new process id
-        Map<Integer, Integer> subProcessIdMap = new HashMap<>();
+        Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
 
         List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
                 .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
@@ -1215,7 +1249,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
      */
     public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
-        HashMap<String, Object> result = new HashMap<>();
+        HashMap<String, Object> result = new HashMap<>(5);
         List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId);
         result.put(Constants.DATA_LIST, resourceList);
@@ -1425,7 +1459,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
                                                       Integer processId,
                                                       Project targetProject) throws JsonProcessingException {
-        Map<String, Object> result = new HashMap<>();
+        Map<String, Object> result = new HashMap<>(5);
 
         ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
         if (processDefinition == null) {
@@ -1444,6 +1478,41 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
         }
     }
 
+    /**
+     * copy process definition
+     *
+     * @param loginUser login user
+     * @param projectName project name
+     * @param processId process definition id
+     * @return copy result code
+     */
+    public Map<String, Object> copyProcessDefinition(User loginUser, String projectName, Integer processId) {
+        Map<String, Object> result = new HashMap<>(5);
+        Project project = projectMapper.queryByName(projectName);
+
+        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
+        Status resultStatus = (Status) checkResult.get(Constants.STATUS);
+        if (resultStatus != Status.SUCCESS) {
+            return checkResult;
+        }
+
+        ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
+        if (processDefinition == null) {
+            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
+            return result;
+        } else {
+            return createProcessDefinition(
+                    loginUser,
+                    projectName,
+                    processDefinition.getName() + "_copy_" + System.currentTimeMillis(),
+                    processDefinition.getProcessDefinitionJson(),
+                    processDefinition.getDescription(),
+                    processDefinition.getLocations(),
+                    processDefinition.getConnects());
+        }
+    }
+
     /**
      * batch copy process definition
      *
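The new copyProcessDefinition delegates to createProcessDefinition with the source json unchanged and a timestamp-suffixed name, so repeated copies never collide. Illustration only, with a hypothetical definition name:

class CopyNameSketch {
    public static void main(String[] args) {
        String sourceName = "etl_daily"; // hypothetical process definition name
        String copyName = sourceName + "_copy_" + System.currentTimeMillis();
        System.out.println(copyName);   // e.g. etl_daily_copy_1606280000000
    }
}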

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java

@@ -283,6 +283,26 @@ public class TenantServiceImpl extends BaseService implements TenantService {
         return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES);
     }
 
+    /**
+     * query tenant list
+     *
+     * @param tenantCode tenant code
+     * @return tenant list
+     */
+    public Map<String, Object> queryTenantList(String tenantCode) {
+        Map<String, Object> result = new HashMap<>(5);
+
+        List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode);
+        if (CollectionUtils.isNotEmpty(resourceList)) {
+            result.put(Constants.DATA_LIST, resourceList);
+            putMsg(result, Status.SUCCESS);
+        } else {
+            putMsg(result, Status.TENANT_NOT_EXIST);
+        }
+        return result;
+    }
+
     /**
      * query tenant list
      *
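The new queryTenantList(String) overload follows the result-map convention used throughout these services: on a hit, DATA_LIST carries the rows and the status is SUCCESS; on a miss only a TENANT_NOT_EXIST status/message pair is set. A minimal sketch of that contract, with string keys standing in for the Constants fields:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class TenantLookupSketch {

    static Map<String, Object> queryTenantList(List<String> matches) {
        Map<String, Object> result = new HashMap<>();
        if (!matches.isEmpty()) {
            result.put("data_list", matches);   // payload only on success
            result.put("status", "SUCCESS");
        } else {
            result.put("status", "TENANT_NOT_EXIST");
        }
        return result;                          // callers check "status" first
    }
}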

1
dolphinscheduler-api/src/main/resources/logback-api.xml

@@ -55,7 +55,6 @@
     <root level="INFO">
-        <appender-ref ref="STDOUT"/>
         <appender-ref ref="APILOGFILE"/>
     </root>

16
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java

@@ -81,7 +81,7 @@ public class AccessTokenServiceTest {
     public void testCreateToken() {
 
         when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2);
-        Map<String, Object> result = accessTokenService.createToken(1, getDate(), "AccessTokenServiceTest");
+        Map<String, Object> result = accessTokenService.createToken(getLoginUser(), 1, getDate(), "AccessTokenServiceTest");
         logger.info(result.toString());
         Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
     }
@@ -89,7 +89,7 @@ public class AccessTokenServiceTest {
     @Test
     public void testGenerateToken() {
 
-        Map<String, Object> result = accessTokenService.generateToken(Integer.MAX_VALUE, getDate());
+        Map<String, Object> result = accessTokenService.generateToken(getLoginUser(), Integer.MAX_VALUE, getDate());
         logger.info(result.toString());
         Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
         String token = (String) result.get(Constants.DATA_LIST);
@@ -121,16 +121,24 @@ public class AccessTokenServiceTest {
     public void testUpdateToken() {
 
         when(accessTokenMapper.selectById(1)).thenReturn(getEntity());
-        Map<String, Object> result = accessTokenService.updateToken(1, Integer.MAX_VALUE, getDate(), "token");
+        Map<String, Object> result = accessTokenService.updateToken(getLoginUser(), 1, Integer.MAX_VALUE, getDate(), "token");
         logger.info(result.toString());
         Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
 
         // not exist
-        result = accessTokenService.updateToken(2, Integer.MAX_VALUE, getDate(), "token");
+        result = accessTokenService.updateToken(getLoginUser(), 2, Integer.MAX_VALUE, getDate(), "token");
         logger.info(result.toString());
         Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS));
     }
 
+    private User getLoginUser() {
+        User loginUser = new User();
+        loginUser.setId(1);
+        loginUser.setUserType(UserType.ADMIN_USER);
+        return loginUser;
+    }
+
     /**
      * create entity
      */

10
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

@@ -1035,15 +1035,15 @@ public class ProcessDefinitionServiceTest {
         taskNode5.setType("SHELL");
         ShellParameters shellParameters5 = new ShellParameters();
         ResourceInfo resourceInfo5A = new ResourceInfo();
-        resourceInfo5A.setId(0);
+        resourceInfo5A.setId(1);
         ResourceInfo resourceInfo5B = new ResourceInfo();
-        resourceInfo5B.setId(1);
+        resourceInfo5B.setId(2);
         shellParameters5.setResourceList(Arrays.asList(resourceInfo5A, resourceInfo5B));
         taskNode5.setParams(JSONUtils.toJsonString(shellParameters5));
         input5.setTasks(Collections.singletonList(taskNode5));
         String output5 = (String) testMethod.invoke(processDefinitionService, input5);
         assertThat(output5.split(",")).hasSize(2)
-                .containsExactlyInAnyOrder("0", "1");
+                .containsExactlyInAnyOrder("1", "2");
 
         // when resource id list is 0 1 1 2, then return 0,1,2
         ProcessData input6 = new ProcessData();
@@ -1051,7 +1051,7 @@ public class ProcessDefinitionServiceTest {
         taskNode6.setType("SHELL");
         ShellParameters shellParameters6 = new ShellParameters();
         ResourceInfo resourceInfo6A = new ResourceInfo();
-        resourceInfo6A.setId(0);
+        resourceInfo6A.setId(3);
         ResourceInfo resourceInfo6B = new ResourceInfo();
         resourceInfo6B.setId(1);
         ResourceInfo resourceInfo6C = new ResourceInfo();
@@ -1065,7 +1065,7 @@ public class ProcessDefinitionServiceTest {
 
         String output6 = (String) testMethod.invoke(processDefinitionService, input6);
         assertThat(output6.split(",")).hasSize(3)
-                .containsExactlyInAnyOrder("0", "1", "2");
+                .containsExactlyInAnyOrder("3", "1", "2");
     }
 
     /**

22
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java

@@ -139,6 +139,10 @@ public class ResourcesServiceTest {
         Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
 
         //PARENT_RESOURCE_NOT_EXIST
+        user.setId(1);
+        user.setTenantId(1);
+        Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
+        Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
         PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
         Mockito.when(resourcesMapper.selectById(Mockito.anyInt())).thenReturn(null);
         result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, 1, "/");
@@ -159,19 +163,19 @@ public class ResourcesServiceTest {
         PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
         User user = new User();
         //HDFS_NOT_STARTUP
-        Result result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE);
+        Result result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
 
         //RESOURCE_NOT_EXIST
         Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource());
         PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
-        result = resourcesService.updateResource(user, 0, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE);
+        result = resourcesService.updateResource(user, 0, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
 
         //USER_NO_OPERATION_PERM
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(), result.getMsg());
@@ -186,7 +190,7 @@ public class ResourcesServiceTest {
         } catch (IOException e) {
             logger.error(e.getMessage(), e);
         }
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null);
         Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
 
         //SUCCESS
@@ -199,25 +203,25 @@ public class ResourcesServiceTest {
             logger.error(e.getMessage(), e);
         }
 
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
 
         //RESOURCE_EXIST
         Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest1.jar", 0, 0)).thenReturn(getResourceList());
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.FILE);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.FILE, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg());
 
         //USER_NOT_EXIST
         Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(null);
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null);
         logger.info(result.toString());
         Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode());
 
         //TENANT_NOT_EXIST
         Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
         Mockito.when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null);
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg());
@@ -231,7 +235,7 @@ public class ResourcesServiceTest {
             logger.error(e.getMessage(), e);
         }
 
-        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest1.jar", ResourceType.UDF);
+        result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest1.jar", ResourceType.UDF, null);
         logger.info(result.toString());
         Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());

8
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java

@@ -149,9 +149,11 @@ public class UdfFuncServiceTest {
     }
 
     @Test
-    public void testQueryResourceList(){
-        Mockito.when(udfFuncMapper.getUdfFuncByType(1, 1)).thenReturn(getList());
-        Map<String, Object> result = udfFuncService.queryResourceList(getLoginUser(), 1);
+    public void testQueryUdfFuncList(){
+        User user = getLoginUser();
+        user.setUserType(UserType.GENERAL_USER);
+        Mockito.when(udfFuncMapper.getUdfFuncByType(user.getId(), UdfType.HIVE.ordinal())).thenReturn(getList());
+        Map<String, Object> result = udfFuncService.queryUdfFuncList(user, UdfType.HIVE.ordinal());
         logger.info(result.toString());
         Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
         List<UdfFunc> udfFuncList = (List<UdfFunc>) result.get(Constants.DATA_LIST);

10
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java

@@ -225,13 +225,13 @@ public class UsersServiceTest {
         String userPassword = "userTest0001";
         try {
             //user not exist
-            Map<String, Object> result = usersService.updateUser(0, userName, userPassword, "3443@qq.com", 1, "13457864543", "queue", 1);
+            Map<String, Object> result = usersService.updateUser(getLoginUser(), 0, userName, userPassword, "3443@qq.com", 1, "13457864543", "queue", 1);
             Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
             logger.info(result.toString());
 
             //success
             when(userMapper.selectById(1)).thenReturn(getUser());
-            result = usersService.updateUser(1, userName, userPassword, "32222s@qq.com", 1, "13457864543", "queue", 1);
+            result = usersService.updateUser(getLoginUser(), 1, userName, userPassword, "32222s@qq.com", 1, "13457864543", "queue", 1);
             logger.info(result.toString());
             Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
         } catch (Exception e) {
@@ -357,6 +357,12 @@ public class UsersServiceTest {
     }
 
+    private User getLoginUser() {
+        User loginUser = new User();
+        loginUser.setId(1);
+        loginUser.setUserType(UserType.ADMIN_USER);
+        return loginUser;
+    }
+
     @Test
     public void getUserInfo(){

2
dolphinscheduler-common/pom.xml

@@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.dolphinscheduler</groupId>
         <artifactId>dolphinscheduler</artifactId>
-        <version>1.3.2-SNAPSHOT</version>
+        <version>1.3.4-SNAPSHOT</version>
     </parent>
     <artifactId>dolphinscheduler-common</artifactId>
     <name>dolphinscheduler-common</name>

23
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java

@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.common.enums;
+
+public enum Event {
+    ACK,
+    RESULT;
+}

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java

@@ -42,6 +42,4 @@ public class ResourceInfo {
     public void setRes(String res) {
         this.res = res;
     }
-
-
 }

1
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java

@@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.common.task.flink;
 import org.apache.dolphinscheduler.common.enums.ProgramType;
 import org.apache.dolphinscheduler.common.process.ResourceInfo;
 import org.apache.dolphinscheduler.common.task.AbstractParameters;
+import org.apache.dolphinscheduler.common.utils.CollectionUtils;
 
 import java.util.ArrayList;
 import java.util.List;

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java

@@ -216,7 +216,7 @@ public class SparkParameters extends AbstractParameters {
 
     @Override
     public boolean checkParameters() {
-        return mainJar != null && programType != null && sparkVersion != null;
+        return mainJar != null && programType != null;
     }
 
     @Override
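After this change a Spark task definition passes validation without an explicit sparkVersion; only the main jar and program type remain mandatory. A sketch of the relaxed check with stand-in field types, not the full SparkParameters class:

class SparkParametersSketch {
    Object mainJar = new Object();   // resource reference, still required
    Object programType = "JAVA";     // still required
    Object sparkVersion = null;      // optional after this change

    boolean checkParameters() {
        // sparkVersion no longer gates validity
        return mainJar != null && programType != null;
    }
}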

5
dolphinscheduler-dao/pom.xml

@@ -16,12 +16,13 @@
   ~ limitations under the License.
   -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
         <groupId>org.apache.dolphinscheduler</groupId>
         <artifactId>dolphinscheduler</artifactId>
-        <version>1.3.2-SNAPSHOT</version>
+        <version>1.3.4-SNAPSHOT</version>
     </parent>
     <artifactId>dolphinscheduler-dao</artifactId>
     <name>${project.artifactId}</name>

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java

@@ -131,6 +131,7 @@ public class Command {
                    WarningType warningType,
                    int warningGroupId,
                    Date scheduleTime,
+                   String workerGroup,
                    Priority processInstancePriority) {
         this.commandType = commandType;
         this.executorId = executorId;
@@ -143,6 +144,7 @@ public class Command {
         this.failureStrategy = failureStrategy;
         this.startTime = new Date();
         this.updateTime = new Date();
+        this.workerGroup = workerGroup;
         this.processInstancePriority = processInstancePriority;
     }
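The earlier constructor parameters fall outside this hunk, so the full Command signature is not shown here; the shape of the change is simply that workerGroup is now threaded through the constructor instead of being set afterwards. A reduced sketch of that pattern:

class CommandSketch {
    private final String workerGroup;

    CommandSketch(String workerGroup) {
        this.workerGroup = workerGroup; // e.g. "default"
    }

    String getWorkerGroup() {
        return workerGroup;
    }
}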

14
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java

@@ -82,6 +82,20 @@ public interface ProcessInstanceMapper extends BaseMapper<ProcessInstance> {
      * @param endTime endTime
      * @return process instance IPage
      */
+
+    /**
+     * process instance page
+     * @param page page
+     * @param projectId projectId
+     * @param processDefinitionId processDefinitionId
+     * @param searchVal searchVal
+     * @param executorId executorId
+     * @param statusArray statusArray
+     * @param host host
+     * @param startTime startTime
+     * @param endTime endTime
+     * @return process instance page
+     */
     IPage<ProcessInstance> queryProcessInstanceListPaging(Page<ProcessInstance> page,
                                                           @Param("projectId") int projectId,
                                                           @Param("processDefinitionId") Integer processDefinitionId,

7
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java

@@ -100,5 +100,12 @@ public interface UdfFuncMapper extends BaseMapper<UdfFunc> {
      */
     List<UdfFunc> listAuthorizedUdfByResourceId(@Param("userId") int userId, @Param("resourceIds") int[] resourceIds);
 
+    /**
+     * batch update udf func
+     * @param udfFuncList udf list
+     * @return update num
+     */
+    int batchUpdateUdfFunc(@Param("udfFuncList") List<UdfFunc> udfFuncList);
+
 }
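batchUpdateUdfFunc lets callers persist a list of modified UDF rows in a single mapper call (the SQL itself lives in the mapper XML, which is outside this diff). An illustrative consumption pattern with simplified stand-in types, not the project's service code:

import java.util.List;

interface UdfFuncBatchMapper<T> {
    /** @return number of rows updated */
    int batchUpdateUdfFunc(List<T> udfFuncList);
}

class BatchUpdateSketch {
    static <T> int updateAll(UdfFuncBatchMapper<T> mapper, List<T> udfFuncList) {
        // one mapper call instead of one UPDATE per row
        return udfFuncList.isEmpty() ? 0 : mapper.batchUpdateUdfFunc(udfFuncList);
    }
}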

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java

@@ -117,6 +117,8 @@ public class DolphinSchedulerManager {
                 upgradeDao.upgradeDolphinScheduler(schemaDir);
                 if ("1.3.0".equals(schemaVersion)) {
                     upgradeDao.upgradeDolphinSchedulerWorkerGroup();
+                } else if ("1.3.2".equals(schemaVersion)) {
+                    upgradeDao.upgradeDolphinSchedulerResourceList();
                 }
                 version = schemaVersion;
             }

69
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java

@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.dao.upgrade;
+
+import org.apache.dolphinscheduler.common.utils.ConnectionUtils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * resource dao
+ */
+public class ResourceDao {
+    public static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDao.class);
+
+    /**
+     * list all resources
+     *
+     * @param conn connection
+     * @return map that key is full_name and value is id
+     */
+    Map<String, Integer> listAllResources(Connection conn) {
+        Map<String, Integer> resourceMap = new HashMap<>();
+
+        String sql = String.format("SELECT id,full_name FROM t_ds_resources");
+        ResultSet rs = null;
+        PreparedStatement pstmt = null;
+        try {
+            pstmt = conn.prepareStatement(sql);
+            rs = pstmt.executeQuery();
+
+            while (rs.next()) {
+                Integer id = rs.getInt(1);
+                String fullName = rs.getString(2);
+                resourceMap.put(fullName, id);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException("sql: " + sql, e);
+        } finally {
+            ConnectionUtils.releaseResource(rs, pstmt, conn);
+        }
+
+        return resourceMap;
+    }
+}
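The full_name-to-id map this DAO returns feeds the 1.3.2 resource-list migration in UpgradeDao below: legacy task json referenced resources by bare name with id 0, so the upgrade normalizes the name to a leading-slash full name and resolves the real id. A minimal sketch of that lookup, with a hypothetical resource path:

import java.util.HashMap;
import java.util.Map;

class ResourceLookupSketch {
    public static void main(String[] args) {
        Map<String, Integer> resourceMap = new HashMap<>();
        resourceMap.put("/udf/demo.jar", 42); // as returned by listAllResources

        String res = "udf/demo.jar";          // legacy reference without the slash
        String fullName = res.startsWith("/") ? res : String.format("/%s", res);
        Integer id = resourceMap.get(fullName);
        System.out.println(fullName + " -> " + id); // /udf/demo.jar -> 42
    }
}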

190
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java

@@ -16,10 +16,10 @@
  */
 package org.apache.dolphinscheduler.dao.upgrade;
 
-import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.dolphinscheduler.common.enums.DbType;
+import org.apache.dolphinscheduler.common.process.ResourceInfo;
 import org.apache.dolphinscheduler.common.utils.*;
 import org.apache.dolphinscheduler.dao.AbstractBaseDao;
 import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
@@ -34,7 +34,9 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.text.MessageFormat;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 public abstract class UpgradeDao extends AbstractBaseDao {
@@ -88,7 +90,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * init schema
      */
-    public void initSchema(){
+    public void initSchema() {
         DbType dbType = getDbType();
         String initSqlPath = "";
         if (dbType != null) {
@@ -111,6 +113,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * init scheam
+     *
      * @param initSqlPath initSqlPath
      */
     public void initSchema(String initSqlPath) {
@@ -126,6 +129,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * run DML
+     *
      * @param initSqlPath initSqlPath
      */
     private void runInitDML(String initSqlPath) {
@@ -148,20 +152,20 @@
             try {
                 conn.rollback();
             } catch (SQLException e1) {
-                logger.error(e1.getMessage(),e1);
+                logger.error(e1.getMessage(), e1);
             }
-            logger.error(e.getMessage(),e);
-            throw new RuntimeException(e.getMessage(),e);
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e.getMessage(), e);
         } catch (Exception e) {
             try {
                 if (null != conn) {
                     conn.rollback();
                 }
             } catch (SQLException e1) {
-                logger.error(e1.getMessage(),e1);
+                logger.error(e1.getMessage(), e1);
             }
-            logger.error(e.getMessage(),e);
-            throw new RuntimeException(e.getMessage(),e);
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e.getMessage(), e);
         } finally {
             ConnectionUtils.releaseResource(conn);
@@ -171,6 +175,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * run DDL
+     *
      * @param initSqlPath initSqlPath
      */
     private void runInitDDL(String initSqlPath) {
@@ -189,12 +194,12 @@ public abstract class UpgradeDao extends AbstractBaseDao {
         } catch (IOException e) {
-            logger.error(e.getMessage(),e);
-            throw new RuntimeException(e.getMessage(),e);
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e.getMessage(), e);
         } catch (Exception e) {
-            logger.error(e.getMessage(),e);
-            throw new RuntimeException(e.getMessage(),e);
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e.getMessage(), e);
         } finally {
             ConnectionUtils.releaseResource(conn);
@@ -204,6 +209,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * determines whether a table exists
+     *
      * @param tableName tableName
      * @return if table exist return true, else return false
      */
@@ -211,20 +217,22 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * determines whether a field exists in the specified table
+     *
      * @param tableName tableName
     * @param columnName columnName
      * @return if column name exist return true, else return false
      */
-    public abstract boolean isExistsColumn(String tableName,String columnName);
+    public abstract boolean isExistsColumn(String tableName, String columnName);
 
     /**
      * get current version
+     *
      * @param versionName versionName
      * @return version
      */
     public String getCurrentVersion(String versionName) {
-        String sql = String.format("select version from %s",versionName);
+        String sql = String.format("select version from %s", versionName);
         Connection conn = null;
         ResultSet rs = null;
         PreparedStatement pstmt = null;
@@ -241,7 +249,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
             return version;
         } catch (SQLException e) {
-            logger.error(e.getMessage(),e);
+            logger.error(e.getMessage(), e);
             throw new RuntimeException("sql: " + sql, e);
         } finally {
             ConnectionUtils.releaseResource(rs, pstmt, conn);
@@ -251,6 +259,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     /**
      * upgrade DolphinScheduler
+     *
      * @param schemaDir schema dir
      */
     public void upgradeDolphinScheduler(String schemaDir) {
@@ -268,52 +277,121 @@ public abstract class UpgradeDao extends AbstractBaseDao {
     public void upgradeDolphinSchedulerWorkerGroup() {
         updateProcessDefinitionJsonWorkerGroup();
     }
 
+    /**
+     * upgrade DolphinScheduler resource list
+     * ds-1.3.2 modify the resource list for process definition json
+     */
+    public void upgradeDolphinSchedulerResourceList() {
+        updateProcessDefinitionJsonResourceList();
+    }
+
     /**
      * updateProcessDefinitionJsonWorkerGroup
      */
-    protected void updateProcessDefinitionJsonWorkerGroup(){
+    protected void updateProcessDefinitionJsonWorkerGroup() {
         WorkerGroupDao workerGroupDao = new WorkerGroupDao();
         ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao();
-        Map<Integer,String> replaceProcessDefinitionMap = new HashMap<>();
+        Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
         try {
             Map<Integer, String> oldWorkerGroupMap = workerGroupDao.queryAllOldWorkerGroup(dataSource.getConnection());
-            Map<Integer,String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
-            for (Map.Entry<Integer,String> entry : processDefinitionJsonMap.entrySet()){
+            Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
+            for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
                 ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
                 ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString());
-                for (int i = 0 ;i < tasks.size() ; i++){
+                for (int i = 0; i < tasks.size(); i++) {
                     ObjectNode task = (ObjectNode) tasks.path(i);
                     ObjectNode workerGroupNode = (ObjectNode) task.path("workerGroupId");
                     Integer workerGroupId = -1;
-                    if(workerGroupNode != null && workerGroupNode.canConvertToInt()){
+                    if (workerGroupNode != null && workerGroupNode.canConvertToInt()) {
                         workerGroupId = workerGroupNode.asInt(-1);
                     }
                     if (workerGroupId == -1) {
                         task.put("workerGroup", "default");
-                    }else {
+                    } else {
                         task.put("workerGroup", oldWorkerGroupMap.get(workerGroupId));
                     }
                 }
                 jsonObject.remove("task");
-                jsonObject.put("tasks",tasks);
-                replaceProcessDefinitionMap.put(entry.getKey(),jsonObject.toString());
+                jsonObject.put("tasks", tasks);
+                replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
             }
-            if (replaceProcessDefinitionMap.size() > 0){
-                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(),replaceProcessDefinitionMap);
+            if (replaceProcessDefinitionMap.size() > 0) {
+                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
             }
-        }catch (Exception e){
-            logger.error("update process definition json workergroup error",e);
+        } catch (Exception e) {
+            logger.error("update process definition json workergroup error", e);
         }
     }
 
+    /**
+     * updateProcessDefinitionJsonResourceList
+     */
+    protected void updateProcessDefinitionJsonResourceList() {
+        ResourceDao resourceDao = new ResourceDao();
+        ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao();
+        Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
+        try {
+            Map<String, Integer> resourcesMap = resourceDao.listAllResources(dataSource.getConnection());
+            Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
+            for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
+                ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
+                ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString());
+                for (int i = 0; i < tasks.size(); i++) {
+                    ObjectNode task = (ObjectNode) tasks.get(i);
+                    ObjectNode param = (ObjectNode) task.get("params");
+                    if (param != null) {
+                        List<ResourceInfo> resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
+                        ResourceInfo mainJar = JSONUtils.parseObject(param.get("mainJar").toString(), ResourceInfo.class);
+                        if (mainJar != null && mainJar.getId() == 0) {
+                            String fullName = mainJar.getRes().startsWith("/") ? mainJar.getRes() : String.format("/%s", mainJar.getRes());
+                            if (resourcesMap.containsKey(fullName)) {
+                                mainJar.setId(resourcesMap.get(fullName));
+                                param.put("mainJar", JSONUtils.parseObject(JSONUtils.toJsonString(mainJar)));
+                            }
+                        }
+                        if (CollectionUtils.isNotEmpty(resourceList)) {
+                            List<ResourceInfo> newResourceList = resourceList.stream().map(resInfo -> {
+                                String fullName = resInfo.getRes().startsWith("/") ? resInfo.getRes() : String.format("/%s", resInfo.getRes());
+                                if (resInfo.getId() == 0 && resourcesMap.containsKey(fullName)) {
+                                    resInfo.setId(resourcesMap.get(fullName));
+                                }
+                                return resInfo;
+                            }).collect(Collectors.toList());
+                            param.put("resourceList", JSONUtils.parseObject(JSONUtils.toJsonString(newResourceList)));
+                        }
+                    }
+                    task.put("params", param);
+                }
+                jsonObject.remove("tasks");
+                jsonObject.put("tasks", tasks);
+                replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
+            }
+            if (replaceProcessDefinitionMap.size() > 0) {
+                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
+            }
+        } catch (Exception e) {
+            logger.error("update process definition json resource list error", e);
+        }
+    }
+
     /**
      * upgradeDolphinScheduler DML
+     *
      * @param schemaDir schemaDir
      */
     private void upgradeDolphinSchedulerDML(String schemaDir) {
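The resource-list migration above walks every stored process definition json, patches ids of 0 inside "mainJar" and "resourceList" using the ResourceDao lookup, and writes the json back only when something changed. A minimal Jackson sketch of the per-task rewrite, using only jackson-databind (the library the upgrade code already imports) and a hypothetical resolved id:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

class ResourceListMigrationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String taskJson = "{\"params\":{\"resourceList\":[{\"id\":0,\"res\":\"udf/demo.jar\"}]}}";
        ObjectNode task = (ObjectNode) mapper.readTree(taskJson);

        for (JsonNode resNode : task.path("params").path("resourceList")) {
            ObjectNode res = (ObjectNode) resNode;
            if (res.path("id").asInt() == 0) {
                res.put("id", 42); // id resolved via ResourceDao.listAllResources (assumed)
            }
        }
        System.out.println(task); // {"params":{"resourceList":[{"id":42,"res":"udf/demo.jar"}]}}
    }
}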
@ -321,8 +399,8 @@ public abstract class UpgradeDao extends AbstractBaseDao {
if (StringUtils.isEmpty(rootDir)) { if (StringUtils.isEmpty(rootDir)) {
throw new RuntimeException("Environment variable user.dir not found"); throw new RuntimeException("Environment variable user.dir not found");
} }
String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql", rootDir, schemaDir, getDbType().name().toLowerCase());
logger.info("sqlSQLFilePath"+sqlFilePath); logger.info("sqlSQLFilePath" + sqlFilePath);
Connection conn = null; Connection conn = null;
PreparedStatement pstmt = null; PreparedStatement pstmt = null;
try { try {
@ -334,13 +412,13 @@ public abstract class UpgradeDao extends AbstractBaseDao {
scriptRunner.runScript(sqlReader);
if (isExistsTable(T_VERSION_NAME)) {
// Change version in the version table to the new version
String upgradeSQL = String.format("update %s set version = ?", T_VERSION_NAME);
pstmt = conn.prepareStatement(upgradeSQL);
pstmt.setString(1, schemaVersion);
pstmt.executeUpdate();
} else if (isExistsTable(T_NEW_VERSION_NAME)) {
// Change version in the version table to the new version
String upgradeSQL = String.format("update %s set version = ?", T_NEW_VERSION_NAME);
pstmt = conn.prepareStatement(upgradeSQL);
pstmt.setString(1, schemaVersion);
pstmt.executeUpdate();
@ -350,38 +428,38 @@ public abstract class UpgradeDao extends AbstractBaseDao {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(), e1);
}
logger.error(e.getMessage(), e);
throw new RuntimeException("sql file not found ", e);
} catch (IOException e) {
try {
conn.rollback();
} catch (SQLException e1) {
logger.error(e1.getMessage(), e1);
}
logger.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} catch (SQLException e) {
try {
if (null != conn) {
conn.rollback();
}
} catch (SQLException e1) {
logger.error(e1.getMessage(), e1);
}
logger.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} catch (Exception e) {
try {
if (null != conn) {
conn.rollback();
}
} catch (SQLException e1) {
logger.error(e1.getMessage(), e1);
}
logger.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} finally {
ConnectionUtils.releaseResource(pstmt, conn);
}
@ -390,13 +468,14 @@ public abstract class UpgradeDao extends AbstractBaseDao {
/**
* upgradeDolphinScheduler DDL
*
* @param schemaDir schemaDir
*/
private void upgradeDolphinSchedulerDDL(String schemaDir) {
if (StringUtils.isEmpty(rootDir)) {
throw new RuntimeException("Environment variable user.dir not found");
}
String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql", rootDir, schemaDir, getDbType().name().toLowerCase());
Connection conn = null;
PreparedStatement pstmt = null;
try {
@ -411,20 +490,20 @@ public abstract class UpgradeDao extends AbstractBaseDao {
} catch (FileNotFoundException e) {
logger.error(e.getMessage(), e);
throw new RuntimeException("sql file not found ", e);
} catch (IOException e) {
logger.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} catch (SQLException e) {
logger.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} finally {
ConnectionUtils.releaseResource(pstmt, conn);
}
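Both the DML and DDL paths share one shape: resolve {user.dir}/sql/upgrade/{schemaDir}/{dbtype}/..., stream the script through a runner, then bump the row in the version table. A minimal sketch of that flow using MyBatis' ScriptRunner in place of the project's own runner (connection URL, credentials, and schemaDir are illustrative assumptions):

import org.apache.ibatis.jdbc.ScriptRunner;

import java.io.FileReader;
import java.io.Reader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.text.MessageFormat;

public class UpgradeFlowSketch {
    public static void main(String[] args) throws Exception {
        String rootDir = System.getProperty("user.dir");
        // same path convention as upgradeDolphinSchedulerDDL above
        String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql",
                rootDir, "1.3.0_schema", "mysql");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://127.0.0.1:3306/dolphinscheduler", "root", "root");
                Reader sqlReader = new FileReader(sqlFilePath)) {
            ScriptRunner scriptRunner = new ScriptRunner(conn);
            scriptRunner.runScript(sqlReader);
            // change version in the version table to the new version
            try (PreparedStatement pstmt = conn.prepareStatement("update t_ds_version set version = ?")) {
                pstmt.setString(1, "1.3.0");
                pstmt.executeUpdate();
            }
        }
    }
}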
@ -434,15 +513,16 @@ public abstract class UpgradeDao extends AbstractBaseDao {
/**
* update version
*
* @param version version
*/
public void updateVersion(String version) {
// Change version in the version table to the new version
String versionName = T_VERSION_NAME;
if (!SchemaUtils.isAGreatVersion("1.2.0", version)) {
versionName = "t_ds_version";
}
String upgradeSQL = String.format("update %s set version = ?", versionName);
PreparedStatement pstmt = null;
Connection conn = null;
try {
@ -452,7 +532,7 @@ public abstract class UpgradeDao extends AbstractBaseDao {
pstmt.executeUpdate();
} catch (SQLException e) {
logger.error(e.getMessage(), e);
throw new RuntimeException("sql: " + upgradeSQL, e);
} finally {
ConnectionUtils.releaseResource(pstmt, conn);

226
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java

@ -22,11 +22,13 @@ import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -45,6 +47,7 @@ public class DagHelper {
/**
* generate flow node relation list by task node list;
* Edges that are not in the task Node List will not be added to the result
*
* @param taskNodeList taskNodeList
* @return task node relation list
*/
@ -66,6 +69,7 @@ public class DagHelper {
/**
* generate task nodes needed by dag
*
* @param taskNodeList taskNodeList
* @param startNodeNameList startNodeNameList
* @param recoveryNodeNameList recoveryNodeNameList
@ -77,8 +81,8 @@ public class DagHelper {
List<TaskNode> destFlowNodeList = new ArrayList<>();
List<String> startNodeList = startNodeNameList;
if (taskDependType != TaskDependType.TASK_POST
&& CollectionUtils.isEmpty(startNodeList)) {
logger.error("start node list is empty! cannot continue run the process ");
return destFlowNodeList;
}
@ -126,6 +130,7 @@ public class DagHelper {
/**
* find all the nodes that depend on the start node
*
* @param startNode startNode
* @param taskNodeList taskNodeList
* @return task node list
@ -150,6 +155,7 @@ public class DagHelper {
/**
* find all nodes that start nodes depend on.
*
* @param startNode startNode
* @param recoveryNodeNameList recoveryNodeNameList
* @param taskNodeList taskNodeList
@ -165,9 +171,6 @@ public class DagHelper {
resultList.add(startNode);
}
if (CollectionUtils.isEmpty(depList)) {
return resultList;
}
for (String depNodeName : depList) {
@ -187,6 +190,7 @@ public class DagHelper {
/**
* generate dag by start nodes and recovery nodes
*
* @param processDefinitionJson processDefinitionJson
* @param startNodeNameList startNodeNameList
* @param recoveryNodeNameList recoveryNodeNameList
@ -217,10 +221,11 @@ public class DagHelper {
/**
* parse the forbidden task nodes in process definition.
*
* @param processDefinitionJson processDefinitionJson
* @return task node map
*/
public static Map<String, TaskNode> getForbiddenTaskNodeMaps(String processDefinitionJson) {
Map<String, TaskNode> forbidTaskNodeMap = new ConcurrentHashMap<>();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
@ -228,8 +233,8 @@ public class DagHelper {
if (null != processData) {
taskNodeList = processData.getTasks();
}
for (TaskNode node : taskNodeList) {
if (node.isForbidden()) {
forbidTaskNodeMap.putIfAbsent(node.getName(), node);
}
}
@ -239,6 +244,7 @@ public class DagHelper {
/**
* find node by node name
*
* @param nodeDetails nodeDetails
* @param nodeName nodeName
* @return task node
@ -252,75 +258,154 @@ public class DagHelper {
return null;
}
/**
* the task can be submit when all the depends nodes are forbidden or complete
*
* @param taskNode taskNode
* @param dag dag
* @param completeTaskList completeTaskList
* @return can submit
*/
public static boolean allDependsForbiddenOrEnd(TaskNode taskNode,
DAG<String, TaskNode, TaskNodeRelation> dag,
Map<String, TaskNode> skipTaskNodeList,
Map<String, TaskInstance> completeTaskList) {
List<String> dependList = taskNode.getDepList();
if (dependList == null) {
return true;
}
for (String dependNodeName : dependList) {
TaskNode dependNode = dag.getNode(dependNodeName);
if (completeTaskList.containsKey(dependNodeName)
|| dependNode.isForbidden()
|| skipTaskNodeList.containsKey(dependNodeName)) {
continue;
} else {
return false;
}
}
return true;
}
/**
* parse the successor nodes of the previous node.
* this function parses a condition node to find the taken branch,
* and also checks that all depends nodes are forbidden or complete.
*
* @param preNodeName
* @return successor nodes
*/
public static Set<String> parsePostNodes(String preNodeName,
Map<String, TaskNode> skipTaskNodeList,
DAG<String, TaskNode, TaskNodeRelation> dag,
Map<String, TaskInstance> completeTaskList) {
Set<String> postNodeList = new HashSet<>();
Collection<String> startVertexes = new ArrayList<>();
if (preNodeName == null) {
startVertexes = dag.getBeginNode();
} else if (dag.getNode(preNodeName).isConditionsTask()) {
List<String> conditionTaskList = parseConditionTask(preNodeName, skipTaskNodeList, dag, completeTaskList);
startVertexes.addAll(conditionTaskList);
} else {
startVertexes = dag.getSubsequentNodes(preNodeName);
}
for (String subsequent : startVertexes) {
TaskNode taskNode = dag.getNode(subsequent);
if (isTaskNodeNeedSkip(taskNode, skipTaskNodeList)) {
setTaskNodeSkip(subsequent, dag, completeTaskList, skipTaskNodeList);
continue;
}
if (!DagHelper.allDependsForbiddenOrEnd(taskNode, dag, skipTaskNodeList, completeTaskList)) {
continue;
}
if (taskNode.isForbidden() || completeTaskList.containsKey(subsequent)) {
postNodeList.addAll(parsePostNodes(subsequent, skipTaskNodeList, dag, completeTaskList));
continue;
}
postNodeList.add(subsequent);
}
return postNodeList;
}
/**
* if all of the task's dependences are skipped, skip it too.
*
* @param taskNode
* @return
*/
private static boolean isTaskNodeNeedSkip(TaskNode taskNode,
Map<String, TaskNode> skipTaskNodeList
) {
if (CollectionUtils.isEmpty(taskNode.getDepList())) {
return false;
}
for (String depNode : taskNode.getDepList()) {
if (!skipTaskNodeList.containsKey(depNode)) {
return false;
}
}
return true;
}
/**
* parse a condition task to find the branch to process,
* and set the skip flag on the other one.
*
* @param nodeName
* @return
*/
public static List<String> parseConditionTask(String nodeName,
Map<String, TaskNode> skipTaskNodeList,
DAG<String, TaskNode, TaskNodeRelation> dag,
Map<String, TaskInstance> completeTaskList) {
List<String> conditionTaskList = new ArrayList<>();
TaskNode taskNode = dag.getNode(nodeName);
if (!taskNode.isConditionsTask()) {
return conditionTaskList;
}
if (!completeTaskList.containsKey(nodeName)) {
return conditionTaskList;
}
TaskInstance taskInstance = completeTaskList.get(nodeName);
ConditionsParameters conditionsParameters =
JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class);
List<String> skipNodeList = new ArrayList<>();
if (taskInstance.getState().typeIsSuccess()) {
conditionTaskList = conditionsParameters.getSuccessNode();
skipNodeList = conditionsParameters.getFailedNode();
} else if (taskInstance.getState().typeIsFailure()) {
conditionTaskList = conditionsParameters.getFailedNode();
skipNodeList = conditionsParameters.getSuccessNode();
} else {
conditionTaskList.add(nodeName);
}
for (String failedNode : skipNodeList) {
setTaskNodeSkip(failedNode, dag, completeTaskList, skipTaskNodeList);
}
return conditionTaskList;
}
/**
* set the skip flag on a task node and its post nodes
*
* @param skipNodeName
* @param dag
* @param completeTaskList
* @param skipTaskNodeList
*/
private static void setTaskNodeSkip(String skipNodeName,
DAG<String, TaskNode, TaskNodeRelation> dag,
Map<String, TaskInstance> completeTaskList,
Map<String, TaskNode> skipTaskNodeList) {
skipTaskNodeList.putIfAbsent(skipNodeName, dag.getNode(skipNodeName));
Collection<String> postNodeList = dag.getSubsequentNodes(skipNodeName);
for (String post : postNodeList) {
TaskNode postNode = dag.getNode(post);
if (isTaskNodeNeedSkip(postNode, skipTaskNodeList)) {
setTaskNodeSkip(post, dag, completeTaskList, skipTaskNodeList);
}
}
}
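Taken together these methods replace the old getStartVertex/taskNodeCanSubmit pair: parsePostNodes walks forward from preNodeName, routes condition tasks to the taken branch, and transitively skips the untaken one. A minimal usage sketch, e.g. inside a JUnit method with the same imports as DagHelperTest below plus java.util.Collections (node names are illustrative):

// build A -> B, then ask what may run once A completes
DAG<String, TaskNode, TaskNodeRelation> dag = new DAG<>();
TaskNode a = new TaskNode();
a.setName("A");
a.setType("SHELL");
TaskNode b = new TaskNode();
b.setName("B");
b.setType("SHELL");
b.setDepList(Collections.singletonList("A"));
dag.addNode("A", a);
dag.addNode("B", b);
dag.addEdge("A", "B");

Map<String, TaskInstance> completeTaskList = new HashMap<>();
completeTaskList.put("A", new TaskInstance());
Set<String> post = DagHelper.parsePostNodes("A", new HashMap<>(), dag, completeTaskList);
// post contains only "B": A is complete and B has no unfinished, unskipped dependency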
@ -331,19 +416,19 @@ public class DagHelper {
*/
public static DAG<String, TaskNode, TaskNodeRelation> buildDagGraph(ProcessDag processDag) {
DAG<String, TaskNode, TaskNodeRelation> dag = new DAG<>();
//add vertex
if (CollectionUtils.isNotEmpty(processDag.getNodes())) {
for (TaskNode node : processDag.getNodes()) {
dag.addNode(node.getName(), node);
}
}
//add edge
if (CollectionUtils.isNotEmpty(processDag.getEdges())) {
for (TaskNodeRelation edge : processDag.getEdges()) {
dag.addEdge(edge.getStartNode(), edge.getEndNode());
}
}
return dag;
@ -351,6 +436,7 @@ public class DagHelper {
/**
* get process dag
*
* @param taskNodeList task node list
* @return Process dag
*/
@ -378,21 +464,22 @@ public class DagHelper {
/**
* whether there is a conditions task after the parent node
*
* @param parentNodeName
* @return
*/
public static boolean haveConditionsAfterNode(String parentNodeName,
DAG<String, TaskNode, TaskNodeRelation> dag
) {
boolean result = false;
Set<String> subsequentNodes = dag.getSubsequentNodes(parentNodeName);
if (CollectionUtils.isEmpty(subsequentNodes)) {
return result;
}
for (String nodeName : subsequentNodes) {
TaskNode taskNode = dag.getNode(nodeName);
List<String> preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class);
if (preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()) {
return true;
}
}
@ -401,19 +488,20 @@ public class DagHelper {
/**
* whether there is a conditions task after the parent node
*
* @param parentNodeName
* @return
*/
public static boolean haveConditionsAfterNode(String parentNodeName,
List<TaskNode> taskNodes
) {
boolean result = false;
if (CollectionUtils.isEmpty(taskNodes)) {
return result;
}
for (TaskNode taskNode : taskNodes) {
List<String> preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class);
if (preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()) {
return true;
}
}

13
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml

@ -111,4 +111,17 @@
</foreach>
</if>
</select>
<update id="batchUpdateUdfFunc" parameterType="java.util.List">
<foreach collection="udfFuncList" item="udf" index="index" open="" close="" separator=";">
update t_ds_udfs
<set>
resource_name=#{udf.resourceName},
update_time=#{udf.updateTime}
</set>
<where>
id=#{udf.id}
</where>
</foreach>
</update>
</mapper>
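The new batchUpdateUdfFunc statement emits one semicolon-separated UPDATE per element of udfFuncList, so on MySQL the connection must permit multi-statement execution. A sketch of the matching mapper method and a call site; the @Param name must match the <foreach> collection, and the allowMultiQueries flag is an assumption about the datasource configuration, not something this commit adds:

// UdfFuncMapper.java (sketch)
public interface UdfFuncMapper extends BaseMapper<UdfFunc> {
    int batchUpdateUdfFunc(@Param("udfFuncList") List<UdfFunc> udfFuncList);
}

// call site: rename the resource, stamp update_time, flush in one round trip
udfFunc.setResourceName("/updateTest");
udfFunc.setUpdateTime(new Date());
int updated = udfFuncMapper.batchUpdateUdfFunc(Collections.singletonList(udfFunc));

// datasource.properties (illustrative):
// spring.datasource.url=jdbc:mysql://127.0.0.1:3306/dolphinscheduler?allowMultiQueries=true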

57
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java

@ -19,11 +19,13 @@ package org.apache.dolphinscheduler.dao.mapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -33,6 +35,7 @@ import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
@ -56,9 +59,10 @@ public class UdfFuncMapperTest {
/**
* insert one udf
*
* @return UdfFunc
*/
private UdfFunc insertOne() {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setUserId(1);
udfFunc.setFuncName("dolphin_udf_func");
@ -74,9 +78,10 @@ public class UdfFuncMapperTest {
/**
* insert one udf
*
* @return
*/
private UdfFunc insertOne(User user) {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setUserId(user.getId());
udfFunc.setFuncName("dolphin_udf_func");
@ -92,9 +97,10 @@ public class UdfFuncMapperTest {
/**
* insert one user
*
* @return User
*/
private User insertOneUser() {
User user = new User();
user.setUserName("user1");
user.setUserPassword("1");
@ -109,9 +115,10 @@ public class UdfFuncMapperTest {
/**
* insert one user
*
* @return User
*/
private User insertOneUser(String userName) {
User user = new User();
user.setUserName(userName);
user.setUserPassword("1");
@ -126,11 +133,12 @@ public class UdfFuncMapperTest {
/**
* insert UDFUser
*
* @param user user
* @param udfFunc udf func
* @return UDFUser
*/
private UDFUser insertOneUDFUser(User user, UdfFunc udfFunc) {
UDFUser udfUser = new UDFUser();
udfUser.setUdfId(udfFunc.getId());
udfUser.setUserId(user.getId());
@ -142,9 +150,10 @@ public class UdfFuncMapperTest {
/**
* create general user
*
* @return User
*/
private User createGeneralUser(String userName) {
User user = new User();
user.setUserName(userName);
user.setUserPassword("1");
@ -161,7 +170,7 @@ public class UdfFuncMapperTest {
* test update
*/
@Test
public void testUpdate() {
//insertOne
UdfFunc udfFunc = insertOne();
udfFunc.setResourceName("dolphin_resource_update");
@ -178,7 +187,7 @@ public class UdfFuncMapperTest {
* test delete
*/
@Test
public void testDelete() {
//insertOne
UdfFunc udfFunc = insertOne();
//delete
@ -190,7 +199,7 @@ public class UdfFuncMapperTest {
* test query
*/
@Test
public void testQuery() {
//insertOne
UdfFunc udfFunc = insertOne();
//query
@ -207,9 +216,9 @@ public class UdfFuncMapperTest {
UdfFunc udfFunc = insertOne();
//insertOne
UdfFunc udfFunc1 = insertOne();
int[] idArray = new int[]{udfFunc.getId(), udfFunc1.getId()};
//queryUdfByIdStr
List<UdfFunc> udfFuncList = udfFuncMapper.queryUdfByIdStr(idArray, "");
Assert.assertNotEquals(udfFuncList.size(), 0);
}
@ -223,8 +232,8 @@ public class UdfFuncMapperTest {
//insertOne
UdfFunc udfFunc = insertOne(user);
//queryUdfFuncPaging
Page<UdfFunc> page = new Page(1, 3);
IPage<UdfFunc> udfFuncIPage = udfFuncMapper.queryUdfFuncPaging(page, user.getId(), "");
Assert.assertNotEquals(udfFuncIPage.getTotal(), 0);
}
@ -279,7 +288,7 @@ public class UdfFuncMapperTest {
}
@Test
public void testListAuthorizedUdfFunc() {
//create general user
User generalUser1 = createGeneralUser("user1");
User generalUser2 = createGeneralUser("user2");
@ -289,18 +298,30 @@ public class UdfFuncMapperTest {
UdfFunc unauthorizdUdfFunc = insertOne(generalUser2);
//udf function ids
Integer[] udfFuncIds = new Integer[]{udfFunc.getId(), unauthorizdUdfFunc.getId()};
List<UdfFunc> authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds);
Assert.assertEquals(generalUser1.getId(), udfFunc.getUserId());
Assert.assertNotEquals(generalUser1.getId(), unauthorizdUdfFunc.getUserId());
Assert.assertFalse(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(udfFuncIds)));
//authorize object unauthorizdUdfFunc to generalUser1
insertOneUDFUser(generalUser1, unauthorizdUdfFunc);
authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds);
Assert.assertTrue(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(udfFuncIds)));
}
@Test
public void batchUpdateUdfFuncTest() {
//create general user
User generalUser1 = createGeneralUser("user1");
UdfFunc udfFunc = insertOne(generalUser1);
udfFunc.setResourceName("/updateTest");
List<UdfFunc> udfFuncList = new ArrayList<>();
udfFuncList.add(udfFunc);
Assert.assertTrue(udfFuncMapper.batchUpdateUdfFunc(udfFuncList) > 0);
}
}

250
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java

@ -18,7 +18,9 @@
package org.apache.dolphinscheduler.dao.utils;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
@ -27,6 +29,7 @@ import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.junit.Assert;
import org.junit.Test;
@ -34,6 +37,8 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* dag helper test
@ -41,15 +46,17 @@ import java.util.Map;
public class DagHelperTest {
/**
* test task node can submit
*
* @throws JsonProcessingException if error throws JsonProcessingException
*/
@Test
public void testTaskNodeCanSubmit() throws JsonProcessingException {
//1->2->3->5->7
//4->3->6
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
TaskNode taskNode3 = dag.getNode("3");
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
completeTaskList.putIfAbsent("1", new TaskInstance());
Boolean canSubmit = false;
@ -58,27 +65,206 @@ public class DagHelperTest {
node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
TaskNode nodex = dag.getNode("4");
nodex.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList);
Assert.assertEquals(canSubmit, true);
// 2 forbidden, 3 cannot be submitted
completeTaskList.putIfAbsent("2", new TaskInstance());
TaskNode nodey = dag.getNode("4");
nodey.setRunFlag("");
canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList);
Assert.assertEquals(canSubmit, false);
// 2/3 forbidden, submit 5
TaskNode node3 = dag.getNode("3");
node3.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
TaskNode node8 = dag.getNode("8");
node8.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
TaskNode node5 = dag.getNode("5");
canSubmit = DagHelper.allDependsForbiddenOrEnd(node5, dag, skipNodeList, completeTaskList);
Assert.assertEquals(canSubmit, true);
}
/**
* test parse post node list
*/
@Test
public void testParsePostNodeList() throws JsonProcessingException {
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
Set<String> postNodes = null;
//complete : null
// expect post: 1/4
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("1"));
Assert.assertTrue(postNodes.contains("4"));
//complete : 1
// expect post: 2/4
completeTaskList.put("1", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("2"));
Assert.assertTrue(postNodes.contains("4"));
// complete : 1/2
// expect post: 4/8
completeTaskList.put("2", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("4"));
Assert.assertTrue(postNodes.contains("8"));
// complete : 1/2/4
// expect post: 3/8
completeTaskList.put("4", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("3"));
Assert.assertTrue(postNodes.contains("8"));
// complete : 1/2/4/3
// expect post: 8/6
completeTaskList.put("3", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("8"));
Assert.assertTrue(postNodes.contains("6"));
// complete : 1/2/4/3/8
// expect post: 6/5
completeTaskList.put("8", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("5"));
Assert.assertTrue(postNodes.contains("6"));
// complete : 1/2/4/3/5/6/8
// expect post: 7
completeTaskList.put("6", new TaskInstance());
completeTaskList.put("5", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("7"));
}
/**
* test forbidden post node
*
* @throws JsonProcessingException
*/
@Test
public void testForbiddenPostNode() throws JsonProcessingException {
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
Set<String> postNodes = null;
// dag: 1-2-3-5-7 4-3-6 2-8-5-7
// forbid:2 complete:1 post:4/8
completeTaskList.put("1", new TaskInstance());
TaskNode node2 = dag.getNode("2");
node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("4"));
Assert.assertTrue(postNodes.contains("8"));
//forbid:2/4 complete:1 post:3/8
TaskNode node4 = dag.getNode("4");
node4.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("3"));
Assert.assertTrue(postNodes.contains("8"));
//forbid:2/4/5 complete:1/8 post:3
completeTaskList.put("8", new TaskInstance());
TaskNode node5 = dag.getNode("5");
node5.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("3"));
}
/**
* test condition post node
*
* @throws JsonProcessingException
*/
@Test
public void testConditionPostNode() throws JsonProcessingException {
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
Set<String> postNodes = null;
// dag: 1-2-3-5-7 4-3-6 2-8-5-7
// 3-if
completeTaskList.put("1", new TaskInstance());
completeTaskList.put("2", new TaskInstance());
completeTaskList.put("4", new TaskInstance());
TaskNode node3 = dag.getNode("3");
node3.setType("CONDITIONS");
node3.setConditionResult("{\n"
+
" \"successNode\": [5\n"
+
" ],\n"
+
" \"failedNode\": [6\n"
+
" ]\n"
+
" }");
completeTaskList.remove("3");
TaskInstance taskInstance = new TaskInstance();
taskInstance.setState(ExecutionStatus.SUCCESS);
//complete 1/2/3/4 expect:8
completeTaskList.put("3", taskInstance);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("8"));
//2.complete 1/2/3/4/8 expect:5 skip:6
completeTaskList.put("8", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertTrue(postNodes.contains("5"));
Assert.assertEquals(1, skipNodeList.size());
Assert.assertTrue(skipNodeList.containsKey("6"));
// 3.complete 1/2/3/4/5/8 expect post:7 skip:6
skipNodeList.clear();
TaskInstance taskInstance1 = new TaskInstance();
taskInstance1.setState(ExecutionStatus.SUCCESS);
completeTaskList.put("5", taskInstance1);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("7"));
Assert.assertEquals(1, skipNodeList.size());
Assert.assertTrue(skipNodeList.containsKey("6"));
// dag: 1-2-3-5-7 4-3-6
// 3-if , complete:1/2/3/4
// 1.failure:3 expect post:6 skip:5/7
skipNodeList.clear();
completeTaskList.remove("3");
taskInstance = new TaskInstance();
taskInstance.setState(ExecutionStatus.FAILURE);
completeTaskList.put("3", taskInstance);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("6"));
Assert.assertEquals(2, skipNodeList.size());
Assert.assertTrue(skipNodeList.containsKey("5"));
Assert.assertTrue(skipNodeList.containsKey("7"));
}
/**
* 1->2->3->5->7
* 4->3->6
* 2->8->5->7
*
* @return dag
* @throws JsonProcessingException if error throws JsonProcessingException
*/
@ -87,11 +273,13 @@ public class DagHelperTest {
TaskNode node1 = new TaskNode();
node1.setId("1");
node1.setName("1");
node1.setType("SHELL");
taskNodeList.add(node1);
TaskNode node2 = new TaskNode();
node2.setId("2");
node2.setName("2");
node2.setType("SHELL");
List<String> dep2 = new ArrayList<>();
dep2.add("1");
node2.setDepList(dep2);
@ -101,11 +289,13 @@ public class DagHelperTest {
TaskNode node4 = new TaskNode();
node4.setId("4");
node4.setName("4");
node4.setType("SHELL");
taskNodeList.add(node4);
TaskNode node3 = new TaskNode();
node3.setId("3");
node3.setName("3");
node3.setType("SHELL");
List<String> dep3 = new ArrayList<>();
dep3.add("2");
dep3.add("4");
@ -115,29 +305,61 @@ public class DagHelperTest {
TaskNode node5 = new TaskNode();
node5.setId("5");
node5.setName("5");
node5.setType("SHELL");
List<String> dep5 = new ArrayList<>();
dep5.add("3");
dep5.add("8");
node5.setDepList(dep5);
taskNodeList.add(node5);
TaskNode node6 = new TaskNode();
node6.setId("6");
node6.setName("6");
node6.setType("SHELL");
List<String> dep6 = new ArrayList<>();
dep6.add("3");
node6.setDepList(dep6);
taskNodeList.add(node6);
TaskNode node7 = new TaskNode();
node7.setId("7");
node7.setName("7");
node7.setType("SHELL");
List<String> dep7 = new ArrayList<>();
dep7.add("5");
node7.setDepList(dep7);
taskNodeList.add(node7);
TaskNode node8 = new TaskNode();
node8.setId("8");
node8.setName("8");
node8.setType("SHELL");
List<String> dep8 = new ArrayList<>();
dep8.add("2");
node8.setDepList(dep8);
taskNodeList.add(node8);
List<String> startNodes = new ArrayList<>();
List<String> recoveryNodes = new ArrayList<>();
List<TaskNode> destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList,
startNodes, recoveryNodes, TaskDependType.TASK_POST);
List<TaskNodeRelation> taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList);
ProcessDag processDag = new ProcessDag();
processDag.setEdges(taskNodeRelations);
processDag.setNodes(destTaskNodeList);
return DagHelper.buildDagGraph(processDag);
}
@Test
public void testBuildDagGraph() {
String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\","
+ "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"},"
+ "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\","
+ "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\","
+ "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
ProcessData processData = JSONUtils.parseObject(shellJson, ProcessData.class);

2
dolphinscheduler-dist/pom.xml vendored

@ -20,7 +20,7 @@
<parent>
<artifactId>dolphinscheduler</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>1.3.4-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

2
dolphinscheduler-microbench/pom.xml

@ -21,7 +21,7 @@
<parent>
<artifactId>dolphinscheduler</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>1.3.4-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

2
dolphinscheduler-plugin-api/pom.xml

@ -23,7 +23,7 @@
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.4-SNAPSHOT</version>
</parent>
<artifactId>dolphinscheduler-plugin-api</artifactId>
<name>${project.artifactId}</name>

2
dolphinscheduler-remote/pom.xml

@ -20,7 +20,7 @@
<parent>
<artifactId>dolphinscheduler</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>1.3.4-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

10
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java

@ -78,6 +78,16 @@ public enum CommandType {
*/
TASK_EXECUTE_RESPONSE,
/**
* db task ack
*/
DB_TASK_ACK,
/**
* db task response
*/
DB_TASK_RESPONSE,
/**
* kill task
*/

74
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java

@ -0,0 +1,74 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.io.Serializable;
/**
* db task ack request command
*/
public class DBTaskAckCommand implements Serializable {
private int taskInstanceId;
private int status;
public DBTaskAckCommand(int status, int taskInstanceId) {
this.status = status;
this.taskInstanceId = taskInstanceId;
}
public int getTaskInstanceId() {
return taskInstanceId;
}
public void setTaskInstanceId(int taskInstanceId) {
this.taskInstanceId = taskInstanceId;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
/**
* package response command
*
* @return command
*/
public Command convert2Command() {
Command command = new Command();
command.setType(CommandType.DB_TASK_ACK);
byte[] body = JSONUtils.toJsonByteArray(this);
command.setBody(body);
return command;
}
@Override
public String toString() {
return "DBTaskAckCommand{" +
"taskInstanceId=" + taskInstanceId +
", status=" + status +
'}';
}
}
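A minimal sketch of the round trip this class enables, assuming the Command/JSONUtils helpers visible elsewhere in this diff (the status code and task instance id below are illustrative):

// master side: acknowledge that the task instance was persisted
DBTaskAckCommand ack = new DBTaskAckCommand(ExecutionStatus.SUCCESS.getCode(), 42);
Command frame = ack.convert2Command(); // type = CommandType.DB_TASK_ACK, body = JSON bytes

// worker side: the body is plain JSON, so decoding is symmetric
DBTaskAckCommand decoded = JSONUtils.parseObject(new String(frame.getBody()), DBTaskAckCommand.class);
// decoded.getTaskInstanceId() == 42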

71
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java

@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.io.Serializable;
/**
* db task final result response command
*/
public class DBTaskResponseCommand implements Serializable {
private int taskInstanceId;
private int status;
public DBTaskResponseCommand(int status, int taskInstanceId) {
this.status = status;
this.taskInstanceId = taskInstanceId;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
public int getTaskInstanceId() {
return taskInstanceId;
}
public void setTaskInstanceId(int taskInstanceId) {
this.taskInstanceId = taskInstanceId;
}
/**
* package response command
* @return command
*/
public Command convert2Command() {
Command command = new Command();
command.setType(CommandType.DB_TASK_RESPONSE);
byte[] body = JSONUtils.toJsonByteArray(this);
command.setBody(body);
return command;
}
@Override
public String toString() {
return "DBTaskResponseCommand{" +
"taskInstanceId=" + taskInstanceId +
", status=" + status +
'}';
}
}

2
dolphinscheduler-server/pom.xml

@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.4-SNAPSHOT</version>
</parent>
<artifactId>dolphinscheduler-server</artifactId>
<name>dolphinscheduler-server</name>

39
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java

@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.log;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.FileAppender;
import org.slf4j.Marker;
import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER;
/**
* Task log appender
*/
public class TaskLogAppender extends FileAppender<ILoggingEvent> {
@Override
protected void append(ILoggingEvent event) {
Marker marker = event.getMarker();
if (marker != null) {
if (marker.equals(FINALIZE_SESSION_MARKER)) {
stop();
}
}
super.subAppend(event);
}
}
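The appender reacts to logback's FINALIZE_SESSION_MARKER by stopping itself once the final task event is written. A minimal sketch of a producer emitting that marker (the logger name and messages are illustrative; the appender would be bound to this logger in logback.xml):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER;

public class TaskLogProducerSketch {
    private static final Logger logger = LoggerFactory.getLogger("TaskLogLogger");

    public static void main(String[] args) {
        logger.info("task is running...");
        // the marker rides on the last event; TaskLogAppender sees it and calls stop()
        logger.info(FINALIZE_SESSION_MARKER, "task end");
    }
}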

73
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java

@ -17,8 +17,9 @@
package org.apache.dolphinscheduler.server.master.dispatch.executor;
import org.apache.commons.collections.CollectionUtils;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
@ -31,14 +32,15 @@ import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor;
import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProcessor;
import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor;
import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
/**
* netty executor manager
@ -86,11 +88,17 @@ public class NettyExecutorManager extends AbstractExecutorManager<Boolean>{
*/
@Override
public Boolean execute(ExecutionContext context) throws ExecuteException {
/**
* all nodes
*/
Set<String> allNodes = getAllNodes(context);
/**
* fail nodes
*/
Set<String> failNodeSet = new HashSet<>();
/**
* build command according to executeContext
*/
@ -99,26 +107,30 @@ public class NettyExecutorManager extends AbstractExecutorManager<Boolean>{
/** /**
* execute task host * execute task host
*/ */
String startHostAddress = context.getHost().getAddress(); Host host = context.getHost();
// remove start host address and add it to head
allNodes.remove(startHostAddress);
allNodes.addFirst(startHostAddress);
boolean success = false; boolean success = false;
for (String address : allNodes) { while (!success) {
try { try {
Host host = Host.of(address); doExecute(host,command);
doExecute(host, command);
success = true; success = true;
context.setHost(host); context.setHost(host);
break;
} catch (ExecuteException ex) { } catch (ExecuteException ex) {
logger.error("retry execute command : {} host : {}", command, address); logger.error(String.format("execute command : %s error", command), ex);
} try {
failNodeSet.add(host.getAddress());
Set<String> tmpAllIps = new HashSet<>(allNodes);
Collection<String> remained = CollectionUtils.subtract(tmpAllIps, failNodeSet);
if (remained != null && remained.size() > 0) {
host = Host.of(remained.iterator().next());
logger.error("retry execute command : {} host : {}", command, host);
} else {
throw new ExecuteException("fail after try all nodes");
} }
if (!success) { } catch (Throwable t) {
throw new ExecuteException("fail after try all nodes"); throw new ExecuteException("fail after try all nodes");
} }
}
}
return success; return success;
} }
@ -136,13 +148,24 @@ public class NettyExecutorManager extends AbstractExecutorManager<Boolean>{
* @throws ExecuteException if error throws ExecuteException * @throws ExecuteException if error throws ExecuteException
*/ */
private void doExecute(final Host host, final Command command) throws ExecuteException { private void doExecute(final Host host, final Command command) throws ExecuteException {
/**
* retry countdefault retry 3
*/
int retryCount = 3;
boolean success = false;
do {
try { try {
RetryerUtils.retryCall(() -> {
nettyRemotingClient.send(host, command); nettyRemotingClient.send(host, command);
return Boolean.TRUE; success = true;
}); } catch (Exception ex) {
} catch (ExecutionException | RetryException e) { logger.error(String.format("send command : %s to %s error", command, host), ex);
throw new ExecuteException(String.format("send command : %s to %s error", command, host), e); retryCount--;
ThreadUtils.sleep(100);
}
} while (retryCount >= 0 && !success);
if (!success) {
throw new ExecuteException(String.format("send command : %s to %s error", command, host));
} }
} }
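Stripped of the DolphinScheduler types, the new execute() loop amounts to: try the current host, and on failure pick the next candidate from (all nodes minus failed nodes) until none remain. A self-contained sketch of that selection policy, with plain strings standing in for Host and hypothetical addresses:

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class FailoverSelectionSketch {

    // next host to try: anything in allNodes that has not failed yet
    static String nextHost(Set<String> allNodes, Set<String> failNodes) {
        Set<String> remained = new LinkedHashSet<>(allNodes);
        remained.removeAll(failNodes);   // the CollectionUtils.subtract step in the patch
        if (remained.isEmpty()) {
            throw new IllegalStateException("fail after try all nodes");
        }
        return remained.iterator().next();
    }

    public static void main(String[] args) {
        Set<String> all = new LinkedHashSet<>(List.of("10.0.0.1:1234", "10.0.0.2:1234"));
        Set<String> failed = new HashSet<>();
        failed.add("10.0.0.1:1234");                // first dispatch failed
        System.out.println(nextHost(all, failed));  // 10.0.0.2:1234
    }
}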

28
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java

@@ -17,14 +17,9 @@
 package org.apache.dolphinscheduler.server.master.processor;
 
-import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS;
-
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
-import org.apache.dolphinscheduler.common.thread.Stopper;
-import org.apache.dolphinscheduler.common.thread.ThreadUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.Preconditions;
-import org.apache.dolphinscheduler.dao.entity.TaskInstance;
 import org.apache.dolphinscheduler.remote.command.Command;
 import org.apache.dolphinscheduler.remote.command.CommandType;
 import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
@@ -35,7 +30,6 @@ import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheMan
 import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent;
 import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService;
 import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
-import org.apache.dolphinscheduler.service.process.ProcessService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -59,16 +53,9 @@ public class TaskAckProcessor implements NettyRequestProcessor {
      */
     private final TaskInstanceCacheManager taskInstanceCacheManager;
 
-    /**
-     * processService
-     */
-    private ProcessService processService;
-
-    public TaskAckProcessor() {
+    public TaskAckProcessor(){
         this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class);
         this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class);
-        this.processService = SpringApplicationContext.getBean(ProcessService.class);
     }
 
     /**
@@ -94,19 +81,10 @@ public class TaskAckProcessor implements NettyRequestProcessor {
                 workerAddress,
                 taskAckCommand.getExecutePath(),
                 taskAckCommand.getLogPath(),
-                taskAckCommand.getTaskInstanceId());
+                taskAckCommand.getTaskInstanceId(),
+                channel);
 
         taskResponseService.addResponse(taskResponseEvent);
-
-        while (Stopper.isRunning()) {
-            TaskInstance taskInstance = processService.findTaskInstanceById(taskAckCommand.getTaskInstanceId());
-            if (taskInstance != null && ackStatus.typeIsRunning()) {
-                break;
-            }
-            ThreadUtils.sleep(SLEEP_TIME_MILLIS);
-        }
     }
 }

30
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java

@@ -17,14 +17,9 @@
 package org.apache.dolphinscheduler.server.master.processor;
 
-import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS;
-
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
-import org.apache.dolphinscheduler.common.thread.Stopper;
-import org.apache.dolphinscheduler.common.thread.ThreadUtils;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.Preconditions;
-import org.apache.dolphinscheduler.dao.entity.TaskInstance;
 import org.apache.dolphinscheduler.remote.command.Command;
 import org.apache.dolphinscheduler.remote.command.CommandType;
 import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand;
@@ -34,7 +29,6 @@ import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheMan
 import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent;
 import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService;
 import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
-import org.apache.dolphinscheduler.service.process.ProcessService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -58,15 +52,9 @@ public class TaskResponseProcessor implements NettyRequestProcessor {
      */
     private final TaskInstanceCacheManager taskInstanceCacheManager;
 
-    /**
-     * processService
-     */
-    private ProcessService processService;
-
-    public TaskResponseProcessor() {
+    public TaskResponseProcessor(){
         this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class);
         this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class);
-        this.processService = SpringApplicationContext.getBean(ProcessService.class);
     }
 
     /**
@@ -85,26 +73,16 @@ public class TaskResponseProcessor implements NettyRequestProcessor {
         taskInstanceCacheManager.cacheTaskInstance(responseCommand);
 
-        ExecutionStatus responseStatus = ExecutionStatus.of(responseCommand.getStatus());
-
         // TaskResponseEvent
         TaskResponseEvent taskResponseEvent = TaskResponseEvent.newResult(ExecutionStatus.of(responseCommand.getStatus()),
                 responseCommand.getEndTime(),
                 responseCommand.getProcessId(),
                 responseCommand.getAppIds(),
                 responseCommand.getTaskInstanceId(),
-                responseCommand.getVarPool());
+                responseCommand.getVarPool(),
+                channel);
 
         taskResponseService.addResponse(taskResponseEvent);
-
-        while (Stopper.isRunning()) {
-            TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId());
-            if (taskInstance != null && responseStatus.typeIsFinished()) {
-                break;
-            }
-            ThreadUtils.sleep(SLEEP_TIME_MILLIS);
-        }
     }
 }

36
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java

@@ -18,10 +18,14 @@
 package org.apache.dolphinscheduler.server.master.processor.queue;
 
 import com.fasterxml.jackson.annotation.JsonFormat;
+import org.apache.dolphinscheduler.common.enums.Event;
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
 
 import java.util.Date;
 
+import io.netty.channel.Channel;
+
 /**
  * task event
  */
@@ -84,7 +88,18 @@ public class TaskResponseEvent {
      */
     private String varPool;
 
-    public static TaskResponseEvent newAck(ExecutionStatus state, Date startTime, String workerAddress, String executePath, String logPath, int taskInstanceId) {
+    /**
+     * channel
+     */
+    private Channel channel;
+
+    public static TaskResponseEvent newAck(ExecutionStatus state,
+                                           Date startTime,
+                                           String workerAddress,
+                                           String executePath,
+                                           String logPath,
+                                           int taskInstanceId,
+                                           Channel channel) {
         TaskResponseEvent event = new TaskResponseEvent();
         event.setState(state);
         event.setStartTime(startTime);
@@ -93,10 +108,17 @@ public class TaskResponseEvent {
         event.setLogPath(logPath);
         event.setTaskInstanceId(taskInstanceId);
         event.setEvent(Event.ACK);
+        event.setChannel(channel);
         return event;
     }
 
-    public static TaskResponseEvent newResult(ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstanceId, String varPool) {
+    public static TaskResponseEvent newResult(ExecutionStatus state,
+                                              Date endTime,
+                                              int processId,
+                                              String appIds,
+                                              int taskInstanceId,
+                                              String varPool,
+                                              Channel channel) {
         TaskResponseEvent event = new TaskResponseEvent();
         event.setState(state);
         event.setEndTime(endTime);
@@ -105,6 +127,7 @@ public class TaskResponseEvent {
         event.setTaskInstanceId(taskInstanceId);
         event.setEvent(Event.RESULT);
         event.setVarPool(varPool);
+        event.setChannel(channel);
         return event;
     }
 
@@ -196,8 +219,11 @@ public class TaskResponseEvent {
         this.event = event;
     }
 
-    public enum Event{
-        ACK,
-        RESULT;
+    public Channel getChannel() {
+        return channel;
+    }
+
+    public void setChannel(Channel channel) {
+        this.channel = channel;
     }
 }
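For orientation, an illustrative fragment (not part of the patch) of how the processors above now build an event; the status code, address, and paths are placeholders, and channel is the netty Channel the worker's request arrived on:

// illustrative fragment; all values are placeholders
TaskResponseEvent ackEvent = TaskResponseEvent.newAck(
        ExecutionStatus.of(status),        // status carried by the ack command
        new Date(),                        // start time
        "10.0.0.5:1234",                   // worker address
        "/tmp/dolphinscheduler/exec",      // execute path
        "/tmp/dolphinscheduler/task.log",  // log path
        taskInstanceId,
        channel);                          // kept so the master can reply later
taskResponseService.addResponse(ackEvent);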

36
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java

@@ -17,7 +17,13 @@
 package org.apache.dolphinscheduler.server.master.processor.queue;
 
+import io.netty.channel.Channel;
+import org.apache.dolphinscheduler.common.enums.Event;
+import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
 import org.apache.dolphinscheduler.common.thread.Stopper;
+import org.apache.dolphinscheduler.dao.entity.TaskInstance;
+import org.apache.dolphinscheduler.remote.command.DBTaskAckCommand;
+import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand;
 import org.apache.dolphinscheduler.service.process.ProcessService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -123,24 +129,50 @@ public class TaskResponseService {
      * @param taskResponseEvent taskResponseEvent
      */
     private void persist(TaskResponseEvent taskResponseEvent){
-        TaskResponseEvent.Event event = taskResponseEvent.getEvent();
+        Event event = taskResponseEvent.getEvent();
+        Channel channel = taskResponseEvent.getChannel();
 
         switch (event){
             case ACK:
+                try {
+                    TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId());
+                    if (taskInstance != null){
                         processService.changeTaskState(taskResponseEvent.getState(),
                                 taskResponseEvent.getStartTime(),
                                 taskResponseEvent.getWorkerAddress(),
                                 taskResponseEvent.getExecutePath(),
                                 taskResponseEvent.getLogPath(),
                                 taskResponseEvent.getTaskInstanceId());
+                    }
+                    // if taskInstance is null (maybe deleted) . retry will be meaningless . so ack success
+                    DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.SUCCESS.getCode(),taskResponseEvent.getTaskInstanceId());
+                    channel.writeAndFlush(taskAckCommand.convert2Command());
+                }catch (Exception e){
+                    logger.error("worker ack master error",e);
+                    DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.FAILURE.getCode(),-1);
+                    channel.writeAndFlush(taskAckCommand.convert2Command());
+                }
                 break;
             case RESULT:
+                try {
+                    TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId());
+                    if (taskInstance != null){
                         processService.changeTaskState(taskResponseEvent.getState(),
                                 taskResponseEvent.getEndTime(),
                                 taskResponseEvent.getProcessId(),
                                 taskResponseEvent.getAppIds(),
                                 taskResponseEvent.getTaskInstanceId(),
-                                taskResponseEvent.getVarPool());
+                                taskResponseEvent.getVarPool()
+                        );
+                    }
+                    // if taskInstance is null (maybe deleted) . retry will be meaningless . so response success
+                    DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.SUCCESS.getCode(),taskResponseEvent.getTaskInstanceId());
+                    channel.writeAndFlush(taskResponseCommand.convert2Command());
+                }catch (Exception e){
+                    logger.error("worker response master error",e);
+                    DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.FAILURE.getCode(),-1);
+                    channel.writeAndFlush(taskResponseCommand.convert2Command());
+                }
                 break;
             default:
                 throw new IllegalArgumentException("invalid event type : " + event);
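Net effect of the two persist() branches: the master now completes the handshake itself instead of the worker-side processors spin-waiting on the database. A compact trace of the intended exchange, written as comments and using only the command classes shown in this diff:

// worker -> master : TaskExecuteAckCommand           (worker keeps a cached copy)
// master           : changeTaskState(...), then
// master -> worker : DBTaskAckCommand(SUCCESS, taskInstanceId)
// worker           : DBTaskAckProcessor drops the cached copy
//
// worker -> master : TaskExecuteResponseCommand
// master           : changeTaskState(...), then
// master -> worker : DBTaskResponseCommand(SUCCESS, taskInstanceId)
// worker           : DBTaskResponseProcessor drops the cached copy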

3
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java

@@ -39,7 +39,6 @@ import java.util.concurrent.ConcurrentHashMap;
 public class ConditionsTaskExecThread extends MasterBaseTaskExecThread {
 
-
     /**
      * dependent parameters
      */
@@ -134,7 +133,6 @@ public class ConditionsTaskExecThread extends MasterBaseTaskExecThread {
         this.dependentParameters = JSONUtils.parseObject(this.taskInstance.getDependency(), DependentParameters.class);
     }
 
-
     /**
      * depend result for depend item
      * @param item
@@ -158,5 +156,4 @@ public class ConditionsTaskExecThread extends MasterBaseTaskExecThread {
         return dependResult;
     }
 
-
 }

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java

@@ -149,6 +149,10 @@ public class DependentTaskExecThread extends MasterBaseTaskExecThread {
                 logger.error("process instance not exists , master task exec thread exit");
                 return true;
             }
+            if (checkTaskTimeout()) {
+                this.checkTimeoutFlag = !alertTimeout();
+                handleTimeoutFailed();
+            }
             if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){
                 cancelTaskInstance();
                 break;

101
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java

@@ -14,14 +14,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.server.master.runner;
 
 import static org.apache.dolphinscheduler.common.Constants.UNDERLINE;
 
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
+import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
+import org.apache.dolphinscheduler.common.model.TaskNode;
+import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.dao.AlertDao;
+import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
 import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
 import org.apache.dolphinscheduler.dao.entity.TaskInstance;
 import org.apache.dolphinscheduler.server.master.config.MasterConfig;
@@ -34,6 +37,11 @@ import java.util.concurrent.Callable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.dolphinscheduler.common.Constants.*;
+
+import java.util.Date;
+import java.util.concurrent.Callable;
+
 /**
  * master task exec base class
@@ -81,9 +89,18 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
      */
     private TaskPriorityQueue taskUpdateQueue;
 
+    /**
+     * whether need check task time out.
+     */
+    protected boolean checkTimeoutFlag = false;
+
+    /**
+     * task timeout parameters
+     */
+    protected TaskTimeoutParameter taskTimeoutParameter;
+
     /**
      * constructor of MasterBaseTaskExecThread
-     *
      * @param taskInstance task instance
      */
     public MasterBaseTaskExecThread(TaskInstance taskInstance) {
@@ -93,6 +110,27 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
         this.taskInstance = taskInstance;
         this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class);
         this.taskUpdateQueue = SpringApplicationContext.getBean(TaskPriorityQueueImpl.class);
+        initTaskParams();
+    }
+
+    /**
+     * init task ordinary parameters
+     */
+    private void initTaskParams() {
+        initTimeoutParams();
+    }
+
+    /**
+     * init task timeout parameters
+     */
+    private void initTimeoutParams() {
+        String taskJson = taskInstance.getTaskJson();
+        TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
+        taskTimeoutParameter = taskNode.getTaskTimeoutParameter();
+
+        if(taskTimeoutParameter.getEnable()){
+            checkTimeoutFlag = true;
+        }
     }
 
     /**
@@ -113,7 +151,6 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
 
     /**
      * submit master base task exec thread
-     *
      * @return TaskInstance
      */
     protected TaskInstance submit() {
@@ -154,14 +191,13 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
         return task;
     }
 
     /**
      * dispatcht task
-     *
      * @param taskInstance taskInstance
      * @return whether submit task success
      */
     public Boolean dispatchTask(TaskInstance taskInstance) {
 
         try{
             if(taskInstance.isConditionsTask()
                     || taskInstance.isDependTask()
@@ -198,7 +234,6 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
         }
     }
 
-
     /**
      * buildTaskPriorityInfo
      *
@@ -227,7 +262,6 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
 
     /**
      * submit wait complete
-     *
      * @return true
      */
     protected Boolean submitWaitComplete() {
@@ -236,7 +270,6 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
 
     /**
      * call
-     *
     * @return boolean
      * @throws Exception exception
      */
@@ -246,4 +279,56 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
         return submitWaitComplete();
     }
+
+    /**
+     * alert time out
+     * @return
+     */
+    protected boolean alertTimeout(){
+        if( TaskTimeoutStrategy.FAILED == this.taskTimeoutParameter.getStrategy()){
+            return true;
+        }
+        logger.warn("process id:{} process name:{} task id: {},name:{} execution time out",
+                processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName());
+        // send warn mail
+        ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
+        alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),
+                processDefine.getReceiversCc(), processInstance.getId(), processInstance.getName(),
+                taskInstance.getId(),taskInstance.getName());
+        return true;
+    }
+
+    /**
+     * handle time out for time out strategy warn&&failed
+     */
+    protected void handleTimeoutFailed(){
+        if(TaskTimeoutStrategy.WARN == this.taskTimeoutParameter.getStrategy()){
+            return;
+        }
+        logger.info("process id:{} name:{} task id:{} name:{} cancel because of timeout.",
+                processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName());
+        this.cancel = true;
+    }
+
+    /**
+     * check task remain time valid
+     * @return
+     */
+    protected boolean checkTaskTimeout(){
+        if (!checkTimeoutFlag || taskInstance.getStartTime() == null){
+            return false;
+        }
+        long remainTime = getRemainTime(taskTimeoutParameter.getInterval() * 60L);
+        return remainTime <= 0;
+    }
+
+    /**
+     * get remain time
+     *
+     * @return remain time
+     */
+    protected long getRemainTime(long timeoutSeconds) {
+        Date startTime = taskInstance.getStartTime();
+        long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000;
+        return timeoutSeconds - usedTime;
+    }
 }
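The arithmetic in checkTaskTimeout()/getRemainTime() is simple enough to verify by hand. A standalone sketch, assuming a 2-minute timeout (interval == 2) on a task that started 150 seconds ago:

public class TimeoutArithmeticSketch {
    public static void main(String[] args) {
        long intervalMinutes = 2;                                 // taskTimeoutParameter.getInterval()
        long startMillis = System.currentTimeMillis() - 150_000;  // taskInstance.getStartTime()
        long usedSeconds = (System.currentTimeMillis() - startMillis) / 1000;
        long remain = intervalMinutes * 60L - usedSeconds;        // getRemainTime(...)
        System.out.println(remain);        // roughly -30
        System.out.println(remain <= 0);   // true, so checkTaskTimeout() fires
    }
}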

191
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java

@@ -68,6 +68,7 @@ import java.util.Date;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
@@ -348,11 +349,12 @@ public class MasterExecThread implements Runnable {
      * @throws Exception exception
      */
     private void prepareProcess() throws Exception {
+        // init task queue
+        initTaskQueue();
+
         // gen process dag
         buildFlowDag();
-        // init task queue
-        initTaskQueue();
 
         logger.info("prepare process :{} end", processInstance.getId());
     }
@@ -407,6 +409,9 @@ public class MasterExecThread implements Runnable {
             if(task.isTaskComplete()){
                 completeTaskList.put(task.getName(), task);
             }
+            if(task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)){
+                continue;
+            }
             if(task.getState().typeIsFailure() && !task.taskCanRetry()){
                 errorTaskList.put(task.getName(), task);
             }
@@ -498,6 +503,9 @@ public class MasterExecThread implements Runnable {
         // task instance whether alert
         taskInstance.setAlertFlag(Flag.NO);
 
+        // task instance start time
+        taskInstance.setStartTime(null);
+
         // task instance flag
         taskInstance.setFlag(Flag.YES);
@@ -532,132 +540,13 @@ public class MasterExecThread implements Runnable {
         return taskInstance;
     }
 
-    /**
-     * if all of the task dependence are skip, skip it too.
-     * @param taskNode
-     * @return
-     */
-    private boolean isTaskNodeNeedSkip(TaskNode taskNode){
-        if(CollectionUtils.isEmpty(taskNode.getDepList())){
-            return false;
-        }
-        for(String depNode : taskNode.getDepList()){
-            if(!skipTaskNodeList.containsKey(depNode)){
-                return false;
-            }
-        }
-        return true;
-    }
-
-    /**
-     * set task node skip if dependence all skip
-     * @param taskNodesSkipList
-     */
-    private void setTaskNodeSkip(List<String> taskNodesSkipList){
-        for(String skipNode : taskNodesSkipList){
-            skipTaskNodeList.putIfAbsent(skipNode, dag.getNode(skipNode));
-            Collection<String> postNodeList = DagHelper.getStartVertex(skipNode, dag, completeTaskList);
-            List<String> postSkipList = new ArrayList<>();
-            for(String post : postNodeList){
-                TaskNode postNode = dag.getNode(post);
-                if(isTaskNodeNeedSkip(postNode)){
-                    postSkipList.add(post);
-                }
-            }
-            setTaskNodeSkip(postSkipList);
-        }
-    }
-
-    /**
-     * parse condition task find the branch process
-     * set skip flag for another one.
-     * @param nodeName
-     * @return
-     */
-    private List<String> parseConditionTask(String nodeName){
-        List<String> conditionTaskList = new ArrayList<>();
-        TaskNode taskNode = dag.getNode(nodeName);
-        if(!taskNode.isConditionsTask()){
-            return conditionTaskList;
-        }
-        ConditionsParameters conditionsParameters =
-                JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class);
-
-        TaskInstance taskInstance = completeTaskList.get(nodeName);
-        if(taskInstance == null){
-            logger.error("task instance {} cannot find, please check it!", nodeName);
-            return conditionTaskList;
-        }
-
-        if(taskInstance.getState().typeIsSuccess()){
-            conditionTaskList = conditionsParameters.getSuccessNode();
-            setTaskNodeSkip(conditionsParameters.getFailedNode());
-        }else if(taskInstance.getState().typeIsFailure()){
-            conditionTaskList = conditionsParameters.getFailedNode();
-            setTaskNodeSkip(conditionsParameters.getSuccessNode());
-        }else{
-            conditionTaskList.add(nodeName);
-        }
-        return conditionTaskList;
-    }
-
-    /**
-     * parse post node list of previous node
-     * if condition node: return process according to the settings
-     * if post node completed, return post nodes of the completed node
-     * @param previousNodeName
-     * @return
-     */
-    private List<String> parsePostNodeList(String previousNodeName){
-        List<String> postNodeList = new ArrayList<>();
-
-        TaskNode taskNode = dag.getNode(previousNodeName);
-        if(taskNode != null && taskNode.isConditionsTask()){
-            return parseConditionTask(previousNodeName);
-        }
-        Collection<String> postNodeCollection = DagHelper.getStartVertex(previousNodeName, dag, completeTaskList);
-        List<String> postSkipList = new ArrayList<>();
-        // delete success node, parse the past nodes
-        // if conditions node,
-        //  1. parse the branch process according the conditions setting
-        //  2. set skip flag on anther branch process
-        for(String postNode : postNodeCollection){
-            if(completeTaskList.containsKey(postNode)){
-                TaskInstance postTaskInstance = completeTaskList.get(postNode);
-                if(dag.getNode(postNode).isConditionsTask()){
-                    List<String> conditionTaskNodeList = parseConditionTask(postNode);
-                    for(String conditions : conditionTaskNodeList){
-                        postNodeList.addAll(parsePostNodeList(conditions));
-                    }
-                }else if(postTaskInstance.getState().typeIsSuccess()){
-                    postNodeList.addAll(parsePostNodeList(postNode));
-                }else{
-                    postNodeList.add(postNode);
-                }
-            }else if(isTaskNodeNeedSkip(dag.getNode(postNode))){
-                postSkipList.add(postNode);
-                setTaskNodeSkip(postSkipList);
-                postSkipList.clear();
-            }else{
-                postNodeList.add(postNode);
-            }
-        }
-        return postNodeList;
-    }
-
     /**
      * submit post node
      * @param parentNodeName parent node name
     */
     private Map<String,Object> propToValue = new ConcurrentHashMap<String, Object>();
     private void submitPostNode(String parentNodeName){
-
-        List<String> submitTaskNodeList = parsePostNodeList(parentNodeName);
-
+        Set<String> submitTaskNodeList = DagHelper.parsePostNodes(parentNodeName, skipTaskNodeList, dag, completeTaskList);
         List<TaskInstance> taskInstances = new ArrayList<>();
         for(String taskNode : submitTaskNodeList){
             try {
@@ -702,7 +591,6 @@ public class MasterExecThread implements Runnable {
         if(startNodes.contains(taskName)){
             return DependResult.SUCCESS;
         }
-
         TaskNode taskNode = dag.getNode(taskName);
         List<String> depNameList = taskNode.getDepList();
         for(String depsNode : depNameList ){
@@ -716,23 +604,42 @@ public class MasterExecThread implements Runnable {
                 return DependResult.WAITING;
             }
             ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState();
-            // conditions task would not return failed.
-            if(depTaskState.typeIsFailure()
-                    && !DagHelper.haveConditionsAfterNode(depsNode, dag )
-                    && !dag.getNode(depsNode).isConditionsTask()){
-                return DependResult.FAILED;
-            }
             if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){
                 return DependResult.WAITING;
             }
+            // ignore task state if current task is condition
+            if(taskNode.isConditionsTask()){
+                continue;
+            }
+            if(!dependTaskSuccess(depsNode, taskName)){
+                return DependResult.FAILED;
+            }
         }
 
         logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray()));
 
         return DependResult.SUCCESS;
     }
 
+    /**
+     * depend node is completed, but here need check the condition task branch is the next node
+     * @param dependNodeName
+     * @param nextNodeName
+     * @return
+     */
+    private boolean dependTaskSuccess(String dependNodeName, String nextNodeName){
+        if(dag.getNode(dependNodeName).isConditionsTask()){
+            //condition task need check the branch to run
+            List<String> nextTaskList = DagHelper.parseConditionTask(dependNodeName, skipTaskNodeList, dag, completeTaskList);
+            if(!nextTaskList.contains(nextNodeName)){
+                return false;
+            }
+        }else {
+            ExecutionStatus depTaskState = completeTaskList.get(dependNodeName).getState();
+            if(depTaskState.typeIsFailure()){
+                return false;
+            }
+        }
+        return true;
+    }
+
     /**
      * query task instance by complete state
@@ -889,6 +796,24 @@ public class MasterExecThread implements Runnable {
         return state;
     }
 
+    /**
+     * whether standby task list have retry tasks
+     * @return
+     */
+    private boolean retryTaskExists() {
+        boolean result = false;
+        for(String taskName : readyToSubmitTaskList.keySet()){
+            TaskInstance task = readyToSubmitTaskList.get(taskName);
+            if(task.getState().typeIsFailure()){
+                result = true;
+                break;
+            }
+        }
+        return result;
+    }
+
     /**
      * whether complement end
      * @return Boolean whether is complement end
@@ -976,7 +901,7 @@ public class MasterExecThread implements Runnable {
         // submit start node
         submitPostNode(null);
         boolean sendTimeWarning = false;
-        while(!processInstance.isProcessInstanceStop()){
+        while(!processInstance.isProcessInstanceStop() && Stopper.isRunning()){
 
             // send warning email if process time out.
             if(!sendTimeWarning && checkProcessTimeOut(processInstance) ){

32
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java

@@ -121,15 +121,6 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
         taskInstance = processService.findTaskInstanceById(taskInstance.getId());
         logger.info("wait task: process id: {}, task id:{}, task name:{} complete",
                 this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName());
-        // task time out
-        boolean checkTimeout = false;
-        TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter();
-        if(taskTimeoutParameter.getEnable()){
-            TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy();
-            if(strategy == TaskTimeoutStrategy.WARN || strategy == TaskTimeoutStrategy.WARNFAILED){
-                checkTimeout = true;
-            }
-        }
 
         while (Stopper.isRunning()){
             try {
@@ -150,18 +141,8 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
                     taskInstanceCacheManager.removeByTaskInstanceId(taskInstance.getId());
                     break;
                 }
-                if(checkTimeout){
-                    long remainTime = DateUtils.getRemainTime(taskInstance.getStartTime(), taskTimeoutParameter.getInterval() * 60L);
-                    if (remainTime < 0) {
-                        logger.warn("task id: {} execution time out",taskInstance.getId());
-                        // process define
-                        ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
-                        // send warn mail
-                        alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),
-                                processDefine.getReceiversCc(), processInstance.getId(), processInstance.getName(),
-                                taskInstance.getId(),taskInstance.getName());
-                        checkTimeout = false;
-                    }
+                if (checkTaskTimeout()) {
+                    this.checkTimeoutFlag = !alertTimeout();
                 }
                 // updateProcessInstance task instance
                 taskInstance = processService.findTaskInstanceById(taskInstance.getId());
@@ -248,13 +229,4 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
         return true;
     }
 
-    /**
-     * get task timeout parameter
-     * @return TaskTimeoutParameter
-     */
-    private TaskTimeoutParameter getTaskTimeoutParameter(){
-        String taskJson = taskInstance.getTaskJson();
-        TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
-        return taskNode.getTaskTimeoutParameter();
-    }
 }

7
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java

@@ -130,19 +130,20 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
             while (Stopper.isRunning()) {
                 // waiting for subflow process instance establishment
                 if (subProcessInstance == null) {
-
                     Thread.sleep(Constants.SLEEP_TIME_MILLIS);
-
                     if(!setTaskInstanceState()){
                         continue;
                     }
                 }
                 subProcessInstance = processService.findProcessInstanceById(subProcessInstance.getId());
+                if (checkTaskTimeout()) {
+                    this.checkTimeoutFlag = !alertTimeout();
+                    handleTimeoutFailed();
+                }
                 updateParentProcessState();
                 if (subProcessInstance.getState().typeIsFinished()){
                     break;
                 }
 
                 if(this.processInstance.getState() == ExecutionStatus.READY_PAUSE){
                     // parent process "ready to pause" , child process "pause"
                     pauseSubProcess();

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java

@@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.utils;
 
 import org.apache.dolphinscheduler.common.enums.AlertType;
 import org.apache.dolphinscheduler.common.enums.CommandType;
+import org.apache.dolphinscheduler.common.enums.Flag;
 import org.apache.dolphinscheduler.common.enums.ShowType;
 import org.apache.dolphinscheduler.common.enums.WarningType;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
@@ -197,6 +198,9 @@ public class AlertManager {
     public void sendAlertProcessInstance(ProcessInstance processInstance,
                                          List<TaskInstance> taskInstances) {
 
+        if(Flag.YES == processInstance.getIsSubProcess()){
+            return;
+        }
         boolean sendWarnning = false;
         WarningType warningType = processInstance.getWarningType();
         switch (warningType) {

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java

@@ -104,9 +104,7 @@ public class DependentExecute {
         ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(),
                 dateInterval);
         if(processInstance == null){
-            logger.error("cannot find the right process instance: definition id:{}, start:{}, end:{}",
-                    dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() );
-            return DependResult.FAILED;
+            return DependResult.WAITING;
         }
         // need to check workflow for updates, so get all task and check the task state
         if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){

6
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java

@@ -70,14 +70,16 @@ public class UDFUtils {
      */
     private static void buildJarSql(List<String> sqls, Map<UdfFunc,String> udfFuncTenantCodeMap) {
         String defaultFS = HadoopUtils.getInstance().getConfiguration().get(Constants.FS_DEFAULTFS);
+
+        String resourceFullName;
         Set<Map.Entry<UdfFunc,String>> entries = udfFuncTenantCodeMap.entrySet();
         for (Map.Entry<UdfFunc,String> entry:entries){
             String uploadPath = HadoopUtils.getHdfsUdfDir(entry.getValue());
             if (!uploadPath.startsWith("hdfs:")) {
                 uploadPath = defaultFS + uploadPath;
             }
-            sqls.add(String.format("add jar %s%s", uploadPath, entry.getKey().getResourceName()));
+            resourceFullName = entry.getKey().getResourceName();
+            resourceFullName = resourceFullName.startsWith("/") ? resourceFullName : String.format("/%s",resourceFullName);
+            sqls.add(String.format("add jar %s%s", uploadPath, resourceFullName));
         }
     }
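The effect of the fix is easiest to see on a concrete value. A standalone sketch with hypothetical HDFS paths; without the leading-slash normalization the generated statement ran the directory and file names together:

public class UdfJarPathSketch {
    public static void main(String[] args) {
        String uploadPath = "hdfs://mycluster/dolphinscheduler/tenant1/udfs"; // hypothetical
        String resourceFullName = "funcs.jar";                                // hypothetical

        // old behaviour: "add jar ...udfsfuncs.jar" (missing separator)
        System.out.println(String.format("add jar %s%s", uploadPath, resourceFullName));

        // new behaviour: prefix "/" when absent
        resourceFullName = resourceFullName.startsWith("/") ? resourceFullName
                : String.format("/%s", resourceFullName);
        System.out.println(String.format("add jar %s%s", uploadPath, resourceFullName));
        // add jar hdfs://mycluster/dolphinscheduler/tenant1/udfs/funcs.jar
    }
}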

11
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java

@@ -22,9 +22,12 @@ import org.apache.dolphinscheduler.remote.NettyRemotingServer;
 import org.apache.dolphinscheduler.remote.command.CommandType;
 import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
 import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
+import org.apache.dolphinscheduler.server.worker.processor.DBTaskAckProcessor;
+import org.apache.dolphinscheduler.server.worker.processor.DBTaskResponseProcessor;
 import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor;
 import org.apache.dolphinscheduler.server.worker.processor.TaskKillProcessor;
 import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
+import org.apache.dolphinscheduler.server.worker.runner.RetryReportTaskStatusThread;
 import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -70,6 +73,9 @@ public class WorkerServer {
     @Autowired
     private SpringApplicationContext springApplicationContext;
 
+    @Autowired
+    private RetryReportTaskStatusThread retryReportTaskStatusThread;
+
     /**
      * worker server startup
      *
@@ -95,11 +101,16 @@ public class WorkerServer {
         this.nettyRemotingServer = new NettyRemotingServer(serverConfig);
         this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_REQUEST, new TaskExecuteProcessor());
         this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_REQUEST, new TaskKillProcessor());
+        this.nettyRemotingServer.registerProcessor(CommandType.DB_TASK_ACK, new DBTaskAckProcessor());
+        this.nettyRemotingServer.registerProcessor(CommandType.DB_TASK_RESPONSE, new DBTaskResponseProcessor());
         this.nettyRemotingServer.start();
 
         // worker registry
         this.workerRegistry.registry();
 
+        // retry report task status
+        this.retryReportTaskStatusThread.start();
+
         /**
          * register hooks, which are called before the process exits

94
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java

@@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.cache;
import org.apache.dolphinscheduler.common.enums.Event;
import org.apache.dolphinscheduler.remote.command.Command;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Responce Cache : cache worker send master result
*/
public class ResponceCache {

    private static final ResponceCache instance = new ResponceCache();

    private ResponceCache(){}

    public static ResponceCache get(){
        return instance;
    }

    private Map<Integer,Command> ackCache = new ConcurrentHashMap<>();
    private Map<Integer,Command> responseCache = new ConcurrentHashMap<>();

    /**
     * cache response
     * @param taskInstanceId taskInstanceId
     * @param command command
     * @param event event ACK/RESULT
     */
    public void cache(Integer taskInstanceId, Command command, Event event){
        switch (event){
            case ACK:
                ackCache.put(taskInstanceId,command);
                break;
            case RESULT:
                responseCache.put(taskInstanceId,command);
                break;
            default:
                throw new IllegalArgumentException("invalid event type : " + event);
        }
    }

    /**
     * remove ack cache
     * @param taskInstanceId taskInstanceId
     */
    public void removeAckCache(Integer taskInstanceId){
        ackCache.remove(taskInstanceId);
    }

    /**
     * remove reponse cache
     * @param taskInstanceId taskInstanceId
     */
    public void removeResponseCache(Integer taskInstanceId){
        responseCache.remove(taskInstanceId);
    }

    /**
     * getAckCache
     * @return getAckCache
     */
    public Map<Integer,Command> getAckCache(){
        return ackCache;
    }

    /**
     * getResponseCache
     * @return getResponseCache
     */
    public Map<Integer,Command> getResponseCache(){
        return responseCache;
    }
}
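Together with DBTaskAckProcessor/DBTaskResponseProcessor below, this cache gives the worker an at-least-once report protocol: a result stays cached until the master confirms it reached the database. A standalone model of that flow (the real RetryReportTaskStatusThread is not shown in this diff, so the retry loop here is an assumption):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class AtLeastOnceReportSketch {

    private static final Map<Integer, String> ackCache = new ConcurrentHashMap<>();

    static void report(int taskInstanceId, String command) {
        ackCache.put(taskInstanceId, command);  // cache before sending
        // ... transmit the command over netty here ...
    }

    static void onDbTaskAck(int taskInstanceId, boolean persisted) {
        if (persisted) {
            ackCache.remove(taskInstanceId);    // mirrors DBTaskAckProcessor below
        }
    }

    static void retryLoop() {
        // periodically re-send whatever the master has not acknowledged yet
        ackCache.forEach((id, cmd) -> System.out.println("resend " + cmd + " for task " + id));
    }

    public static void main(String[] args) {
        report(1, "TASK_EXECUTE_ACK");
        retryLoop();          // resend TASK_EXECUTE_ACK for task 1
        onDbTaskAck(1, true);
        retryLoop();          // nothing left to resend
    }
}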

56
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java

@@ -0,0 +1,56 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.remote.command.*;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.server.worker.cache.ResponceCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* db task ack processor
*/
public class DBTaskAckProcessor implements NettyRequestProcessor {

    private final Logger logger = LoggerFactory.getLogger(DBTaskAckProcessor.class);

    @Override
    public void process(Channel channel, Command command) {
        Preconditions.checkArgument(CommandType.DB_TASK_ACK == command.getType(),
                String.format("invalid command type : %s", command.getType()));

        DBTaskAckCommand taskAckCommand = JSONUtils.parseObject(
                command.getBody(), DBTaskAckCommand.class);

        if (taskAckCommand == null){
            return;
        }

        if (taskAckCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){
            ResponceCache.get().removeAckCache(taskAckCommand.getTaskInstanceId());
        }
    }
}

58
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java

@@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.server.worker.cache.ResponceCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* db task response processor
*/
public class DBTaskResponseProcessor implements NettyRequestProcessor {

    private final Logger logger = LoggerFactory.getLogger(DBTaskResponseProcessor.class);

    @Override
    public void process(Channel channel, Command command) {
        Preconditions.checkArgument(CommandType.DB_TASK_RESPONSE == command.getType(),
                String.format("invalid command type : %s", command.getType()));

        DBTaskResponseCommand taskResponseCommand = JSONUtils.parseObject(
                command.getBody(), DBTaskResponseCommand.class);

        if (taskResponseCommand == null){
            return;
        }

        if (taskResponseCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){
            ResponceCache.get().removeResponseCache(taskResponseCommand.getTaskInstanceId());
        }
    }
}

Some files were not shown because too many files have changed in this diff.
