
merge from 1.3.3-release

pull/3/MERGE
baoliang, 4 years ago · commit 381d23ea4f
  1. 128  .gitignore
  2. BIN  .mvn/wrapper/maven-wrapper.jar
  3. 467  ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml
  4. 6  ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json
  5. 7  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml
  6. 16  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml
  7. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml
  8. 206  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml
  9. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml
  10. 88  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml
  11. 23  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml
  12. 67  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml
  13. 76  ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml
  14. 4  ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml
  15. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py
  16. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py
  17. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py
  18. 46  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py
  19. 4  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py
  20. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py
  21. 3  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py
  22. 85  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py
  23. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py
  24. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py
  25. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2
  26. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2
  27. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2
  28. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2
  29. 13  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2
  30. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2
  31. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2
  32. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2
  33. 20  ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2
  34. 0  ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json
  35. 84  ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json
  36. 13  docker/kubernetes/dolphinscheduler/requirements.yaml
  37. 19  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java
  38. 24  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java
  39. 1  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java
  40. 105  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java
  41. 19  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java
  42. 8  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java
  43. 23  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java
  44. 2  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java
  45. 1  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java
  46. 2  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java
  47. 2  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java
  48. 14  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java
  49. 67  dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java
  50. 13  dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java
  51. 230  dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java
  52. 72  dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java
  53. 71  dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java
  54. 39  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java
  55. 162  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java
  56. 7  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java
  57. 4  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java
  58. 94  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java
  59. 56  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java
  60. 58  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java
  61. 94  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java
  62. 76  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java
  63. 16  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java
  64. 25  dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java
  65. 7  dolphinscheduler-ui/build/webpack.config.prod.js
  66. 23  dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue
  67. 19  dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue
  68. 19  dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue
  69. 1  dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue
  70. 2  dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue
  71. 10  dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue
  72. 10  dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue
  73. 2  dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue
  74. 2  dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/list.vue
  75. 6  dolphinscheduler-ui/src/js/conf/home/store/projects/mutations.js
  76. 2  dolphinscheduler-ui/src/js/conf/home/store/projects/state.js
  77. 336  dolphinscheduler-ui/src/js/module/components/fileUpdate/fileChildReUpdate.vue
  78. 335  dolphinscheduler-ui/src/js/module/components/fileUpdate/fileReUpload.vue
  79. 39  e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteProjectTest.java
  80. 38  e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteTenantTest.java
  81. 42  e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteUserTest.java
  82. 43  e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteWorkflowTest.java
  83. 19  sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql
  84. 17  sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql

128
.gitignore

@@ -4,25 +4,27 @@
.zip .zip
.gz .gz
.DS_Store .DS_Store
.idea
.idea/ .idea/
dist/ .idea/*
all-dependencies.txt .target
self-modules.txt .target/
third-party-dependencies.txt **/**/target/**
**/target/ target/*
*/target
*/target/*
.settings .settings
.nbproject .nbproject
.classpath .classpath
.project .project
**/*.iml *.iml
*.ipr *.ipr
*.iws *.iws
*.tgz *.tgz
.*.swp .*.swp
.factorypath
.vim .vim
.tmp .tmp
**/node_modules node_modules
npm-debug.log npm-debug.log
.vscode .vscode
logs/* logs/*
@@ -39,10 +41,110 @@ dolphinscheduler-alert/logs/
dolphinscheduler-alert/src/main/resources/alert.properties_bak dolphinscheduler-alert/src/main/resources/alert.properties_bak
dolphinscheduler-alert/src/main/resources/logback.xml dolphinscheduler-alert/src/main/resources/logback.xml
dolphinscheduler-server/src/main/resources/logback.xml dolphinscheduler-server/src/main/resources/logback.xml
dolphinscheduler-ui/dist/ dolphinscheduler-ui/dist
dolphinscheduler-ui/node dolphinscheduler-ui/node
dolphinscheduler-dao/src/main/resources/dao/data_source.properties dolphinscheduler-ui/dist/css/common.16ac5d9.css
dolphinscheduler-ui/dist/css/home/index.b444b91.css
dolphinscheduler-ui/dist/css/login/index.5866c64.css
dolphinscheduler-ui/dist/js/0.ac94e5d.js
dolphinscheduler-ui/dist/js/0.ac94e5d.js.map
dolphinscheduler-ui/dist/js/1.0b043a3.js
dolphinscheduler-ui/dist/js/1.0b043a3.js.map
dolphinscheduler-ui/dist/js/10.1bce3dc.js
dolphinscheduler-ui/dist/js/10.1bce3dc.js.map
dolphinscheduler-ui/dist/js/11.79f04d8.js
dolphinscheduler-ui/dist/js/11.79f04d8.js.map
dolphinscheduler-ui/dist/js/12.420daa5.js
dolphinscheduler-ui/dist/js/12.420daa5.js.map
dolphinscheduler-ui/dist/js/13.e5bae1c.js
dolphinscheduler-ui/dist/js/13.e5bae1c.js.map
dolphinscheduler-ui/dist/js/14.f2a0dca.js
dolphinscheduler-ui/dist/js/14.f2a0dca.js.map
dolphinscheduler-ui/dist/js/15.45373e8.js
dolphinscheduler-ui/dist/js/15.45373e8.js.map
dolphinscheduler-ui/dist/js/16.fecb0fc.js
dolphinscheduler-ui/dist/js/16.fecb0fc.js.map
dolphinscheduler-ui/dist/js/17.84be279.js
dolphinscheduler-ui/dist/js/17.84be279.js.map
dolphinscheduler-ui/dist/js/18.307ea70.js
dolphinscheduler-ui/dist/js/18.307ea70.js.map
dolphinscheduler-ui/dist/js/19.144db9c.js
dolphinscheduler-ui/dist/js/19.144db9c.js.map
dolphinscheduler-ui/dist/js/2.8b4ef29.js
dolphinscheduler-ui/dist/js/2.8b4ef29.js.map
dolphinscheduler-ui/dist/js/20.4c527e9.js
dolphinscheduler-ui/dist/js/20.4c527e9.js.map
dolphinscheduler-ui/dist/js/21.831b2a2.js
dolphinscheduler-ui/dist/js/21.831b2a2.js.map
dolphinscheduler-ui/dist/js/22.2b4bb2a.js
dolphinscheduler-ui/dist/js/22.2b4bb2a.js.map
dolphinscheduler-ui/dist/js/23.81467ef.js
dolphinscheduler-ui/dist/js/23.81467ef.js.map
dolphinscheduler-ui/dist/js/24.54a00e4.js
dolphinscheduler-ui/dist/js/24.54a00e4.js.map
dolphinscheduler-ui/dist/js/25.8d7bd36.js
dolphinscheduler-ui/dist/js/25.8d7bd36.js.map
dolphinscheduler-ui/dist/js/26.2ec5e78.js
dolphinscheduler-ui/dist/js/26.2ec5e78.js.map
dolphinscheduler-ui/dist/js/27.3ab48c2.js
dolphinscheduler-ui/dist/js/27.3ab48c2.js.map
dolphinscheduler-ui/dist/js/28.363088a.js
dolphinscheduler-ui/dist/js/28.363088a.js.map
dolphinscheduler-ui/dist/js/29.6c5853a.js
dolphinscheduler-ui/dist/js/29.6c5853a.js.map
dolphinscheduler-ui/dist/js/3.a0edb5b.js
dolphinscheduler-ui/dist/js/3.a0edb5b.js.map
dolphinscheduler-ui/dist/js/30.940fdd3.js
dolphinscheduler-ui/dist/js/30.940fdd3.js.map
dolphinscheduler-ui/dist/js/31.168a460.js
dolphinscheduler-ui/dist/js/31.168a460.js.map
dolphinscheduler-ui/dist/js/32.8df6594.js
dolphinscheduler-ui/dist/js/32.8df6594.js.map
dolphinscheduler-ui/dist/js/33.4480bbe.js
dolphinscheduler-ui/dist/js/33.4480bbe.js.map
dolphinscheduler-ui/dist/js/34.b407fe1.js
dolphinscheduler-ui/dist/js/34.b407fe1.js.map
dolphinscheduler-ui/dist/js/35.f340b0a.js
dolphinscheduler-ui/dist/js/35.f340b0a.js.map
dolphinscheduler-ui/dist/js/36.8880c2d.js
dolphinscheduler-ui/dist/js/36.8880c2d.js.map
dolphinscheduler-ui/dist/js/37.ea2a25d.js
dolphinscheduler-ui/dist/js/37.ea2a25d.js.map
dolphinscheduler-ui/dist/js/38.98a59ee.js
dolphinscheduler-ui/dist/js/38.98a59ee.js.map
dolphinscheduler-ui/dist/js/39.a5e958a.js
dolphinscheduler-ui/dist/js/39.a5e958a.js.map
dolphinscheduler-ui/dist/js/4.4ca44db.js
dolphinscheduler-ui/dist/js/4.4ca44db.js.map
dolphinscheduler-ui/dist/js/40.e187b1e.js
dolphinscheduler-ui/dist/js/40.e187b1e.js.map
dolphinscheduler-ui/dist/js/41.0e89182.js
dolphinscheduler-ui/dist/js/41.0e89182.js.map
dolphinscheduler-ui/dist/js/42.341047c.js
dolphinscheduler-ui/dist/js/42.341047c.js.map
dolphinscheduler-ui/dist/js/43.27b8228.js
dolphinscheduler-ui/dist/js/43.27b8228.js.map
dolphinscheduler-ui/dist/js/44.e8869bc.js
dolphinscheduler-ui/dist/js/44.e8869bc.js.map
dolphinscheduler-ui/dist/js/45.8d54901.js
dolphinscheduler-ui/dist/js/45.8d54901.js.map
dolphinscheduler-ui/dist/js/5.e1ed7f3.js
dolphinscheduler-ui/dist/js/5.e1ed7f3.js.map
dolphinscheduler-ui/dist/js/6.241ba07.js
dolphinscheduler-ui/dist/js/6.241ba07.js.map
dolphinscheduler-ui/dist/js/7.ab2e297.js
dolphinscheduler-ui/dist/js/7.ab2e297.js.map
dolphinscheduler-ui/dist/js/8.83ff814.js
dolphinscheduler-ui/dist/js/8.83ff814.js.map
dolphinscheduler-ui/dist/js/9.39cb29f.js
dolphinscheduler-ui/dist/js/9.39cb29f.js.map
dolphinscheduler-ui/dist/js/common.733e342.js
dolphinscheduler-ui/dist/js/common.733e342.js.map
dolphinscheduler-ui/dist/js/home/index.78a5d12.js
dolphinscheduler-ui/dist/js/home/index.78a5d12.js.map
dolphinscheduler-ui/dist/js/login/index.291b8e3.js
dolphinscheduler-ui/dist/js/login/index.291b8e3.js.map
dolphinscheduler-ui/dist/lib/external/
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue
/dolphinscheduler-dao/src/main/resources/dao/data_source.properties
.mvn/wrapper/*.jar
!/zookeeper_data/

BIN
.mvn/wrapper/maven-wrapper.jar

Binary file not shown.

467
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml

@@ -1,467 +0,0 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>spring.datasource.initialSize</name>
<value>5</value>
<description>
Init connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minIdle</name>
<value>5</value>
<description>
Min connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxActive</name>
<value>50</value>
<description>
Max connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxWait</name>
<value>60000</value>
<description>
Max wait time for get a connection in milliseconds.
If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases.
If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenEvictionRunsMillis</name>
<value>60000</value>
<description>
Milliseconds for check to close free connections
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenConnectErrorMillis</name>
<value>60000</value>
<description>
The Destroy thread detects the connection interval and closes the physical connection in milliseconds
if the connection idle time is greater than or equal to minEvictableIdleTimeMillis.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minEvictableIdleTimeMillis</name>
<value>300000</value>
<description>
The longest time a connection remains idle without being evicted, in milliseconds
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQuery</name>
<value>SELECT 1</value>
<description>
The SQL used to check whether the connection is valid requires a query statement.
If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work.
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQueryTimeout</name>
<value>3</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
Check whether the connection is valid for timeout, in seconds
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testWhileIdle</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
When applying for a connection,
if it is detected that the connection is idle longer than time Between Eviction Runs Millis,
validation Query is performed to check whether the connection is valid
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnBorrow</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when applying for a connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnReturn</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when the connection is returned
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.defaultAutoCommit</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.keepAlive</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.poolPreparedStatements</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Open PSCache, specify count PSCache for every connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxPoolPreparedStatementPerConnectionSize</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.spring.datasource.filters</name>
<value>stat,wall,log4j</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.connectionProperties</name>
<value>druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.mapper-locations</name>
<value>classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.typeEnumsPackage</name>
<value>org.apache.dolphinscheduler.*.enums</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.typeAliasesPackage</name>
<value>org.apache.dolphinscheduler.dao.entity</value>
<description>
Entity scan, where multiple packages are separated by a comma or semicolon
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.id-type</name>
<value>AUTO</value>
<value-attributes>
<type>value-list</type>
<entries>
<entry>
<value>AUTO</value>
<label>AUTO</label>
</entry>
<entry>
<value>INPUT</value>
<label>INPUT</label>
</entry>
<entry>
<value>ID_WORKER</value>
<label>ID_WORKER</label>
</entry>
<entry>
<value>UUID</value>
<label>UUID</label>
</entry>
</entries>
<selection-cardinality>1</selection-cardinality>
</value-attributes>
<description>
Primary key type AUTO:" database ID AUTO ",
INPUT:" user INPUT ID",
ID_WORKER:" global unique ID (numeric type unique ID)",
UUID:" global unique ID UUID";
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.field-strategy</name>
<value>NOT_NULL</value>
<value-attributes>
<type>value-list</type>
<entries>
<entry>
<value>IGNORED</value>
<label>IGNORED</label>
</entry>
<entry>
<value>NOT_NULL</value>
<label>NOT_NULL</label>
</entry>
<entry>
<value>NOT_EMPTY</value>
<label>NOT_EMPTY</label>
</entry>
</entries>
<selection-cardinality>1</selection-cardinality>
</value-attributes>
<description>
Field policy IGNORED:" ignore judgment ",
NOT_NULL:" not NULL judgment "),
NOT_EMPTY:" not NULL judgment"
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.column-underline</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.logic-delete-value</name>
<value>1</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.logic-not-delete-value</name>
<value>0</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.global-config.db-config.banner</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.map-underscore-to-camel-case</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.cache-enabled</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.call-setters-on-nulls</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>mybatis-plus.configuration.jdbc-type-for-null</name>
<value>null</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.exec.task.num</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.retryTimes</name>
<value>5</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.interval</name>
<value>1000</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.reserved.memory</name>
<value>0.1</value>
<value-attributes>
<type>float</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.fetch.task.num</name>
<value>3</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.reserved.memory</name>
<value>0.1</value>
<value-attributes>
<type>float</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

6
ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json → ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json

@@ -65,7 +65,7 @@
"enabled": true, "enabled": true,
"source": { "source": {
"type": "SCRIPT", "type": "SCRIPT",
"path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py",
"parameters": [ "parameters": [
{ {
@@ -98,7 +98,7 @@
"enabled": true, "enabled": true,
"source": { "source": {
"type": "SCRIPT", "type": "SCRIPT",
"path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py",
"parameters": [ "parameters": [
{ {
@@ -131,7 +131,7 @@
"enabled": true, "enabled": true,
"source": { "source": {
"type": "SCRIPT", "type": "SCRIPT",
"path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py",
"parameters": [ "parameters": [
{ {

7
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml

@@ -90,13 +90,6 @@
<on-ambari-upgrade add="true"/> <on-ambari-upgrade add="true"/>
</property> </property>
<property>
<name>xls.file.path</name>
<value>/tmp/xls</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property> <property>
<name>enterprise.wechat.enable</name> <name>enterprise.wechat.enable</name>
<value>false</value> <value>false</value>

16
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml

@@ -34,6 +34,12 @@
<description> <description>
</description> </description>
</property> </property>
<property>
<name>server.servlet.context-path</name>
<value>/dolphinscheduler/</value>
<description>
</description>
</property>
<property> <property>
<name>spring.servlet.multipart.max-file-size</name> <name>spring.servlet.multipart.max-file-size</name>
<value>1024</value> <value>1024</value>
@@ -68,4 +74,14 @@
<value>UTF-8</value> <value>UTF-8</value>
<description></description> <description></description>
</property> </property>
<property>
<name>spring.messages.basename</name>
<value>i18n/messages</value>
<description></description>
</property>
<property>
<name>security.authentication.type</name>
<value>PASSWORD</value>
<description></description>
</property>
</configuration> </configuration>

0
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml

206
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml

@@ -0,0 +1,206 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>spring.datasource.initialSize</name>
<value>5</value>
<description>
Init connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minIdle</name>
<value>5</value>
<description>
Min connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxActive</name>
<value>50</value>
<description>
Max connection number
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxWait</name>
<value>60000</value>
<description>
Max wait time for get a connection in milliseconds.
If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases.
If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenEvictionRunsMillis</name>
<value>60000</value>
<description>
Milliseconds for check to close free connections
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.timeBetweenConnectErrorMillis</name>
<value>60000</value>
<description>
The Destroy thread detects the connection interval and closes the physical connection in milliseconds
if the connection idle time is greater than or equal to minEvictableIdleTimeMillis.
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.minEvictableIdleTimeMillis</name>
<value>300000</value>
<description>
The longest time a connection remains idle without being evicted, in milliseconds
</description>
<value-attributes>
<type>int</type>
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQuery</name>
<value>SELECT 1</value>
<description>
The SQL used to check whether the connection is valid requires a query statement.
If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work.
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.validationQueryTimeout</name>
<value>3</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
Check whether the connection is valid for timeout, in seconds
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testWhileIdle</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
When applying for a connection,
if it is detected that the connection is idle longer than time Between Eviction Runs Millis,
validation Query is performed to check whether the connection is valid
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnBorrow</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when applying for a connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.testOnReturn</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Execute validation to check if the connection is valid when the connection is returned
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.defaultAutoCommit</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.keepAlive</name>
<value>false</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.poolPreparedStatements</name>
<value>true</value>
<value-attributes>
<type>boolean</type>
</value-attributes>
<description>
Open PSCache, specify count PSCache for every connection
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.maxPoolPreparedStatementPerConnectionSize</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.spring.datasource.filters</name>
<value>stat,wall,log4j</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>spring.datasource.connectionProperties</name>
<value>druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000</value>
<description></description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

0
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml

88
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml

@@ -0,0 +1,88 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>master.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master execute thread num</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.exec.task.num</name>
<value>20</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master execute task number in parallel</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master heartbeat interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.retryTimes</name>
<value>5</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master commit task retry times</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.task.commit.interval</name>
<value>1000</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master commit task interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>only less than cpu avg load, master server can work. default value : the number of cpu cores * 2</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.reserved.memory</name>
<value>0.3</value>
<description>only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G.</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>master.listen.port</name>
<value>5678</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>master listen port</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

23
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml

@@ -106,26 +106,21 @@
<description></description> <description></description>
</property> </property>
<property> <property>
<name>org.quartz.jobStore.dataSource</name> <name>org.quartz.jobStore.acquireTriggersWithinLock</name>
<value>myDs</value> <value>true</value>
<description></description> <value-attributes>
</property> <type>boolean</type>
<property> </value-attributes>
<name>org.quartz.dataSource.myDs.connectionProvider.class</name>
<value>org.apache.dolphinscheduler.server.quartz.DruidConnectionProvider</value>
<description></description> <description></description>
</property> </property>
<property> <property>
<name>org.quartz.dataSource.myDs.maxConnections</name> <name>org.quartz.jobStore.dataSource</name>
<value>10</value> <value>myDs</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description></description> <description></description>
</property> </property>
<property> <property>
<name>org.quartz.dataSource.myDs.validationQuery</name> <name>org.quartz.dataSource.myDs.connectionProvider.class</name>
<value>select 1</value> <value>org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider</value>
<description></description> <description></description>
</property> </property>
</configuration> </configuration>
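
Reading the hunk above together with the params.py changes later in this commit: the per-datasource maxConnections and validationQuery keys are dropped, org.quartz.jobStore.acquireTriggersWithinLock is added, the DruidConnectionProvider moves to the org.apache.dolphinscheduler.service.quartz package, and the org.quartz.dataSource.myDs.* connection values now come from params.py. A minimal Python sketch of how the final quartz map might be assembled; the JDBC URL, user and password below are placeholder values, not taken from this commit:

    # Hedged sketch: the dolphin-quartz keys above combined with the datasource
    # values that params.py (further down in this commit) adds under
    # org.quartz.dataSource.myDs.*. The JDBC values are placeholders.
    dolphin_database_config = {
        "driverDelegateClass": "org.quartz.impl.jdbcjobstore.StdJDBCDelegate",
        "dolphin_database_driver": "com.mysql.jdbc.Driver",
        "dolphin_database_url": "jdbc:mysql://db-host:3306/dolphinscheduler",
        "dolphin_database_username": "ds_user",
        "dolphin_database_password": "ds_password",
    }

    dolphin_quartz_map = {
        # keys kept or added in the dolphin-quartz configuration above
        "org.quartz.jobStore.driverDelegateClass": dolphin_database_config["driverDelegateClass"],
        "org.quartz.jobStore.acquireTriggersWithinLock": "true",
        "org.quartz.jobStore.dataSource": "myDs",
        "org.quartz.dataSource.myDs.connectionProvider.class":
            "org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider",
        # connection values injected from the database settings by params.py
        "org.quartz.dataSource.myDs.driver": dolphin_database_config["dolphin_database_driver"],
        "org.quartz.dataSource.myDs.URL": dolphin_database_config["dolphin_database_url"],
        "org.quartz.dataSource.myDs.user": dolphin_database_config["dolphin_database_username"],
        "org.quartz.dataSource.myDs.password": dolphin_database_config["dolphin_database_password"],
    }

    # Emit the map as properties lines, as the quartz.properties.j2 template would.
    for key, value in sorted(dolphin_quartz_map.items()):
        print("%s=%s" % (key, value))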

67
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml

@@ -0,0 +1,67 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>worker.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker execute thread num</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker heartbeat interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.reserved.memory</name>
<value>0.3</value>
<description>only larger than reserved memory, worker server can work. default value : physical memory * 1/10, unit is G.</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.listen.port</name>
<value>1234</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker listen port</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.groups</name>
<value>default</value>
<description>default worker group</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

76
ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml

@@ -0,0 +1,76 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>zookeeper.dolphinscheduler.root</name>
<value>/dolphinscheduler</value>
<description>
dolphinscheduler root directory
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.session.timeout</name>
<value>300</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.connection.timeout</name>
<value>300</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.retry.base.sleep</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.retry.max.sleep</name>
<value>30000</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>zookeeper.retry.maxtime</name>
<value>5</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>
</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration>

4
ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml → ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml

@@ -22,7 +22,7 @@
<name>DOLPHIN</name> <name>DOLPHIN</name>
<displayName>Dolphin Scheduler</displayName> <displayName>Dolphin Scheduler</displayName>
<comment>分布式易扩展的可视化DAG工作流任务调度系统</comment> <comment>分布式易扩展的可视化DAG工作流任务调度系统</comment>
<version>1.2.1</version> <version>1.3.3</version>
<components> <components>
<component> <component>
<name>DOLPHIN_MASTER</name> <name>DOLPHIN_MASTER</name>
@@ -103,7 +103,7 @@
<osFamily>any</osFamily> <osFamily>any</osFamily>
<packages> <packages>
<package> <package>
<name>apache-dolphinscheduler-incubating-1.2.1*</name> <name>apache-dolphinscheduler-incubating*</name>
</package> </package>
</packages> </packages>
</osSpecific> </osSpecific>

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py

@@ -26,7 +26,8 @@ class DolphinAlertService(Script):
import params import params
env.set_params(params) env.set_params(params)
self.install_packages(env) self.install_packages(env)
Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) Execute(('chmod', '-R', '777', params.dolphin_home))
Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
def configure(self, env): def configure(self, env):
import params import params

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py

@@ -26,7 +26,8 @@ class DolphinApiService(Script):
import params import params
env.set_params(params) env.set_params(params)
self.install_packages(env) self.install_packages(env)
Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) Execute(('chmod', '-R', '777', params.dolphin_home))
Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
def configure(self, env): def configure(self, env):
import params import params

46
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py

@@ -42,31 +42,12 @@ def dolphin_env():
create_parents=True create_parents=True
) )
Directory(params.dolphin_alert_map['xls.file.path'],
mode=0777,
owner=params.dolphin_user,
group=params.dolphin_group,
create_parents=True
)
Directory(params.dolphin_common_map['data.basedir.path'], Directory(params.dolphin_common_map['data.basedir.path'],
mode=0777, mode=0777,
owner=params.dolphin_user, owner=params.dolphin_user,
group=params.dolphin_group, group=params.dolphin_group,
create_parents=True create_parents=True
) )
Directory(params.dolphin_common_map['data.download.basedir.path'],
mode=0777,
owner=params.dolphin_user,
group=params.dolphin_group,
create_parents=True
)
Directory(params.dolphin_common_map['process.exec.basepath'],
mode=0777,
owner=params.dolphin_user,
group=params.dolphin_group,
create_parents=True
)
File(format(params.dolphin_env_path), File(format(params.dolphin_env_path),
@@ -79,7 +60,21 @@ def dolphin_env():
File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"),
mode=0755, mode=0755,
content=Template("dolphin-daemon.j2"), content=Template("dolphin-daemon.sh.j2"),
owner=params.dolphin_user,
group=params.dolphin_group
)
File(format(params.dolphin_conf_dir + "/master.properties"),
mode=0755,
content=Template("master.properties.j2"),
owner=params.dolphin_user,
group=params.dolphin_group
)
File(format(params.dolphin_conf_dir + "/worker.properties"),
mode=0755,
content=Template("worker.properties.j2"),
owner=params.dolphin_user, owner=params.dolphin_user,
group=params.dolphin_group group=params.dolphin_group
) )
@@ -92,9 +87,9 @@ def dolphin_env():
group=params.dolphin_group group=params.dolphin_group
) )
File(format(params.dolphin_conf_dir + "/application.properties"), File(format(params.dolphin_conf_dir + "/datasource.properties"),
mode=0755, mode=0755,
content=Template("application.properties.j2"), content=Template("datasource.properties.j2"),
owner=params.dolphin_user, owner=params.dolphin_user,
group=params.dolphin_group group=params.dolphin_group
) )
@@ -119,3 +114,10 @@ def dolphin_env():
owner=params.dolphin_user, owner=params.dolphin_user,
group=params.dolphin_group group=params.dolphin_group
) )
File(format(params.dolphin_conf_dir + "/zookeeper.properties"),
mode=0755,
content=Template("zookeeper.properties.j2"),
owner=params.dolphin_user,
group=params.dolphin_group
)
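
The File(...) resources above render the new *.properties.j2 templates against the maps built in params.py. The template bodies themselves are not part of this hunk; as a rough, assumed illustration of what that rendering amounts to (an inline template standing in for the shipped zookeeper.properties.j2, with made-up values), each key/value pair of the map is emitted as one properties line:

    # Rough sketch of the template-rendering step, using jinja2 directly.
    # The inline template string and the sample map are illustrative assumptions;
    # the real zookeeper.properties.j2 ships with the Ambari plugin.
    from jinja2 import Template

    dolphin_zookeeper_map = {
        "zookeeper.quorum": "zk1.example.com:2181,zk2.example.com:2181",
        "zookeeper.dolphinscheduler.root": "/dolphinscheduler",
        "zookeeper.session.timeout": "300",
    }

    template = Template(
        "{% for key, value in dolphin_zookeeper_map.items() %}"
        "{{ key }}={{ value }}\n"
        "{% endfor %}"
    )

    # Prints one key=value line per map entry, which is what lands in
    # <dolphin_conf_dir>/zookeeper.properties.
    print(template.render(dolphin_zookeeper_map=dolphin_zookeeper_map))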

4
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py

@@ -26,8 +26,8 @@ class DolphinLoggerService(Script):
import params import params
env.set_params(params) env.set_params(params)
self.install_packages(env) self.install_packages(env)
Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) Execute(('chmod', '-R', '777', params.dolphin_home))
Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
def configure(self, env): def configure(self, env):
import params import params
params.pika_slave = True params.pika_slave = True

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py

@@ -27,7 +27,8 @@ class DolphinMasterService(Script):
import params import params
env.set_params(params) env.set_params(params)
self.install_packages(env) self.install_packages(env)
Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) Execute(('chmod', '-R', '777', params.dolphin_home))
Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
def configure(self, env): def configure(self, env):
import params import params

3
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py

@@ -26,7 +26,8 @@ class DolphinWorkerService(Script):
import params import params
env.set_params(params) env.set_params(params)
self.install_packages(env) self.install_packages(env)
Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) Execute(('chmod', '-R', '777', params.dolphin_home))
Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home))
def configure(self, env): def configure(self, env):
import params import params

85
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py

@@ -54,11 +54,8 @@ dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content']
# database config # database config
dolphin_database_config = {} dolphin_database_config = {}
dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type']
dolphin_database_config['dolphin_database_host'] = dolphin_env_map['dolphin.database.host']
dolphin_database_config['dolphin_database_port'] = dolphin_env_map['dolphin.database.port']
dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username']
dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password']
if 'mysql' == dolphin_database_config['dolphin_database_type']: if 'mysql' == dolphin_database_config['dolphin_database_type']:
dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver'
dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate'
@@ -72,6 +69,10 @@ else:
+ ':' + dolphin_env_map['dolphin.database.port'] \ + ':' + dolphin_env_map['dolphin.database.port'] \
+ '/dolphinscheduler' + '/dolphinscheduler'
# application-alert.properties # application-alert.properties
dolphin_alert_map = {} dolphin_alert_map = {}
wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token'
@@ -79,27 +80,22 @@ wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&
wechat_team_send_msg = '{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\",\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}' wechat_team_send_msg = '{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\",\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}'
wechat_user_send_msg = '{\"touser\":\"{toUser}\",\"agentid\":\"{agentId}\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}' wechat_user_send_msg = '{\"touser\":\"{toUser}\",\"agentid\":\"{agentId}\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}'
dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url dolphin_alert_config_map = config['configurations']['dolphin-alert']
dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url
dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg if dolphin_alert_config_map['enterprise.wechat.enable']:
dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url
dolphin_alert_map.update(config['configurations']['dolphin-alert']) dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url
dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg
dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg
dolphin_alert_map.update(dolphin_alert_config_map)
# application-api.properties # application-api.properties
dolphin_app_api_map = {} dolphin_app_api_map = {}
dolphin_app_api_map['logging.config'] = 'classpath:apiserver_logback.xml'
dolphin_app_api_map['spring.messages.basename'] = 'i18n/messages'
dolphin_app_api_map['server.servlet.context-path'] = '/dolphinscheduler/'
dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) dolphin_app_api_map.update(config['configurations']['dolphin-application-api'])
# application-dao.properties
dolphin_application_map = {}
dolphin_application_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource'
dolphin_application_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver']
dolphin_application_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url']
dolphin_application_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username']
dolphin_application_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password']
dolphin_application_map.update(config['configurations']['dolphin-application'])
# common.properties # common.properties
dolphin_common_map = {} dolphin_common_map = {}
@@ -118,33 +114,42 @@ else:
dolphin_common_map_tmp = config['configurations']['dolphin-common'] dolphin_common_map_tmp = config['configurations']['dolphin-common']
data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] data_basedir_path = dolphin_common_map_tmp['data.basedir.path']
process_exec_basepath = data_basedir_path + '/exec'
data_download_basedir_path = data_basedir_path + '/download'
dolphin_common_map['process.exec.basepath'] = process_exec_basepath
dolphin_common_map['data.download.basedir.path'] = data_download_basedir_path
dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path
dolphin_common_map.update(config['configurations']['dolphin-common'])
zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", [])
if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']:
    clientPort = config['configurations']['zoo.cfg']['clientPort']
    zookeeperPort = ":" + clientPort + ","
    dolphin_common_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort
dolphin_common_map.update(config['configurations']['dolphin-common'])
# datasource.properties
dolphin_datasource_map = {}
dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource'
dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver']
dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url']
dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username']
dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password']
dolphin_datasource_map.update(config['configurations']['dolphin-datasource'])
# master.properties
dolphin_master_map = config['configurations']['dolphin-master']
# quartz.properties # quartz.properties
dolphin_quartz_map = {} dolphin_quartz_map = {}
dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass'] dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass']
dolphin_quartz_map['org.quartz.dataSource.myDs.driver'] = dolphin_database_config['dolphin_database_driver']
dolphin_quartz_map['org.quartz.dataSource.myDs.URL'] = dolphin_database_config['dolphin_database_url']
dolphin_quartz_map['org.quartz.dataSource.myDs.user'] = dolphin_database_config['dolphin_database_username']
dolphin_quartz_map['org.quartz.dataSource.myDs.password'] = dolphin_database_config['dolphin_database_password']
dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) dolphin_quartz_map.update(config['configurations']['dolphin-quartz'])
# if 'ganglia_server_host' in config['clusterHostInfo'] and \
#     len(config['clusterHostInfo']['ganglia_server_host'])>0:
#   ganglia_installed = True
#   ganglia_server = config['clusterHostInfo']['ganglia_server_host'][0]
#   ganglia_report_interval = 60
# else:
#   ganglia_installed = False
# worker.properties
dolphin_worker_map = config['configurations']['dolphin-worker']
# zookeeper.properties
dolphin_zookeeper_map={}
zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", [])
if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']:
    clientPort = config['configurations']['zoo.cfg']['clientPort']
    zookeeperPort = ":" + clientPort + ","
    dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort
dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper'])
if 'spring.servlet.multipart.max-file-size' in dolphin_app_api_map:
    file_size = dolphin_app_api_map['spring.servlet.multipart.max-file-size']
    dolphin_app_api_map['spring.servlet.multipart.max-file-size'] = file_size + "MB"
if 'spring.servlet.multipart.max-request-size' in dolphin_app_api_map:
    request_size = dolphin_app_api_map['spring.servlet.multipart.max-request-size']
    dolphin_app_api_map['spring.servlet.multipart.max-request-size'] = request_size + "MB"
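
For reference, a minimal self-contained Python sketch (not part of the patch; names are illustrative) of two pieces of the params.py logic above: building the zookeeper.quorum string from the host list and client port, and appending the "MB" unit that Spring expects to the multipart size values.

def build_zookeeper_quorum(hosts, client_port):
    # Mirrors dolphin_zookeeper_map['zookeeper.quorum'] above: "host1:2181,host2:2181"
    if not hosts or not client_port:
        return ""
    separator = ":" + client_port + ","
    return separator.join(hosts) + ":" + client_port

def append_mb_suffix(props, key):
    # Ambari stores the size as a bare number; Spring wants an explicit unit.
    if key in props:
        props[key] = props[key] + "MB"
    return props

print(build_zookeeper_quorum(["zk1", "zk2"], "2181"))  # zk1:2181,zk2:2181
print(append_mb_suffix({"spring.servlet.multipart.max-file-size": "1024"},
                       "spring.servlet.multipart.max-file-size"))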

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py → ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_datasource_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}
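
The new *.properties.j2 templates in this patch all render the same way, one key=value line per entry of the corresponding map (iteritems() because the Ambari agent renders Jinja2 under Python 2). A minimal Python 3 sketch of the effect, with assumed example values:

datasource_map = {
    "spring.datasource.url": "jdbc:postgresql://db:5432/dolphinscheduler",  # example value
    "spring.datasource.username": "dolphin",                                # example value
}
print("\n".join("{0}={1}".format(key, value) for key, value in datasource_map.items()))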

13
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2

@ -48,22 +48,19 @@ pid={{dolphin_pidfile_dir}}/$command.pid
cd $DOLPHINSCHEDULER_HOME cd $DOLPHINSCHEDULER_HOME
if [ "$command" = "api-server" ]; then if [ "$command" = "api-server" ]; then
LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/apiserver_logback.xml -Dspring.profiles.active=api" LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api"
CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer
elif [ "$command" = "master-server" ]; then elif [ "$command" = "master-server" ]; then
LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/master_logback.xml -Ddruid.mysql.usePingMethod=false" LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false"
CLASS=org.apache.dolphinscheduler.server.master.MasterServer CLASS=org.apache.dolphinscheduler.server.master.MasterServer
elif [ "$command" = "worker-server" ]; then elif [ "$command" = "worker-server" ]; then
LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/worker_logback.xml -Ddruid.mysql.usePingMethod=false" LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false"
CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer
elif [ "$command" = "alert-server" ]; then elif [ "$command" = "alert-server" ]; then
LOG_FILE="-Dlogback.configurationFile={{dolphin_conf_dir}}/alert_logback.xml" LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml"
CLASS=org.apache.dolphinscheduler.alert.AlertServer CLASS=org.apache.dolphinscheduler.alert.AlertServer
elif [ "$command" = "logger-server" ]; then elif [ "$command" = "logger-server" ]; then
CLASS=org.apache.dolphinscheduler.server.rpc.LoggerServer CLASS=org.apache.dolphinscheduler.server.log.LoggerServer
elif [ "$command" = "combined-server" ]; then
LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/combined_logback.xml -Dspring.profiles.active=api -Dserver.is-combined-server=true"
CLASS=org.apache.dolphinscheduler.api.CombinedApplicationServer
else else
echo "Error: No command named \`$command' was found." echo "Error: No command named \`$command' was found."
exit 1 exit 1
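
The command dispatch performed by dolphin-daemon.sh.j2 above, restated as a small Python lookup table (a sketch only; the actual script stays in shell and the values mirror the new column of the diff):

SERVERS = {  # command -> (logback config, main class)
    "api-server": ("logback-api.xml", "org.apache.dolphinscheduler.api.ApiApplicationServer"),
    "master-server": ("logback-master.xml", "org.apache.dolphinscheduler.server.master.MasterServer"),
    "worker-server": ("logback-worker.xml", "org.apache.dolphinscheduler.server.worker.WorkerServer"),
    "alert-server": ("logback-alert.xml", "org.apache.dolphinscheduler.alert.AlertServer"),
    "logger-server": (None, "org.apache.dolphinscheduler.server.log.LoggerServer"),
}

def resolve(command):
    if command not in SERVERS:
        raise SystemExit("Error: No command named `%s' was found." % command)
    return SERVERS[command]

print(resolve("master-server"))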

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_master_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}

0
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 → ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_worker_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}

20
ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2

@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
{% for key, value in dolphin_zookeeper_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}

0
ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json → ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json

84
ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json → ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json

@ -151,18 +151,40 @@
"subsection-name": "env-row1-col2" "subsection-name": "env-row1-col2"
}, },
{ {
"config": "dolphin-common/res.upload.startup.type", "config": "dolphin-common/resource.storage.type",
"subsection-name": "dynamic-row1-col1" "subsection-name": "dynamic-row1-col1"
}, },
{
"config": "dolphin-common/resource.upload.path",
"subsection-name": "dynamic-row1-col1",
"depends-on": [
{
"configs":[
"dolphin-common/resource.storage.type"
],
"if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3",
"then": {
"property_value_attributes": {
"visible": true
}
},
"else": {
"property_value_attributes": {
"visible": false
}
}
}
]
},
{ {
"config": "dolphin-common/hdfs.root.user", "config": "dolphin-common/hdfs.root.user",
"subsection-name": "dynamic-row1-col1", "subsection-name": "dynamic-row1-col1",
"depends-on": [ "depends-on": [
{ {
"configs":[ "configs":[
"dolphin-common/res.upload.startup.type" "dolphin-common/resource.storage.type"
], ],
"if": "${dolphin-common/res.upload.startup.type} === HDFS", "if": "${dolphin-common/resource.storage.type} === HDFS",
"then": { "then": {
"property_value_attributes": { "property_value_attributes": {
"visible": true "visible": true
@ -182,9 +204,9 @@
"depends-on": [ "depends-on": [
{ {
"configs":[ "configs":[
"dolphin-common/res.upload.startup.type" "dolphin-common/resource.storage.type"
], ],
"if": "${dolphin-common/res.upload.startup.type} === HDFS", "if": "${dolphin-common/resource.storage.type} === HDFS",
"then": { "then": {
"property_value_attributes": { "property_value_attributes": {
"visible": true "visible": true
@ -204,9 +226,9 @@
"depends-on": [ "depends-on": [
{ {
"configs":[ "configs":[
"dolphin-common/res.upload.startup.type" "dolphin-common/resource.storage.type"
], ],
"if": "${dolphin-common/res.upload.startup.type} === HDFS", "if": "${dolphin-common/resource.storage.type} === HDFS",
"then": { "then": {
"property_value_attributes": { "property_value_attributes": {
"visible": true "visible": true
@ -226,9 +248,9 @@
"depends-on": [ "depends-on": [
{ {
"configs":[ "configs":[
"dolphin-common/res.upload.startup.type" "dolphin-common/resource.storage.type"
], ],
"if": "${dolphin-common/res.upload.startup.type} === S3", "if": "${dolphin-common/resource.storage.type} === S3",
"then": { "then": {
"property_value_attributes": { "property_value_attributes": {
"visible": true "visible": true
@ -248,9 +270,9 @@
"depends-on": [ "depends-on": [
{ {
"configs":[ "configs":[
"dolphin-common/res.upload.startup.type" "dolphin-common/resource.storage.type"
], ],
"if": "${dolphin-common/res.upload.startup.type} === S3", "if": "${dolphin-common/resource.storage.type} === S3",
"then": { "then": {
"property_value_attributes": { "property_value_attributes": {
"visible": true "visible": true
@ -270,9 +292,9 @@
"depends-on": [ "depends-on": [
{ {
"configs":[ "configs":[
"dolphin-common/res.upload.startup.type" "dolphin-common/resource.storage.type"
], ],
"if": "${dolphin-common/res.upload.startup.type} === S3", "if": "${dolphin-common/resource.storage.type} === S3",
"then": { "then": {
"property_value_attributes": { "property_value_attributes": {
"visible": true "visible": true
@ -356,6 +378,28 @@
} }
] ]
}, },
{
"config": "dolphin-common/kerberos.expire.time",
"subsection-name": "dynamic-row1-col2",
"depends-on": [
{
"configs":[
"dolphin-common/hadoop.security.authentication.startup.state"
],
"if": "${dolphin-common/hadoop.security.authentication.startup.state}",
"then": {
"property_value_attributes": {
"visible": true
}
},
"else": {
"property_value_attributes": {
"visible": false
}
}
}
]
},
{ {
"config": "dolphin-alert/enterprise.wechat.enable", "config": "dolphin-alert/enterprise.wechat.enable",
"subsection-name": "dynamic-row1-col3" "subsection-name": "dynamic-row1-col3"
@ -505,11 +549,17 @@
} }
}, },
{ {
"config": "dolphin-common/res.upload.startup.type", "config": "dolphin-common/resource.storage.type",
"widget": { "widget": {
"type": "combo" "type": "combo"
} }
}, },
{
"config": "dolphin-common/resource.upload.path",
"widget": {
"type": "text-field"
}
},
{ {
"config": "dolphin-common/hdfs.root.user", "config": "dolphin-common/hdfs.root.user",
"widget": { "widget": {
@ -570,6 +620,12 @@
"type": "text-field" "type": "text-field"
} }
}, },
{
"config": "dolphin-common/kerberos.expire.time",
"widget": {
"type": "text-field"
}
},
{ {
"config": "dolphin-alert/enterprise.wechat.enable", "config": "dolphin-alert/enterprise.wechat.enable",
"widget": { "widget": {

13
ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 → docker/kubernetes/dolphinscheduler/requirements.yaml

@ -14,7 +14,12 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# #
{% for key, value in dolphin_application_map.iteritems() -%}
{{key}}={{value}}
{% endfor %}
dependencies:
  - name: postgresql
    version: 8.x.x
    repository: https://charts.bitnami.com/bitnami
    condition: postgresql.enabled
  - name: zookeeper
    version: 5.x.x
    repository: https://charts.bitnami.com/bitnami
    condition: redis.enabled


19
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java

@ -95,6 +95,25 @@ public class BaseService {
} }
/**
* check
*
* @param result result
* @param bool bool
* @param userNoOperationPerm status
* @return check result
*/
protected boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
//only admin can operate
if (bool) {
result.put(Constants.STATUS, userNoOperationPerm);
result.put(Constants.MSG, userNoOperationPerm.getMsg());
return true;
}
return false;
}
/** /**
* get cookie info by name * get cookie info by name
* *

24
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java

@ -86,9 +86,14 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @param token token string * @param token token string
* @return create result code * @return create result code
*/ */
public Map<String, Object> createToken(int userId, String expireTime, String token) { public Map<String, Object> createToken(User loginUser, int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
if (!hasPerm(loginUser,userId)){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (userId <= 0) { if (userId <= 0) {
throw new IllegalArgumentException("User id should not less than or equals to 0."); throw new IllegalArgumentException("User id should not less than or equals to 0.");
} }
@ -118,8 +123,12 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @param expireTime token expire time * @param expireTime token expire time
* @return token string * @return token string
*/ */
public Map<String, Object> generateToken(int userId, String expireTime) { public Map<String, Object> generateToken(User loginUser, int userId, String expireTime) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
if (!hasPerm(loginUser,userId)){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
String token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis()); String token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis());
result.put(Constants.DATA_LIST, token); result.put(Constants.DATA_LIST, token);
putMsg(result, Status.SUCCESS); putMsg(result, Status.SUCCESS);
@ -144,8 +153,8 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
return result; return result;
} }
if (loginUser.getId() != accessToken.getUserId() &&
loginUser.getUserType() != UserType.ADMIN_USER) { if (!hasPerm(loginUser,accessToken.getUserId())){
putMsg(result, Status.USER_NO_OPERATION_PERM); putMsg(result, Status.USER_NO_OPERATION_PERM);
return result; return result;
} }
@ -164,9 +173,12 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @param token token string * @param token token string
* @return update result code * @return update result code
*/ */
public Map<String, Object> updateToken(int id, int userId, String expireTime, String token) { public Map<String, Object> updateToken(User loginUser, int id, int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>(5);
if (!hasPerm(loginUser,userId)){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
AccessToken accessToken = accessTokenMapper.selectById(id); AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) { if (accessToken == null) {
logger.error("access token not exist, access token id {}", id); logger.error("access token not exist, access token id {}", id);

1
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java

@ -130,7 +130,6 @@ public class LoggerServiceImpl implements LoggerService {
logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath())); logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()));
} }
/** /**
* get host * get host
* *

105
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@ -49,6 +49,7 @@ import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.DateUtils;
@ -166,7 +167,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
String processDefinitionJson, String processDefinitionJson,
String desc, String desc,
String locations, String locations,
String connects) throws JsonProcessingException { String connects) {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName); Project project = projectMapper.queryByName(projectName);
@ -229,23 +230,34 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/** /**
* get resource ids * get resource ids
*
* @param processData process data * @param processData process data
* @return resource ids * @return resource ids
*/ */
private String getResourceIds(ProcessData processData) { private String getResourceIds(ProcessData processData) {
return Optional.ofNullable(processData.getTasks()) List<TaskNode> tasks = processData.getTasks();
.orElse(Collections.emptyList()) Set<Integer> resourceIds = new HashSet<>();
.stream() for (TaskNode taskNode : tasks) {
.map(taskNode -> TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams())) String taskParameter = taskNode.getParams();
.filter(Objects::nonNull) AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter);
.flatMap(parameters -> parameters.getResourceFilesList().stream()) if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) {
Set<Integer> tempSet = params.getResourceFilesList().
stream()
.filter(t -> t.getId() != 0)
.map(ResourceInfo::getId) .map(ResourceInfo::getId)
.distinct() .collect(Collectors.toSet());
.map(Objects::toString) resourceIds.addAll(tempSet);
.collect(Collectors.joining(",")); }
} }
StringBuilder sb = new StringBuilder();
for (int i : resourceIds) {
if (sb.length() > 0) {
sb.append(",");
}
sb.append(i);
}
return sb.toString();
}
/** /**
* query process definition list * query process definition list
* *
@ -255,7 +267,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
*/ */
public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) { public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
HashMap<String, Object> result = new HashMap<>(); HashMap<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName); Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -348,10 +360,15 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
* @param connects connects for nodes * @param connects connects for nodes
* @return update result code * @return update result code
*/ */
public Map<String, Object> updateProcessDefinition(User loginUser, String projectName, int id, String name, public Map<String, Object> updateProcessDefinition(User loginUser,
String processDefinitionJson, String desc, String projectName,
String locations, String connects) { int id,
Map<String, Object> result = new HashMap<>(); String name,
String processDefinitionJson,
String desc,
String locations,
String connects) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName); Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -462,7 +479,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
@Transactional(rollbackFor = RuntimeException.class) @Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName); Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -674,6 +691,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
} }
} }
/**
* get export process metadata string
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata string
*/
public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
//create workflow json file
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition));
}
/** /**
* get export process metadata string * get export process metadata string
* *
@ -758,7 +786,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
*/ */
@Transactional(rollbackFor = RuntimeException.class) @Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) { public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>(5);
String processMetaJson = FileUtils.file2String(file); String processMetaJson = FileUtils.file2String(file);
List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class); List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
@ -927,7 +955,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
} }
//recursive sub-process parameter correction map key for old process id value for new process id //recursive sub-process parameter correction map key for old process id value for new process id
Map<Integer, Integer> subProcessIdMap = new HashMap<>(); Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements()) List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText())) .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
@ -1215,7 +1243,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
*/ */
public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) { public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
HashMap<String, Object> result = new HashMap<>(); HashMap<String, Object> result = new HashMap<>(5);
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId); List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId);
result.put(Constants.DATA_LIST, resourceList); result.put(Constants.DATA_LIST, resourceList);
@ -1425,7 +1453,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
Integer processId, Integer processId,
Project targetProject) throws JsonProcessingException { Project targetProject) throws JsonProcessingException {
Map<String, Object> result = new HashMap<>(); Map<String, Object> result = new HashMap<>(5);
ProcessDefinition processDefinition = processDefineMapper.selectById(processId); ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) { if (processDefinition == null) {
@ -1444,6 +1472,41 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
} }
} }
/**
* copy process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return copy result code
*/
public Map<String, Object> copyProcessDefinition(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
return result;
} else {
return createProcessDefinition(
loginUser,
projectName,
processDefinition.getName() + "_copy_" + System.currentTimeMillis(),
processDefinition.getProcessDefinitionJson(),
processDefinition.getDescription(),
processDefinition.getLocations(),
processDefinition.getConnects());
}
}
/** /**
* batch copy process definition * batch copy process definition
* *
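
A hedged Python sketch of the id-collection logic getResourceIds now uses above: gather distinct, non-zero resource ids across all task nodes and join them with commas. The sort is only to make the example deterministic; the Java HashSet has no defined iteration order.

def collect_resource_ids(tasks):
    resource_ids = set()
    for task in tasks:
        for resource in task.get("resource_files", []):
            if resource.get("id", 0) != 0:
                resource_ids.add(resource["id"])
    return ",".join(str(i) for i in sorted(resource_ids))

print(collect_resource_ids([
    {"resource_files": [{"id": 3}, {"id": 0}]},
    {"resource_files": [{"id": 5}, {"id": 3}]},
]))  # 3,5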

19
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java

@ -291,6 +291,25 @@ public class TenantServiceImpl extends BaseService implements TenantService {
return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES); return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES);
} }
/**
* query tenant list
*
* @param tenantCode tenant code
* @return tenant list
*/
public Map<String, Object> queryTenantList(String tenantCode) {
Map<String, Object> result = new HashMap<>(5);
List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode);
if (CollectionUtils.isNotEmpty(resourceList)) {
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.TENANT_NOT_EXIST);
}
return result;
}
/** /**
* query tenant list * query tenant list
* *

8
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java

@ -149,9 +149,11 @@ public class UdfFuncServiceTest {
} }
@Test @Test
public void testQueryResourceList(){ public void testQueryUdfFuncList(){
Mockito.when(udfFuncMapper.getUdfFuncByType(1, 1)).thenReturn(getList()); User user = getLoginUser();
Map<String, Object> result = udfFuncService.queryResourceList(getLoginUser(),1); user.setUserType(UserType.GENERAL_USER);
Mockito.when(udfFuncMapper.getUdfFuncByType(user.getId(), UdfType.HIVE.ordinal())).thenReturn(getList());
Map<String, Object> result = udfFuncService.queryUdfFuncList(user,UdfType.HIVE.ordinal());
logger.info(result.toString()); logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
List<UdfFunc> udfFuncList = (List<UdfFunc>) result.get(Constants.DATA_LIST); List<UdfFunc> udfFuncList = (List<UdfFunc>) result.get(Constants.DATA_LIST);

23
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java

@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
public enum Event {
ACK,
RESULT;
}

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java

@ -42,6 +42,4 @@ public class ResourceInfo {
public void setRes(String res) { public void setRes(String res) {
this.res = res; this.res = res;
} }
} }

1
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.common.task.flink;
import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java

@ -216,7 +216,7 @@ public class SparkParameters extends AbstractParameters {
@Override @Override
public boolean checkParameters() { public boolean checkParameters() {
return mainJar != null && programType != null && sparkVersion != null; return mainJar != null && programType != null;
} }
@Override @Override

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java

@ -131,6 +131,7 @@ public class Command {
WarningType warningType, WarningType warningType,
int warningGroupId, int warningGroupId,
Date scheduleTime, Date scheduleTime,
String workerGroup,
Priority processInstancePriority) { Priority processInstancePriority) {
this.commandType = commandType; this.commandType = commandType;
this.executorId = executorId; this.executorId = executorId;
@ -143,6 +144,7 @@ public class Command {
this.failureStrategy = failureStrategy; this.failureStrategy = failureStrategy;
this.startTime = new Date(); this.startTime = new Date();
this.updateTime = new Date(); this.updateTime = new Date();
this.workerGroup = workerGroup;
this.processInstancePriority = processInstancePriority; this.processInstancePriority = processInstancePriority;
} }

14
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java

@ -82,6 +82,20 @@ public interface ProcessInstanceMapper extends BaseMapper<ProcessInstance> {
* @param endTime endTime * @param endTime endTime
* @return process instance IPage * @return process instance IPage
*/ */
/**
* process instance page
* @param page page
* @param projectId projectId
* @param processDefinitionId processDefinitionId
* @param searchVal searchVal
* @param executorId executorId
* @param statusArray statusArray
* @param host host
* @param startTime startTime
* @param endTime endTime
* @return process instance page
*/
IPage<ProcessInstance> queryProcessInstanceListPaging(Page<ProcessInstance> page, IPage<ProcessInstance> queryProcessInstanceListPaging(Page<ProcessInstance> page,
@Param("projectId") int projectId, @Param("projectId") int projectId,
@Param("processDefinitionId") Integer processDefinitionId, @Param("processDefinitionId") Integer processDefinitionId,

67
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java

@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.upgrade;
import org.apache.dolphinscheduler.common.utils.ConnectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.Map;
/**
* resource dao
*/
public class ResourceDao {
public static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDao.class);
/**
* list all resources
* @param conn connection
* @return map that key is full_name and value is id
*/
Map<String,Integer> listAllResources(Connection conn){
Map<String,Integer> resourceMap = new HashMap<>();
String sql = String.format("SELECT id,full_name FROM t_ds_resources");
ResultSet rs = null;
PreparedStatement pstmt = null;
try {
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
while (rs.next()){
Integer id = rs.getInt(1);
String fullName = rs.getString(2);
resourceMap.put(fullName,id);
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
throw new RuntimeException("sql: " + sql, e);
} finally {
ConnectionUtils.releaseResource(rs, pstmt, conn);
}
return resourceMap;
}
}
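
ResourceDao.listAllResources() above builds a full_name to id lookup from t_ds_resources. The same pattern, sketched with Python's sqlite3 standing in for the upgrade code's JDBC connection:

import sqlite3

def list_all_resources(conn):
    resource_map = {}
    cursor = conn.execute("SELECT id, full_name FROM t_ds_resources")
    try:
        for res_id, full_name in cursor:
            resource_map[full_name] = res_id
    finally:
        cursor.close()
    return resource_map

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t_ds_resources (id INTEGER, full_name TEXT)")
conn.execute("INSERT INTO t_ds_resources VALUES (1, '/udfs/demo.jar')")
print(list_all_resources(conn))  # {'/udfs/demo.jar': 1}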

13
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java

@ -33,6 +33,7 @@ import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@ -303,4 +304,16 @@ public class UdfFuncMapperTest {
authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds); authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds);
Assert.assertTrue(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(udfFuncIds))); Assert.assertTrue(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(udfFuncIds)));
} }
@Test
public void batchUpdateUdfFuncTest(){
//create general user
User generalUser1 = createGeneralUser("user1");
UdfFunc udfFunc = insertOne(generalUser1);
udfFunc.setResourceName("/updateTest");
List<UdfFunc> udfFuncList = new ArrayList<>();
udfFuncList.add(udfFunc);
Assert.assertTrue(udfFuncMapper.batchUpdateUdfFunc(udfFuncList)>0);
}
} }

230
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java

@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.dao.utils;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNode;
@ -27,6 +28,7 @@ import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -34,6 +36,8 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
/** /**
* dag helper test * dag helper test
@ -41,15 +45,17 @@ import java.util.Map;
public class DagHelperTest { public class DagHelperTest {
/** /**
* test task node can submit * test task node can submit
*
* @throws JsonProcessingException if error throws JsonProcessingException * @throws JsonProcessingException if error throws JsonProcessingException
*/ */
@Test @Test
public void testTaskNodeCanSubmit() throws JsonProcessingException { public void testTaskNodeCanSubmit() throws JsonProcessingException {
//1->2->3->5 //1->2->3->5->7
//4->3 //4->3->6
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag(); DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
TaskNode taskNode3 = dag.getNode("3"); TaskNode taskNode3 = dag.getNode("3");
Map<String, TaskInstance > completeTaskList = new HashMap<>(); Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
completeTaskList.putIfAbsent("1", new TaskInstance()); completeTaskList.putIfAbsent("1", new TaskInstance());
Boolean canSubmit = false; Boolean canSubmit = false;
@ -58,27 +64,199 @@ public class DagHelperTest {
node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
TaskNode nodex = dag.getNode("4"); TaskNode nodex = dag.getNode("4");
nodex.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); nodex.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
canSubmit = DagHelper.taskNodeCanSubmit(taskNode3, dag, completeTaskList); canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList);
Assert.assertEquals(canSubmit, true); Assert.assertEquals(canSubmit, true);
// 2forbidden, 3 cannot be submit // 2forbidden, 3 cannot be submit
completeTaskList.putIfAbsent("2", new TaskInstance()); completeTaskList.putIfAbsent("2", new TaskInstance());
TaskNode nodey = dag.getNode("4"); TaskNode nodey = dag.getNode("4");
nodey.setRunFlag(""); nodey.setRunFlag("");
canSubmit = DagHelper.taskNodeCanSubmit(taskNode3, dag, completeTaskList); canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList);
Assert.assertEquals(canSubmit, false); Assert.assertEquals(canSubmit, false);
// 2/3 forbidden submit 5 // 2/3 forbidden submit 5
TaskNode node3 = dag.getNode("3"); TaskNode node3 = dag.getNode("3");
node3.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); node3.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
TaskNode node8 = dag.getNode("8");
node8.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
TaskNode node5 = dag.getNode("5"); TaskNode node5 = dag.getNode("5");
canSubmit = DagHelper.taskNodeCanSubmit(node5, dag, completeTaskList); canSubmit = DagHelper.allDependsForbiddenOrEnd(node5, dag, skipNodeList, completeTaskList);
Assert.assertEquals(canSubmit, true); Assert.assertEquals(canSubmit, true);
} }
/** /**
* 1->2->3->5 * test parse post node list
* 4->3 */
@Test
public void testParsePostNodeList() throws JsonProcessingException {
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
Set<String> postNodes = null;
//complete : null
// expect post: 1/4
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("1"));
Assert.assertTrue(postNodes.contains("4"));
//complete : 1
// expect post: 2/4
completeTaskList.put("1", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("2"));
Assert.assertTrue(postNodes.contains("4"));
// complete : 1/2
// expect post: 4
completeTaskList.put("2", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("4"));
Assert.assertTrue(postNodes.contains("8"));
// complete : 1/2/4
// expect post: 3
completeTaskList.put("4", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("3"));
Assert.assertTrue(postNodes.contains("8"));
// complete : 1/2/4/3
// expect post: 8/6
completeTaskList.put("3", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("8"));
Assert.assertTrue(postNodes.contains("6"));
// complete : 1/2/4/3/8
// expect post: 6/5
completeTaskList.put("8", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("5"));
Assert.assertTrue(postNodes.contains("6"));
// complete : 1/2/4/3/5/6/8
// expect post: 7
completeTaskList.put("6", new TaskInstance());
completeTaskList.put("5", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("7"));
}
/**
* test forbidden post node
* @throws JsonProcessingException
*/
@Test
public void testForbiddenPostNode() throws JsonProcessingException {
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
Set<String> postNodes = null;
// dag: 1-2-3-5-7 4-3-6 2-8-5-7
// forbid:2 complete:1 post:4/8
completeTaskList.put("1", new TaskInstance());
TaskNode node2 = dag.getNode("2");
node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("4"));
Assert.assertTrue(postNodes.contains("8"));
//forbid:2/4 complete:1 post:3/8
TaskNode node4 = dag.getNode("4");
node4.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(2, postNodes.size());
Assert.assertTrue(postNodes.contains("3"));
Assert.assertTrue(postNodes.contains("8"));
//forbid:2/4/5 complete:1/8 post:3
completeTaskList.put("8", new TaskInstance());
TaskNode node5 = dag.getNode("5");
node5.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("3"));
}
/**
* test condition post node
* @throws JsonProcessingException
*/
@Test
public void testConditionPostNode() throws JsonProcessingException {
DAG<String, TaskNode, TaskNodeRelation> dag = generateDag();
Map<String, TaskInstance> completeTaskList = new HashMap<>();
Map<String, TaskNode> skipNodeList = new HashMap<>();
Set<String> postNodes = null;
// dag: 1-2-3-5-7 4-3-6 2-8-5-7
// 3-if
completeTaskList.put("1", new TaskInstance());
completeTaskList.put("2", new TaskInstance());
completeTaskList.put("4", new TaskInstance());
TaskNode node3 = dag.getNode("3");
node3.setType("CONDITIONS");
node3.setConditionResult("{\n" +
" \"successNode\": [5\n" +
" ],\n" +
" \"failedNode\": [6\n" +
" ]\n" +
" }");
completeTaskList.remove("3");
TaskInstance taskInstance = new TaskInstance();
taskInstance.setState(ExecutionStatus.SUCCESS);
//complete 1/2/3/4 expect:8
completeTaskList.put("3", taskInstance);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("8"));
//2.complete 1/2/3/4/8 expect:5 skip:6
completeTaskList.put("8", new TaskInstance());
postNodes = DagHelper.parsePostNodes(null ,skipNodeList, dag, completeTaskList);
Assert.assertTrue(postNodes.contains("5"));
Assert.assertEquals(1, skipNodeList.size());
Assert.assertTrue(skipNodeList.containsKey("6"));
// 3.complete 1/2/3/4/5/8 expect post:7 skip:6
skipNodeList.clear();
TaskInstance taskInstance1 = new TaskInstance();
taskInstance.setState(ExecutionStatus.SUCCESS);
completeTaskList.put("5", taskInstance1);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("7"));
Assert.assertEquals(1, skipNodeList.size());
Assert.assertTrue(skipNodeList.containsKey("6"));
// dag: 1-2-3-5-7 4-3-6
// 3-if , complete:1/2/3/4
// 1.failure:3 expect post:6 skip:5/7
skipNodeList.clear();
completeTaskList.remove("3");
taskInstance = new TaskInstance();
taskInstance.setState(ExecutionStatus.FAILURE);
completeTaskList.put("3", taskInstance);
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList);
Assert.assertEquals(1, postNodes.size());
Assert.assertTrue(postNodes.contains("6"));
Assert.assertEquals(2, skipNodeList.size());
Assert.assertTrue(skipNodeList.containsKey("5"));
Assert.assertTrue(skipNodeList.containsKey("7"));
}
/**
* 1->2->3->5->7
* 4->3->6
* 2->8->5->7
*
* @return dag * @return dag
* @throws JsonProcessingException if error throws JsonProcessingException * @throws JsonProcessingException if error throws JsonProcessingException
*/ */
@ -87,11 +265,13 @@ public class DagHelperTest {
TaskNode node1 = new TaskNode(); TaskNode node1 = new TaskNode();
node1.setId("1"); node1.setId("1");
node1.setName("1"); node1.setName("1");
node1.setType("SHELL");
taskNodeList.add(node1); taskNodeList.add(node1);
TaskNode node2 = new TaskNode(); TaskNode node2 = new TaskNode();
node2.setId("2"); node2.setId("2");
node2.setName("2"); node2.setName("2");
node2.setType("SHELL");
List<String> dep2 = new ArrayList<>(); List<String> dep2 = new ArrayList<>();
dep2.add("1"); dep2.add("1");
node2.setDepList(dep2); node2.setDepList(dep2);
@ -101,11 +281,13 @@ public class DagHelperTest {
TaskNode node4 = new TaskNode(); TaskNode node4 = new TaskNode();
node4.setId("4"); node4.setId("4");
node4.setName("4"); node4.setName("4");
node4.setType("SHELL");
taskNodeList.add(node4); taskNodeList.add(node4);
TaskNode node3 = new TaskNode(); TaskNode node3 = new TaskNode();
node3.setId("3"); node3.setId("3");
node3.setName("3"); node3.setName("3");
node3.setType("SHELL");
List<String> dep3 = new ArrayList<>(); List<String> dep3 = new ArrayList<>();
dep3.add("2"); dep3.add("2");
dep3.add("4"); dep3.add("4");
@ -115,20 +297,48 @@ public class DagHelperTest {
TaskNode node5 = new TaskNode(); TaskNode node5 = new TaskNode();
node5.setId("5"); node5.setId("5");
node5.setName("5"); node5.setName("5");
node5.setType("SHELL");
List<String> dep5 = new ArrayList<>(); List<String> dep5 = new ArrayList<>();
dep5.add("3"); dep5.add("3");
dep5.add("8");
node5.setDepList(dep5); node5.setDepList(dep5);
taskNodeList.add(node5); taskNodeList.add(node5);
TaskNode node6 = new TaskNode();
node6.setId("6");
node6.setName("6");
node6.setType("SHELL");
List<String> dep6 = new ArrayList<>();
dep6.add("3");
node6.setDepList(dep6);
taskNodeList.add(node6);
TaskNode node7 = new TaskNode();
node7.setId("7");
node7.setName("7");
node7.setType("SHELL");
List<String> dep7 = new ArrayList<>();
dep7.add("5");
node7.setDepList(dep7);
taskNodeList.add(node7);
TaskNode node8 = new TaskNode();
node8.setId("8");
node8.setName("8");
node8.setType("SHELL");
List<String> dep8 = new ArrayList<>();
dep8.add("2");
node8.setDepList(dep8);
taskNodeList.add(node8);
List<String> startNodes = new ArrayList<>(); List<String> startNodes = new ArrayList<>();
List<String> recoveryNodes = new ArrayList<>(); List<String> recoveryNodes = new ArrayList<>();
List<TaskNode> destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, List<TaskNode> destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList,
startNodes, recoveryNodes, TaskDependType.TASK_POST); startNodes, recoveryNodes, TaskDependType.TASK_POST);
List<TaskNodeRelation> taskNodeRelations =DagHelper.generateRelationListByFlowNodes(destTaskNodeList); List<TaskNodeRelation> taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList);
ProcessDag processDag = new ProcessDag(); ProcessDag processDag = new ProcessDag();
processDag.setEdges(taskNodeRelations); processDag.setEdges(taskNodeRelations);
processDag.setNodes(destTaskNodeList); processDag.setNodes(destTaskNodeList);
return DagHelper.buildDagGraph(processDag); return DagHelper.buildDagGraph(processDag);
} }
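
The enlarged test DAG above is 1->2->3->5->7, 4->3->6 and 2->8->5->7. A small Python sketch (not DolphinScheduler's DagHelper API) of the idea parsePostNodes exercises, namely which nodes become submittable once all of their dependencies are complete:

DEPS = {
    "1": [], "2": ["1"], "4": [],
    "3": ["2", "4"], "8": ["2"],
    "5": ["3", "8"], "6": ["3"], "7": ["5"],
}

def ready_nodes(deps, completed):
    # Nodes not yet run whose parents are all complete.
    return sorted(
        node for node, parents in deps.items()
        if node not in completed and all(p in completed for p in parents)
    )

print(ready_nodes(DEPS, set()))            # ['1', '4']
print(ready_nodes(DEPS, {"1"}))            # ['2', '4']
print(ready_nodes(DEPS, {"1", "2", "4"}))  # ['3', '8']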

72
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java

@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
/**
* db task ack request command
*/
public class DBTaskAckCommand implements Serializable {
private int taskInstanceId;
private int status;
public DBTaskAckCommand(int status,int taskInstanceId) {
this.status = status;
this.taskInstanceId = taskInstanceId;
}
public int getTaskInstanceId() {
return taskInstanceId;
}
public void setTaskInstanceId(int taskInstanceId) {
this.taskInstanceId = taskInstanceId;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
/**
* package response command
* @return command
*/
public Command convert2Command(){
Command command = new Command();
command.setType(CommandType.DB_TASK_ACK);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
@Override
public String toString() {
return "DBTaskAckCommand{" +
"taskInstanceId=" + taskInstanceId +
", status=" + status +
'}';
}
}

71
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java

@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
/**
* db task final result response command
*/
public class DBTaskResponseCommand implements Serializable {
private int taskInstanceId;
private int status;
public DBTaskResponseCommand(int status,int taskInstanceId) {
this.status = status;
this.taskInstanceId = taskInstanceId;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
public int getTaskInstanceId() {
return taskInstanceId;
}
public void setTaskInstanceId(int taskInstanceId) {
this.taskInstanceId = taskInstanceId;
}
/**
* package response command
* @return command
*/
public Command convert2Command(){
Command command = new Command();
command.setType(CommandType.DB_TASK_RESPONSE);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
@Override
public String toString() {
return "DBTaskResponseCommand{" +
"taskInstanceId=" + taskInstanceId +
", status=" + status +
'}';
}
}

39
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java

@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.log;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.FileAppender;
import org.slf4j.Marker;
import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER;
/**
* Task log appender
*/
public class TaskLogAppender extends FileAppender<ILoggingEvent>{
@Override
protected void append(ILoggingEvent event) {
Marker marker = event.getMarker();
if (marker !=null) {
if (marker.equals(FINALIZE_SESSION_MARKER)) {
stop();
}
}
super.subAppend(event);
}
}
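For context, a hedged sketch of how the appender above gets stopped: it reacts to logback's FINALIZE_SESSION_MARKER, which the task code emits once its log is finished (the same pattern appears in AbstractTask.logHandle later in this diff). The logger name here is an assumption.

// hypothetical sketch: emitting the marker that makes TaskLogAppender call stop()
import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TaskLogMarkerSketch {
    public static void main(String[] args) {
        Logger taskLogger = LoggerFactory.getLogger("TaskLogLogger");  // logger name is illustrative
        taskLogger.info("task output line");                           // appended to the task log file as usual
        // the marker event ends the appender session; TaskLogAppender.append() sees it and stops
        taskLogger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString());
    }
}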

162
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java

@ -1,162 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.runner;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.DependentItem;
import org.apache.dolphinscheduler.common.model.DependentTaskModel;
import org.apache.dolphinscheduler.common.task.dependent.DependentParameters;
import org.apache.dolphinscheduler.common.utils.DependentUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.utils.LogUtils;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class ConditionsTaskExecThread extends MasterBaseTaskExecThread {
/**
* dependent parameters
*/
private DependentParameters dependentParameters;
/**
* complete task map
*/
private Map<String, ExecutionStatus> completeTaskList = new ConcurrentHashMap<>();
/**
* condition result
*/
private DependResult conditionResult;
/**
* constructor of MasterBaseTaskExecThread
*
* @param taskInstance task instance
*/
public ConditionsTaskExecThread(TaskInstance taskInstance) {
super(taskInstance);
taskInstance.setStartTime(new Date());
}
@Override
public Boolean submitWaitComplete() {
try{
this.taskInstance = submit();
logger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX,
taskInstance.getProcessDefinitionId(),
taskInstance.getProcessInstanceId(),
taskInstance.getId()));
String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, processService.formatTaskAppId(this.taskInstance));
Thread.currentThread().setName(threadLoggerInfoName);
initTaskParameters();
logger.info("dependent task start");
waitTaskQuit();
updateTaskState();
}catch (Exception e){
logger.error("conditions task run exception" , e);
}
return true;
}
private void waitTaskQuit() {
List<TaskInstance> taskInstances = processService.findValidTaskListByProcessId(
taskInstance.getProcessInstanceId()
);
for(TaskInstance task : taskInstances){
completeTaskList.putIfAbsent(task.getName(), task.getState());
}
List<DependResult> modelResultList = new ArrayList<>();
for(DependentTaskModel dependentTaskModel : dependentParameters.getDependTaskList()){
List<DependResult> itemDependResult = new ArrayList<>();
for(DependentItem item : dependentTaskModel.getDependItemList()){
itemDependResult.add(getDependResultForItem(item));
}
DependResult modelResult = DependentUtils.getDependResultForRelation(dependentTaskModel.getRelation(), itemDependResult);
modelResultList.add(modelResult);
}
conditionResult = DependentUtils.getDependResultForRelation(
dependentParameters.getRelation(), modelResultList
);
logger.info("the conditions task depend result : {}", conditionResult);
}
/**
*
*/
private void updateTaskState() {
ExecutionStatus status;
if(this.cancel){
status = ExecutionStatus.KILL;
}else{
status = (conditionResult == DependResult.SUCCESS) ? ExecutionStatus.SUCCESS : ExecutionStatus.FAILURE;
}
taskInstance.setState(status);
taskInstance.setEndTime(new Date());
processService.updateTaskInstance(taskInstance);
}
private void initTaskParameters() {
this.taskInstance.setLogPath(LogUtils.getTaskLogPath(taskInstance));
this.taskInstance.setHost(NetUtils.getHost() + Constants.COLON + masterConfig.getListenPort());
taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
taskInstance.setStartTime(new Date());
this.processService.saveTaskInstance(taskInstance);
this.dependentParameters = JSONUtils.parseObject(this.taskInstance.getDependency(), DependentParameters.class);
}
/**
* depend result for depend item
* @param item
* @return
*/
private DependResult getDependResultForItem(DependentItem item){
DependResult dependResult = DependResult.SUCCESS;
if(!completeTaskList.containsKey(item.getDepTasks())){
logger.info("depend item: {} have not completed yet.", item.getDepTasks());
dependResult = DependResult.FAILED;
return dependResult;
}
ExecutionStatus executionStatus = completeTaskList.get(item.getDepTasks());
if(executionStatus != item.getStatus()){
logger.info("depend item : {} expect status: {}, actual status: {}" ,item.getDepTasks(), item.getStatus(), executionStatus);
dependResult = DependResult.FAILED;
}
logger.info("dependent item complete {} {},{}",
Constants.DEPENDENT_SPLIT, item.getDepTasks(), dependResult);
return dependResult;
}
}

7
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java

@ -130,19 +130,20 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
while (Stopper.isRunning()) { while (Stopper.isRunning()) {
// waiting for subflow process instance establishment // waiting for subflow process instance establishment
if (subProcessInstance == null) { if (subProcessInstance == null) {
Thread.sleep(Constants.SLEEP_TIME_MILLIS); Thread.sleep(Constants.SLEEP_TIME_MILLIS);
if(!setTaskInstanceState()){ if(!setTaskInstanceState()){
continue; continue;
} }
} }
subProcessInstance = processService.findProcessInstanceById(subProcessInstance.getId()); subProcessInstance = processService.findProcessInstanceById(subProcessInstance.getId());
if (checkTaskTimeout()) {
this.checkTimeoutFlag = !alertTimeout();
handleTimeoutFailed();
}
updateParentProcessState(); updateParentProcessState();
if (subProcessInstance.getState().typeIsFinished()){ if (subProcessInstance.getState().typeIsFinished()){
break; break;
} }
if(this.processInstance.getState() == ExecutionStatus.READY_PAUSE){ if(this.processInstance.getState() == ExecutionStatus.READY_PAUSE){
// parent process "ready to pause" , child process "pause" // parent process "ready to pause" , child process "pause"
pauseSubProcess(); pauseSubProcess();

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java

@ -104,9 +104,7 @@ public class DependentExecute {
ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(), ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(),
dateInterval); dateInterval);
if(processInstance == null){ if(processInstance == null){
logger.error("cannot find the right process instance: definition id:{}, start:{}, end:{}", return DependResult.WAITING;
dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() );
return DependResult.FAILED;
} }
// need to check workflow for updates, so get all task and check the task state // need to check workflow for updates, so get all task and check the task state
if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){ if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){

94
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java

@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.cache;
import org.apache.dolphinscheduler.common.enums.Event;
import org.apache.dolphinscheduler.remote.command.Command;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Response cache: caches the results the worker sends to the master
*/
public class ResponceCache {
private static final ResponceCache instance = new ResponceCache();
private ResponceCache(){}
public static ResponceCache get(){
return instance;
}
private Map<Integer,Command> ackCache = new ConcurrentHashMap<>();
private Map<Integer,Command> responseCache = new ConcurrentHashMap<>();
/**
* cache response
* @param taskInstanceId taskInstanceId
* @param command command
* @param event event ACK/RESULT
*/
public void cache(Integer taskInstanceId, Command command, Event event){
switch (event){
case ACK:
ackCache.put(taskInstanceId,command);
break;
case RESULT:
responseCache.put(taskInstanceId,command);
break;
default:
throw new IllegalArgumentException("invalid event type : " + event);
}
}
/**
* remove ack cache
* @param taskInstanceId taskInstanceId
*/
public void removeAckCache(Integer taskInstanceId){
ackCache.remove(taskInstanceId);
}
/**
* remove response cache
* @param taskInstanceId taskInstanceId
*/
public void removeResponseCache(Integer taskInstanceId){
responseCache.remove(taskInstanceId);
}
/**
* getAckCache
* @return getAckCache
*/
public Map<Integer,Command> getAckCache(){
return ackCache;
}
/**
* getResponseCache
* @return getResponseCache
*/
public Map<Integer,Command> getResponseCache(){
return responseCache;
}
}
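A hedged sketch of the cache-then-clear flow (my own illustration): the worker registers every ack/result Command it sends, and the DB ack/response processors below remove the entry once the master confirms the database write. Event.ACK/Event.RESULT and the command classes come from this diff; the id is an example.

// hypothetical sketch of caching an outgoing ack until the master confirms it
void reportAck(int taskInstanceId) {
    Command ackCommand = new DBTaskAckCommand(
            ExecutionStatus.RUNNING_EXECUTION.getCode(), taskInstanceId).convert2Command();
    // keep the command so RetryReportTaskStatusThread can resend it if no confirmation arrives
    ResponceCache.get().cache(taskInstanceId, ackCommand, Event.ACK);
    // ... send ackCommand to the master here ...
}
// later, DBTaskAckProcessor (below) clears the entry on a successful DB_TASK_ACK:
//     ResponceCache.get().removeAckCache(taskAckCommand.getTaskInstanceId());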

56
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java

@ -0,0 +1,56 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.remote.command.*;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.JsonSerializer;
import org.apache.dolphinscheduler.server.worker.cache.ResponceCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* db task ack processor
*/
public class DBTaskAckProcessor implements NettyRequestProcessor {
private final Logger logger = LoggerFactory.getLogger(DBTaskAckProcessor.class);
@Override
public void process(Channel channel, Command command) {
Preconditions.checkArgument(CommandType.DB_TASK_ACK == command.getType(),
String.format("invalid command type : %s", command.getType()));
DBTaskAckCommand taskAckCommand = JsonSerializer.deserialize(
command.getBody(), DBTaskAckCommand.class);
if (taskAckCommand == null){
return;
}
if (taskAckCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){
ResponceCache.get().removeAckCache(taskAckCommand.getTaskInstanceId());
}
}
}

58
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java

@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.JsonSerializer;
import org.apache.dolphinscheduler.server.worker.cache.ResponceCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* db task response processor
*/
public class DBTaskResponseProcessor implements NettyRequestProcessor {
private final Logger logger = LoggerFactory.getLogger(DBTaskResponseProcessor.class);
@Override
public void process(Channel channel, Command command) {
Preconditions.checkArgument(CommandType.DB_TASK_RESPONSE == command.getType(),
String.format("invalid command type : %s", command.getType()));
DBTaskResponseCommand taskResponseCommand = JsonSerializer.deserialize(
command.getBody(), DBTaskResponseCommand.class);
if (taskResponseCommand == null){
return;
}
if (taskResponseCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){
ResponceCache.get().removeResponseCache(taskResponseCommand.getTaskInstanceId());
}
}
}

94
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java

@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.runner;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.server.worker.cache.ResponceCache;
import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
* Retry Report Task Status Thread
*/
@Component
public class RetryReportTaskStatusThread implements Runnable {
private final Logger logger = LoggerFactory.getLogger(RetryReportTaskStatusThread.class);
/**
* every 5 minutes
*/
private static long RETRY_REPORT_TASK_STATUS_INTERVAL = 5 * 60 * 1000L;
/**
* task callback service
*/
private final TaskCallbackService taskCallbackService;
public void start(){
Thread thread = new Thread(this,"RetryReportTaskStatusThread");
thread.start();
}
public RetryReportTaskStatusThread(){
this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class);
}
/**
* retry ack/response
*/
@Override
public void run() {
ResponceCache responceCache = ResponceCache.get();
while (Stopper.isRunning()){
// sleep 5 minutes
ThreadUtils.sleep(RETRY_REPORT_TASK_STATUS_INTERVAL);
try {
if (!responceCache.getAckCache().isEmpty()){
Map<Integer,Command> ackCache = responceCache.getAckCache();
for (Map.Entry<Integer, Command> entry : ackCache.entrySet()){
Integer taskInstanceId = entry.getKey();
Command ackCommand = entry.getValue();
taskCallbackService.sendAck(taskInstanceId,ackCommand);
}
}
if (!responceCache.getResponseCache().isEmpty()){
Map<Integer,Command> responseCache = responceCache.getResponseCache();
for (Map.Entry<Integer, Command> entry : responseCache.entrySet()){
Integer taskInstanceId = entry.getKey();
Command responseCommand = entry.getValue();
taskCallbackService.sendResult(taskInstanceId,responseCommand);
}
}
}catch (Exception e){
logger.warn("retry report task status error", e);
}
}
}
}
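The hunks above do not show where this thread is started; as an assumption, the worker server would create and start it once during startup, inside the Spring context (the constructor looks up TaskCallbackService via SpringApplicationContext):

// hypothetical wiring sketch; the real call site in the worker server is not part of this diff
RetryReportTaskStatusThread retryThread = new RetryReportTaskStatusThread();
retryThread.start();  // spawns the "RetryReportTaskStatusThread" that resends cached ack/response commands every 5 minutes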

76
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java

@ -16,46 +16,34 @@
*/ */
package org.apache.dolphinscheduler.server.worker.task; package org.apache.dolphinscheduler.server.worker.task;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.process.ProcessBuilderForWin32; import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import java.io.BufferedReader; import java.io.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.*;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.slf4j.Logger; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.WinNT;
/** /**
* abstract command executor * abstract command executor
@ -117,26 +105,6 @@ public abstract class AbstractCommandExecutor {
// setting up user to run commands // setting up user to run commands
List<String> command = new LinkedList<>(); List<String> command = new LinkedList<>();
if (OSUtils.isWindows()) {
//init process builder
ProcessBuilderForWin32 processBuilder = new ProcessBuilderForWin32();
// setting up a working directory
processBuilder.directory(new File(taskExecutionContext.getExecutePath()));
// setting up a username and password
processBuilder.user(taskExecutionContext.getTenantCode(), StringUtils.EMPTY);
// merge error information to standard output stream
processBuilder.redirectErrorStream(true);
// setting up user to run commands
command.add(commandInterpreter());
command.add("/c");
command.addAll(commandOptions());
command.add(commandFile);
// setting commands
processBuilder.command(command);
process = processBuilder.start();
} else {
//init process builder //init process builder
ProcessBuilder processBuilder = new ProcessBuilder(); ProcessBuilder processBuilder = new ProcessBuilder();
// setting up a working directory // setting up a working directory
@ -155,7 +123,6 @@ public abstract class AbstractCommandExecutor {
// setting commands // setting commands
processBuilder.command(command); processBuilder.command(command);
process = processBuilder.start(); process = processBuilder.start();
}
// print command // print command
printCommand(command); printCommand(command);
@ -235,9 +202,6 @@ public abstract class AbstractCommandExecutor {
return result; return result;
} }
public String getVarPool() {
return varPool.toString();
}
/** /**
* cancel application * cancel application
@ -316,20 +280,30 @@ public abstract class AbstractCommandExecutor {
* @param commands process builder * @param commands process builder
*/ */
private void printCommand(List<String> commands) { private void printCommand(List<String> commands) {
String cmdStr = ProcessUtils.buildCommandStr(commands); String cmdStr;
try {
cmdStr = ProcessUtils.buildCommandStr(commands);
logger.info("task run command:\n{}", cmdStr); logger.info("task run command:\n{}", cmdStr);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
} }
/** /**
* clear * clear
*/ */
private void clear() { private void clear() {
List<String> markerList = new ArrayList<>();
markerList.add(ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER.toString());
if (!logBuffer.isEmpty()) { if (!logBuffer.isEmpty()) {
// log handle // log handle
logHandler.accept(logBuffer); logHandler.accept(logBuffer);
logBuffer.clear(); logBuffer.clear();
} }
logHandler.accept(markerList);
} }
/** /**
@ -479,12 +453,13 @@ public abstract class AbstractCommandExecutor {
/** /**
* get remain time（s） * get remain time（s）
* *
* @return remain time * @return remain time
*/ */
private long getRemaintime() { private long getRemaintime() {
long remainTime = DateUtils.getRemainTime(taskExecutionContext.getStartTime(), taskExecutionContext.getTaskTimeout()); long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000;
long remainTime = taskExecutionContext.getTaskTimeout() - usedTime;
if (remainTime < 0) { if (remainTime < 0) {
throw new RuntimeException("task execution time out"); throw new RuntimeException("task execution time out");
@ -506,12 +481,7 @@ public abstract class AbstractCommandExecutor {
Field f = process.getClass().getDeclaredField(Constants.PID); Field f = process.getClass().getDeclaredField(Constants.PID);
f.setAccessible(true); f.setAccessible(true);
if (OSUtils.isWindows()) {
WinNT.HANDLE handle = (WinNT.HANDLE) f.get(process);
processId = Kernel32.INSTANCE.GetProcessId(handle);
} else {
processId = f.getInt(process); processId = f.getInt(process);
}
} catch (Throwable e) { } catch (Throwable e) {
logger.error(e.getMessage(), e); logger.error(e.getMessage(), e);
} }
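The reworked getRemaintime above drops DateUtils.getRemainTime in favor of an inline calculation; a worked example of the arithmetic with illustrative values (seconds throughout, apart from the millisecond clock):

// worked example of the new remaining-time arithmetic (values are illustrative)
long startMillis = System.currentTimeMillis() - 90_000L;                 // task started 90 s ago
int taskTimeout = 300;                                                    // configured timeout: 300 s
long usedTime = (System.currentTimeMillis() - startMillis) / 1000;        // ≈ 90
long remainTime = taskTimeout - usedTime;                                 // ≈ 210
if (remainTime < 0) {
    throw new RuntimeException("task execution time out");                // same behaviour as the diff
}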

16
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java

@ -17,7 +17,10 @@
package org.apache.dolphinscheduler.server.worker.task; package org.apache.dolphinscheduler.server.worker.task;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskRecordStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
@ -30,18 +33,18 @@ import org.apache.dolphinscheduler.common.task.shell.ShellParameters;
import org.apache.dolphinscheduler.common.task.spark.SparkParameters; import org.apache.dolphinscheduler.common.task.spark.SparkParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.TaskRecordDao; import org.apache.dolphinscheduler.dao.TaskRecordDao;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER;
/** /**
* executive task * executive task
*/ */
@ -123,13 +126,16 @@ public abstract class AbstractTask {
*/ */
public void logHandle(List<String> logs) { public void logHandle(List<String> logs) {
// note that the "new line" is added here to facilitate log parsing // note that the "new line" is added here to facilitate log parsing
if (logs.contains(FINALIZE_SESSION_MARKER.toString())) {
logger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString());
} else {
logger.info(" -> {}", String.join("\n\t", logs)); logger.info(" -> {}", String.join("\n\t", logs));
} }
}
public void setVarPool(String varPool) { public void setVarPool(String varPool) {
this.varPool = varPool; this.varPool = varPool;
} }
public String getVarPool() { public String getVarPool() {
return varPool; return varPool;
} }

25
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java

@ -111,29 +111,4 @@ public class MasterCommandTest {
} }
@Test
public void testDagHelper(){
ProcessDefinition processDefinition = processDefinitionMapper.selectById(19);
try {
ProcessDag processDag = DagHelper.generateFlowDag(processDefinition.getProcessDefinitionJson(),
new ArrayList<>(), new ArrayList<>(), TaskDependType.TASK_POST);
DAG<String,TaskNode,TaskNodeRelation> dag = DagHelper.buildDagGraph(processDag);
Collection<String> start = DagHelper.getStartVertex("1", dag, null);
System.out.println(start.toString());
Map<String, TaskNode> forbidden = DagHelper.getForbiddenTaskNodeMaps(processDefinition.getProcessDefinitionJson());
System.out.println(forbidden);
} catch (Exception e) {
e.printStackTrace();
}
}
} }

7
dolphinscheduler-ui/build/webpack.config.prod.js

@ -51,12 +51,7 @@ const config = merge.smart(baseConfig, {
minimizer: [ minimizer: [
new TerserPlugin({ new TerserPlugin({
terserOptions: { terserOptions: {
compress: { compress: {}
warnings: false,
drop_console: true,
drop_debugger: true,
pure_funcs: ['console.log']
}
}, },
cache: true, cache: true,
parallel: true, parallel: true,

23
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue

@ -303,25 +303,27 @@
color: #0097e0; color: #0097e0;
font-size: 12px; font-size: 12px;
margin-left: 10px; margin-left: 10px;
i { em {
text-decoration: none !important;
vertical-align: middle; vertical-align: middle;
} }
} }
.clock { .clock {
>i { >em {
font-size: 20px; font-size: 20px;
vertical-align: middle; vertical-align: middle;
transform: scale(1); transform: scale(1);
} }
} }
.refresh-log { .refresh-log {
>i { >em {
text-decoration: none;
font-size: 20px; font-size: 20px;
vertical-align: middle; vertical-align: middle;
transform: scale(1); transform: scale(1);
} }
&.active { &.active {
>i { >em {
-webkit-transition-property: -webkit-transform; -webkit-transition-property: -webkit-transform;
-webkit-transition-duration: 1s; -webkit-transition-duration: 1s;
-moz-transition-property: -moz-transform; -moz-transition-property: -moz-transform;
@ -368,5 +370,16 @@
} }
} }
} }
@-webkit-keyframes rotateloading{from{-webkit-transform: rotate(0deg)}
to{-webkit-transform: rotate(360deg)}
}
@-moz-keyframes rotateloading{from{-moz-transform: rotate(0deg)}
to{-moz-transform: rotate(359deg)}
}
@-o-keyframes rotateloading{from{-o-transform: rotate(0deg)}
to{-o-transform: rotate(359deg)}
}
@keyframes rotateloading{from{transform: rotate(0deg)}
to{transform: rotate(359deg)}
}
</style> </style>

19
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue

@ -19,7 +19,7 @@
<div v-show="!msg"> <div v-show="!msg">
<div class="data-area" v-spin="isSpin" style="height: 430px;"> <div class="data-area" v-spin="isSpin" style="height: 430px;">
<div class="col-md-7"> <div class="col-md-7">
<div id="process-state-pie" style="height:260px;margin-top: 100px;"></div> <div id="process-state-pie" style="width:100%;height:260px;margin-top: 100px;"></div>
</div> </div>
<div class="col-md-5"> <div class="col-md-5">
<div class="table-small-model"> <div class="table-small-model">
@ -31,7 +31,10 @@
</tr> </tr>
<tr v-for="(item,$index) in processStateList" :key="$index"> <tr v-for="(item,$index) in processStateList" :key="$index">
<td><span>{{$index+1}}</span></td> <td><span>{{$index+1}}</span></td>
<td><span><a href="javascript:" @click="searchParams.projectId && _goProcess(item.key)" :class="searchParams.projectId ?'links':''">{{item.value}}</a></span></td> <td>
<a v-if="currentName === 'home'" style="cursor: default">{{item.value}}</a>
<span v-else><a href="javascript:" @click="searchParams.projectId && _goProcess(item.key)">{{item.value}}</a></span>
</td>
<td><span class="ellipsis" style="width: 98%;" :title="item.key">{{item.key}}</span></td> <td><span class="ellipsis" style="width: 98%;" :title="item.key">{{item.key}}</span></td>
</tr> </tr>
</table> </table>
@ -49,6 +52,8 @@
import { mapActions } from 'vuex' import { mapActions } from 'vuex'
import { pie } from './chartConfig' import { pie } from './chartConfig'
import Chart from '@/module/ana-charts' import Chart from '@/module/ana-charts'
import echarts from 'echarts'
import store from '@/conf/home/store'
import mNoData from '@/module/components/noData/noData' import mNoData from '@/module/components/noData/noData'
import { stateType } from '@/conf/home/pages/projects/pages/_source/instanceConditions/common' import { stateType } from '@/conf/home/pages/projects/pages/_source/instanceConditions/common'
export default { export default {
@ -57,7 +62,8 @@
return { return {
isSpin: true, isSpin: true,
msg: '', msg: '',
processStateList: [] processStateList: [],
currentName: ''
} }
}, },
props: { props: {
@ -108,11 +114,15 @@
this.isSpin = false this.isSpin = false
}) })
} }
},
'$store.state.projects.sideBar': function() {
echarts.init(document.getElementById('process-state-pie')).resize()
} }
}, },
beforeCreate () { beforeCreate () {
}, },
created () { created () {
this.currentName = this.$router.currentRoute.name
}, },
beforeMount () { beforeMount () {
}, },
@ -132,7 +142,4 @@
</script> </script>
<style lang="scss" rel="stylesheet/scss"> <style lang="scss" rel="stylesheet/scss">
.process-state-count-model {
}
</style> </style>

19
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue

@ -19,7 +19,7 @@
<div v-show="!msg"> <div v-show="!msg">
<div class="data-area" v-spin="isSpin" style="height: 430px;"> <div class="data-area" v-spin="isSpin" style="height: 430px;">
<div class="col-md-7"> <div class="col-md-7">
<div id="task-status-pie" style="height:260px;margin-top: 100px;"></div> <div id="task-status-pie" style="width:100%;height:260px;margin-top: 100px;"></div>
</div> </div>
<div class="col-md-5"> <div class="col-md-5">
<div class="table-small-model"> <div class="table-small-model">
@ -32,8 +32,9 @@
<tr v-for="(item,$index) in taskCtatusList" :key="$index"> <tr v-for="(item,$index) in taskCtatusList" :key="$index">
<td><span>{{$index+1}}</span></td> <td><span>{{$index+1}}</span></td>
<td> <td>
<span> <a v-if="currentName === 'home'" style="cursor: default">{{item.value}}</a>
<a href="javascript:" @click="searchParams.projectId && _goTask(item.key)" :class="searchParams.projectId ?'links':''">{{item.value}}</a> <span v-else>
<a href="javascript:" @click="searchParams.projectId && _goTask(item.key)">{{item.value}}</a>
</span> </span>
</td> </td>
<td><span class="ellipsis" style="width: 98%;" :title="item.key">{{item.key}}</span></td> <td><span class="ellipsis" style="width: 98%;" :title="item.key">{{item.key}}</span></td>
@ -53,6 +54,8 @@
import { mapActions } from 'vuex' import { mapActions } from 'vuex'
import { pie } from './chartConfig' import { pie } from './chartConfig'
import Chart from '@/module/ana-charts' import Chart from '@/module/ana-charts'
import echarts from 'echarts'
import store from '@/conf/home/store'
import mNoData from '@/module/components/noData/noData' import mNoData from '@/module/components/noData/noData'
import { stateType } from '@/conf/home/pages/projects/pages/_source/instanceConditions/common' import { stateType } from '@/conf/home/pages/projects/pages/_source/instanceConditions/common'
@ -62,7 +65,8 @@
return { return {
isSpin: true, isSpin: true,
msg: '', msg: '',
taskCtatusList: [] taskCtatusList: [],
currentName: ''
} }
}, },
props: { props: {
@ -115,11 +119,15 @@
this.isSpin = false this.isSpin = false
}) })
} }
},
'$store.state.projects.sideBar': function() {
echarts.init(document.getElementById('task-status-pie')).resize()
} }
}, },
beforeCreate () { beforeCreate () {
}, },
created () { created () {
this.currentName = this.$router.currentRoute.name
}, },
beforeMount () { beforeMount () {
}, },
@ -139,7 +147,4 @@
</script> </script>
<style lang="scss" rel="stylesheet/scss"> <style lang="scss" rel="stylesheet/scss">
.task-ctatus-count-model {
}
</style> </style>

1
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue

@ -108,6 +108,7 @@
data-toggle="tooltip" data-toggle="tooltip"
:title="$t('View log')" :title="$t('View log')"
icon="ans-icon-log" icon="ans-icon-log"
:disabled="item.taskType==='SUB_PROCESS'? true: false"
@click="_refreshLog(item)"> @click="_refreshLog(item)">
</x-button> </x-button>
</td> </td>

2
dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue

@ -232,7 +232,7 @@
position: absolute; position: absolute;
right: 0; right: 0;
top: 0; top: 0;
>i { >em {
font-size: 20px; font-size: 20px;
color: #2d8cf0; color: #2d8cf0;
cursor: pointer; cursor: pointer;

10
dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue

@ -94,11 +94,11 @@ v-ps<template>
<x-poptip <x-poptip
:ref="'poptip-' + $index" :ref="'poptip-' + $index"
placement="bottom-end" placement="bottom-end"
width="90"> width="190">
<p>{{$t('Delete?')}}</p> <p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;"> <div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button> <x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button> <x-button type="primary" size="xsmall" shape="circle" :loading="spinnerLoading" @click="_delete(item,$index)">{{spinnerLoading ? 'Loading' : $t('Confirm')}}</x-button>
</div> </div>
<template slot="reference"> <template slot="reference">
<x-button <x-button
@ -125,7 +125,8 @@ v-ps<template>
name: 'udf-manage-list', name: 'udf-manage-list',
data () { data () {
return { return {
list: [] list: [],
spinnerLoading: false
} }
}, },
props: { props: {
@ -139,15 +140,18 @@ v-ps<template>
this.$refs[`poptip-${i}`][0].doClose() this.$refs[`poptip-${i}`][0].doClose()
}, },
_delete (item, i) { _delete (item, i) {
this.spinnerLoading = true
this.deleteUdf({ this.deleteUdf({
id: item.id id: item.id
}).then(res => { }).then(res => {
this.$refs[`poptip-${i}`][0].doClose() this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update') this.$emit('on-update')
this.$message.success(res.msg) this.$message.success(res.msg)
this.spinnerLoading = false
}).catch(e => { }).catch(e => {
this.$refs[`poptip-${i}`][0].doClose() this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '') this.$message.error(e.msg || '')
this.spinnerLoading = false
}) })
}, },
_edit (item) { _edit (item) {

10
dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue

@ -30,7 +30,7 @@
:disabled="item ? true : false" :disabled="item ? true : false"
v-model="tenantCode" v-model="tenantCode"
maxlength="60" maxlength="60"
:placeholder="$t('Please enter name')"> :placeholder="$t('Please enter tenant code')">
</x-input> </x-input>
</template> </template>
</m-list-box-f> </m-list-box-f>
@ -41,7 +41,7 @@
type="input" type="input"
v-model="tenantName" v-model="tenantName"
maxlength="60" maxlength="60"
:placeholder="$t('Please enter name')" :placeholder="$t('Please enter tenant Name')"
autocomplete="off"> autocomplete="off">
</x-input> </x-input>
</template> </template>
@ -88,7 +88,7 @@
store, store,
queueList: [], queueList: [],
queueId: '', queueId: '',
tenantCode: '', tenantCode: null,
tenantName: '', tenantName: '',
description: '', description: '',
} }
@ -138,12 +138,14 @@
this.$message.warning(`${i18n.$t('Please enter the tenant code in English')}`) this.$message.warning(`${i18n.$t('Please enter the tenant code in English')}`)
return false return false
} }
if (!isEn.test(this.tenantCode) || _.startsWith(this.tenantCode, '_', 0) || _.startsWith(this.tenantCode, '.', 0)) { if (!isEn.test(this.tenantCode) || _.startsWith(this.tenantCode, '_', 0) || _.startsWith(this.tenantCode, '.', 0)) {
this.$message.warning(`${i18n.$t('Please enter tenant code in English')}`) this.$message.warning(`${i18n.$t('Please enter tenant code in English')}`)
return false return false
} }
if (!this.tenantName.replace(/\s*/g,"")) { if (!this.tenantName.replace(/\s*/g,"")) {
this.$message.warning(`${i18n.$t('Please enter name')}`) this.$message.warning(`${i18n.$t('Please enter tenant Name')}`)
return false return false
} }
// Verify tenant name cannot contain special characters // Verify tenant name cannot contain special characters

2
dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue

@ -23,7 +23,7 @@
<template slot="content"> <template slot="content">
<div class="create-token-model"> <div class="create-token-model">
<m-list-box-f> <m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Failure time')}}</template> <template slot="name"><strong>*</strong>{{$t('Expiration time')}}</template>
<template slot="content"> <template slot="content">
<x-datepicker <x-datepicker
:disabled-date="disabledDate" :disabled-date="disabledDate"

2
dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/list.vue

@ -29,7 +29,7 @@
<span>Token</span> <span>Token</span>
</th> </th>
<th> <th>
<span>{{$t('Failure time')}}</span> <span>{{$t('Expiration time')}}</span>
</th> </th>
<th> <th>
<span>{{$t('Create Time')}}</span> <span>{{$t('Create Time')}}</span>

6
dolphinscheduler-ui/src/js/conf/home/store/projects/mutations.js

@ -16,4 +16,10 @@
*/ */
export default { export default {
/**
* set sideBar
* */
setSideBar (state, payload) {
state.sideBar = payload
},
} }

2
dolphinscheduler-ui/src/js/conf/home/store/projects/state.js

@ -15,5 +15,5 @@
* limitations under the License. * limitations under the License.
*/ */
export default { export default {
sideBar: 1
} }

336
dolphinscheduler-ui/src/js/module/components/fileUpdate/fileChildReUpdate.vue

@ -0,0 +1,336 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-popup
ref="popup"
:ok-text="$t('Upload')"
:nameText="$t('File Upload')"
@ok="_ok"
:disabled="progress === 0 ? false : true">
<template slot="content">
<form name="files" enctype="multipart/form-data" method="post">
<div class="file-update-model"
@drop.prevent="_onDrop"
@dragover.prevent="dragOver = true"
@dragleave.prevent="dragOver = false"
id="file-update-model">
<div class="tooltip-info">
<em class="ans ans-icon-warn-solid"></em>
<span>{{$t('Drag the file into the current upload window')}}</span>
</div>
<!--<div class="hide-archive" v-if="progress !== 0" @click="_ckArchive">
<em class="fa fa-minus" data-toggle="tooltip" title="Close window and continue uploading" data-container="body" ></em>
</div>-->
<div class="update-popup" v-if="dragOver">
<div class="icon-box">
<em class="ans ans-icon-upload"></em>
</div>
<p class="p1">
<span>{{$t('Drag area upload')}}</span>
</p>
</div>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('File Name')}}</template>
<template slot="content">
<x-input
type="input"
v-model="name"
:disabled="progress !== 0"
:placeholder="$t('Please enter name')"
autocomplete="off">
</x-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('Description')}}</template>
<template slot="content">
<x-input
type="textarea"
v-model="description"
:disabled="progress !== 0"
:placeholder="$t('Please enter description')"
autocomplete="off">
</x-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Upload Files')}}</template>
<template slot="content">
<div class="file-update-box">
<template v-if="progress === 0">
<input name="file" id="file" type="file" class="file-update">
<x-button type="dashed" size="xsmall"> {{$t('Upload')}} </x-button>
</template>
<div class="progress-box" v-if="progress !== 0">
<m-progress-bar :value="progress" text-placement="left-right"></m-progress-bar>
</div>
</div>
</template>
</m-list-box-f>
</div>
</form>
</template>
</m-popup>
</template>
<script>
import io from '@/module/io'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import localStore from '@/module/util/localStorage'
import mPopup from '@/module/components/popup/popup'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
import mProgressBar from '@/module/components/progressBar/progressBar'
export default {
name: 'file-update',
data () {
return {
store,
// name
name: '',
// description
description: '',
// progress
progress: 0,
// file
file: null,
currentDir: localStore.getItem('currentDir'),
// Whether to drag upload
dragOver: false
}
},
watch: {
},
props: {
type: String,
fileName: String,
desc: String,
id: Number
},
methods: {
/**
* submit
*/
_ok () {
this.$refs['popup'].spinnerLoading = true
if (this._validation()) {
if(this.fileName===this.name) {
const isLt1024M = this.file.size / 1024 / 1024 < 1024
if(isLt1024M) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs['popup'].spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs['popup'].spinnerLoading = false
})
} else {
this.$message.warning(`${i18n.$t('Upload File Size')}`)
this.$refs['popup'].spinnerLoading = false
}
} else {
this.store.dispatch('resource/resourceVerifyName', {
fullName: this.currentDir+'/'+this.name,
type: this.type
}).then(res => {
const isLt1024M = this.file.size / 1024 / 1024 < 1024
if(isLt1024M) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs['popup'].spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs['popup'].spinnerLoading = false
})
} else {
this.$message.warning(`${i18n.$t('Upload File Size')}`)
this.$refs['popup'].spinnerLoading = false
}
}).catch(e => {
this.$message.error(e.msg || '')
this.$refs['popup'].spinnerLoading = false
})
}
} else {
this.$refs['popup'].spinnerLoading = false
}
},
/**
* validation
*/
_validation () {
if (!this.name) {
this.$message.warning(`${i18n.$t('Please enter file name')}`)
return false
}
if (!this.file) {
this.$message.warning(`${i18n.$t('Please select the file to upload')}`)
return false
}
return true
},
/**
* update file
*/
_formDataUpdate () {
return new Promise((resolve, reject) => {
let self = this
let formData = new FormData()
formData.append('file', this.file)
formData.append('name', this.name)
formData.append('description', this.description)
formData.append('id', this.id)
formData.append('type', this.type)
io.post(`resources/update`, res => {
this.$message.success(res.msg)
resolve()
self.$emit('onUpdate')
}, e => {
reject(e)
self.$emit('close')
this.$message.error(e.msg || '')
}, {
data: formData,
emulateJSON: false,
onUploadProgress (progressEvent) {
// Size has been uploaded
let loaded = progressEvent.loaded
// Total attachment size
let total = progressEvent.total
self.progress = Math.floor(100 * loaded / total)
self.$emit('onProgress', self.progress)
}
})
})
},
/**
* Archive to the top right corner and continue uploading
*/
_ckArchive () {
$('.update-file-modal').hide()
this.$emit('onArchive')
},
/**
* Drag and drop upload
*/
_onDrop (e) {
let file = e.dataTransfer.files[0]
this.file = file
this.name = file.name
this.dragOver = false
}
},
mounted () {
this.name = this.fileName
this.description = this.desc
$('#file').change(() => {
let file = $('#file')[0].files[0]
this.file = file
this.name = file.name
})
},
components: { mPopup, mListBoxF, mProgressBar }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.file-update-model {
.tooltip-info {
position: absolute;
left: 20px;
bottom: 26px;
span {
font-size: 12px;
color: #666;
vertical-align: middle;
}
.fa,.ans {
color: #0097e0;
font-size: 14px;
vertical-align: middle;
}
}
.hide-archive {
position: absolute;
right: 22px;
top: 17px;
.fa,.ans{
font-size: 16px;
color: #333;
font-weight: normal;
cursor: pointer;
&:hover {
color: #0097e0;
}
}
}
.file-update-box {
padding-top: 4px;
position: relative;
.file-update {
width: 70px;
height: 40px;
position: absolute;
left: 0;
top: 0;
cursor: pointer;
filter: alpha(opacity=0);
-moz-opacity: 0;
opacity: 0;
}
&:hover {
.v-btn-dashed {
background-color: transparent;
border-color: #47c3ff;
color: #47c3ff;
cursor: pointer;
}
}
.progress-box {
width: 200px;
position: absolute;
left: 70px;
top: 14px;
}
}
.update-popup {
width: calc(100% - 20px);
height: calc(100% - 20px);
background: rgba(255,253,239,.7);
position: absolute;
top: 10px;
left: 10px;
border-radius: 3px;
z-index: 1;
border: .18rem dashed #cccccc;
.icon-box {
text-align: center;
margin-top: 96px;
.fa,.ans {
font-size: 50px;
color: #2d8cf0;
}
}
.p1 {
text-align: center;
font-size: 16px;
color: #333;
padding-top: 8px;
}
}
}
</style>

335
dolphinscheduler-ui/src/js/module/components/fileUpdate/fileReUpload.vue

@ -0,0 +1,335 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-popup
ref="popup"
:ok-text="$t('Upload')"
:nameText="$t('ReUpload File')"
@ok="_ok"
:disabled="progress === 0 ? false : true">
<template slot="content">
<form name="files" enctype="multipart/form-data" method="post">
<div class="file-update-model"
@drop.prevent="_onDrop"
@dragover.prevent="dragOver = true"
@dragleave.prevent="dragOver = false"
id="file-update-model">
<div class="tooltip-info">
<em class="ans ans-icon-warn-solid"></em>
<span>{{$t('Drag the file into the current upload window')}}</span>
</div>
<!--<div class="hide-archive" v-if="progress !== 0" @click="_ckArchive">
<em class="fa fa-minus" data-toggle="tooltip" title="Close window and continue uploading" data-container="body" ></em>
</div>-->
<div class="update-popup" v-if="dragOver">
<div class="icon-box">
<em class="ans ans-icon-upload"></em>
</div>
<p class="p1">
<span>{{$t('Drag area upload')}}</span>
</p>
</div>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('File Name')}}</template>
<template slot="content">
<x-input
type="input"
v-model="name"
:disabled="progress !== 0"
:placeholder="$t('Please enter name')"
autocomplete="off">
</x-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('Description')}}</template>
<template slot="content">
<x-input
type="textarea"
v-model="description"
:disabled="progress !== 0"
:placeholder="$t('Please enter description')"
autocomplete="off">
</x-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Upload Files')}}</template>
<template slot="content">
<div class="file-update-box">
<template v-if="progress === 0">
<input name="file" id="file" type="file" class="file-update">
<x-button type="dashed" size="xsmall"> {{$t('Upload')}} </x-button>
</template>
<div class="progress-box" v-if="progress !== 0">
<m-progress-bar :value="progress" text-placement="left-right"></m-progress-bar>
</div>
</div>
</template>
</m-list-box-f>
</div>
</form>
</template>
</m-popup>
</template>
<script>
import io from '@/module/io'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import mPopup from '@/module/components/popup/popup'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
import mProgressBar from '@/module/components/progressBar/progressBar'
export default {
name: 'file-update',
data () {
return {
store,
// name
name: '',
// description
description: '',
// progress
progress: 0,
// file
file: null,
currentDir: '/',
// Whether to drag upload
dragOver: false
}
},
watch: {
},
props: {
type: String,
fileName: String,
desc: String,
id: Number
},
methods: {
/**
* submit
*/
_ok () {
this.$refs['popup'].spinnerLoading = true
if (this._validation()) {
if(this.fileName===this.name) {
const isLt1024M = this.file.size / 1024 / 1024 < 1024
if(isLt1024M) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs['popup'].spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs['popup'].spinnerLoading = false
})
} else {
this.$message.warning(`${i18n.$t('Upload File Size')}`)
this.$refs['popup'].spinnerLoading = false
}
} else {
this.store.dispatch('resource/resourceVerifyName', {
fullName: '/'+this.name,
type: this.type
}).then(res => {
const isLt1024M = this.file.size / 1024 / 1024 < 1024
if(isLt1024M) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs['popup'].spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs['popup'].spinnerLoading = false
})
} else {
this.$message.warning(`${i18n.$t('Upload File Size')}`)
this.$refs['popup'].spinnerLoading = false
}
}).catch(e => {
this.$message.error(e.msg || '')
this.$refs['popup'].spinnerLoading = false
})
}
} else {
this.$refs['popup'].spinnerLoading = false
}
},
/**
* validation
*/
_validation () {
if (!this.name) {
this.$message.warning(`${i18n.$t('Please enter file name')}`)
return false
}
if (!this.file) {
this.$message.warning(`${i18n.$t('Please select the file to upload')}`)
return false
}
return true
},
/**
* update file
*/
_formDataUpdate () {
return new Promise((resolve, reject) => {
let self = this
let formData = new FormData()
formData.append('file', this.file)
formData.append('name', this.name)
formData.append('description', this.description)
formData.append('id', this.id)
formData.append('type', this.type)
io.post(`resources/update`, res => {
this.$message.success(res.msg)
resolve()
self.$emit('onUpdate')
}, e => {
reject(e)
self.$emit('close')
this.$message.error(e.msg || '')
}, {
data: formData,
emulateJSON: false,
onUploadProgress (progressEvent) {
// bytes uploaded so far
let loaded = progressEvent.loaded
// total size of the file being uploaded
let total = progressEvent.total
self.progress = Math.floor(100 * loaded / total)
self.$emit('onProgress', self.progress)
}
})
})
},
/**
* Collapse the dialog to the top-right corner and continue uploading in the background
*/
_ckArchive () {
$('.update-file-modal').hide()
this.$emit('onArchive')
},
/**
* Drag and drop upload
*/
_onDrop (e) {
let file = e.dataTransfer.files[0]
this.file = file
this.name = file.name
this.dragOver = false
}
},
mounted () {
this.name = this.fileName
this.description = this.desc
$('#file').change(() => {
let file = $('#file')[0].files[0]
this.file = file
this.name = file.name
})
},
components: { mPopup, mListBoxF, mProgressBar }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.file-update-model {
.tooltip-info {
position: absolute;
left: 20px;
bottom: 26px;
span {
font-size: 12px;
color: #666;
vertical-align: middle;
}
.fa,.ans {
color: #0097e0;
font-size: 14px;
vertical-align: middle;
}
}
.hide-archive {
position: absolute;
right: 22px;
top: 17px;
.fa,.ans{
font-size: 16px;
color: #333;
font-weight: normal;
cursor: pointer;
&:hover {
color: #0097e0;
}
}
}
.file-update-box {
padding-top: 4px;
position: relative;
.file-update {
width: 70px;
height: 40px;
position: absolute;
left: 0;
top: 0;
cursor: pointer;
filter: alpha(opacity=0);
-moz-opacity: 0;
opacity: 0;
}
&:hover {
.v-btn-dashed {
background-color: transparent;
border-color: #47c3ff;
color: #47c3ff;
cursor: pointer;
}
}
.progress-box {
width: 200px;
position: absolute;
left: 70px;
top: 14px;
}
}
.update-popup {
width: calc(100% - 20px);
height: calc(100% - 20px);
background: rgba(255,253,239,.7);
position: absolute;
top: 10px;
left: 10px;
border-radius: 3px;
z-index: 1;
border: .18rem dashed #cccccc;
.icon-box {
text-align: center;
margin-top: 96px;
.fa,.ans {
font-size: 50px;
color: #2d8cf0;
}
}
.p1 {
text-align: center;
font-size: 16px;
color: #333;
padding-top: 8px;
}
}
}
</style>
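
For readers tracing how this dialog is wired in: the component registers as file-update, accepts type, fileName, desc and id as props, and emits onUpdate, onProgress, onArchive and close. A minimal parent-template sketch follows; the list item fields (item.alias, item.description, item.id) and the handler names are assumptions for illustration, not taken from this diff:

<!-- hypothetical parent template, illustrative only -->
<m-file-update
  type="FILE"
  :file-name="item.alias"
  :desc="item.description"
  :id="item.id"
  @onUpdate="_onUpdate"
  @onProgress="_onProgress"
  @onArchive="_onArchive"
  @close="_close">
</m-file-update>

A parent would typically refresh its resource list in _onUpdate and use the value passed to _onProgress to keep a small progress bar visible after the dialog has been archived via _ckArchive.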

39
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteProjectTest.java

@@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.project.CreateProjectPage;
import org.testng.annotations.Test;
public class DeleteProjectTest extends BaseTest {
private CreateProjectPage createProjectPage;
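// part of the functionTests teardown: depends on the login and project groups and is ordered by priority 7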
@Test(groups = {"functionTests"}, dependsOnGroups = {"login", "project"}, description = "DeleteProjectTest", priority = 7)
public void testDeleteProject() throws InterruptedException {
createProjectPage = new CreateProjectPage(driver);
//jump to project manage page
System.out.println("jump to the project manage page to delete project");
createProjectPage.jumpProjectManagePage();
//delete the project and assert the operation succeeds
System.out.println("start delete project");
assert createProjectPage.deleteProject();
System.out.println("end delete project");
System.out.println("===================================");
}
}

38
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteTenantTest.java

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.testng.annotations.Test;
public class DeleteTenantTest extends BaseTest {
private TenantManagePage tenantManagePage;
@Test(groups = {"functionTests"}, dependsOnGroups = {"login", "createTenant"}, description = "DeleteTenantTest", priority = 9)
public void testDeleteTenant() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
//navigate to the tenant manage page under the security module
System.out.println("jump to security to delete tenant");
tenantManagePage.jumpSecurity();
System.out.println("start delete tenant");
assert tenantManagePage.deleteTenant();
System.out.println("end delete tenant");
System.out.println("===================================");
}
}

42
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteUserTest.java

@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.security.TenantManagePage;
import org.apache.dolphinscheduler.page.security.UserManagePage;
import org.testng.annotations.Test;
public class DeleteUserTest extends BaseTest {
private UserManagePage userManagePage;
private TenantManagePage tenantManagePage;
@Test(groups = {"functionTests"}, dependsOnGroups = {"login", "user"}, description = "DeleteUserTest", priority = 8)
public void testDeleteUser() throws InterruptedException {
tenantManagePage = new TenantManagePage(driver);
System.out.println("jump to security to delete user");
tenantManagePage.jumpSecurity();
userManagePage = new UserManagePage(driver);
//delete the user and assert the operation succeeds
System.out.println("start delete user");
assert userManagePage.deleteUser();
System.out.println("end delete user");
System.out.println("===================================");
}
}

43
e2e/src/test/java/org/apache/dolphinscheduler/testcase/deleteData/DeleteWorkflowTest.java

@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.testcase.deleteData;
import org.apache.dolphinscheduler.base.BaseTest;
import org.apache.dolphinscheduler.page.project.CreateProjectPage;
import org.apache.dolphinscheduler.page.project.CreateWorkflowPage;
import org.testng.annotations.Test;
public class DeleteWorkflowTest extends BaseTest {
private CreateWorkflowPage createWorkflowPage;
private CreateProjectPage createProjectPage;
@Test(groups = {"functionTests"}, dependsOnGroups = {"login", "workflow"}, description = "DeleteWorkflowTest", priority = 6)
public void testDeleteWorkflow() throws InterruptedException {
createProjectPage = new CreateProjectPage(driver);
//jump to project manage page
System.out.println("jump to the project manage page to delete workflow");
createProjectPage.jumpProjectManagePage();
createWorkflowPage = new CreateWorkflowPage(driver);
createWorkflowPage.jumpWorkflowPage();
//delete the workflow and assert the operation succeeds
System.out.println("start delete workflow");
assert createWorkflowPage.deleteWorkflow();
System.out.println("end delete workflow");
System.out.println("===================================");
}
}

19
sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql

@@ -0,0 +1,19 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
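-- Drop ONLY_FULL_GROUP_BY from sql_mode and disable foreign key checks for this upgrade session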
SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
SET FOREIGN_KEY_CHECKS=0;
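-- Reset phone numbers stored as the literal placeholder 'xx' (presumably left by earlier versions) to an empty string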
UPDATE t_ds_user SET phone = '' WHERE phone = 'xx';

17
sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql

@@ -0,0 +1,17 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
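-- Same cleanup as the MySQL upgrade script: reset the placeholder phone value 'xx' to an empty string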
UPDATE t_ds_user SET phone = '' WHERE phone = 'xx';