
Merge pull request #6 from apache/dev

update
samz406 committed 5 years ago via GitHub
parent commit aeeddbd523
  1. 10
      .asf.yaml
  2. 18
      .github/workflows/ci_backend.yml
  3. 12
      .github/workflows/ci_e2e.yml
  4. 16
      .github/workflows/ci_frontend.yml
  5. 20
      .github/workflows/ci_ut.yml
  6. 2
      README.md
  7. 19
      dockerfile/Dockerfile
  8. 40
      dockerfile/README.md
  9. 38
      dockerfile/README_zh_CN.md
  10. 27
      dockerfile/checkpoint.sh
  11. 2
      dockerfile/conf/dolphinscheduler/application.properties.tpl
  12. 12
      dockerfile/conf/dolphinscheduler/common.properties.tpl
  13. 0
      dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env.sh
  14. 2
      dockerfile/conf/dolphinscheduler/quartz.properties.tpl
  15. 2
      dockerfile/hooks/check
  16. 4
      dockerfile/startup-init-conf.sh
  17. 8
      dockerfile/startup.sh
  18. 4
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java
  19. 1
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/impl/DefaultHTMLTemplate.java
  20. 38
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java
  21. 2
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java
  22. 2
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java
  23. 1
      dolphinscheduler-alert/src/main/resources/alert.properties
  24. 8
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java
  25. 2
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/FuncUtilsTest.java
  26. 1
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java
  27. 6
      dolphinscheduler-api/pom.xml
  28. 4
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java
  29. 4
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java
  30. 4
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
  31. 422
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
  32. 15
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java
  33. 1
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java
  34. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java
  35. 1
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java
  36. 1
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java
  37. 2
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java
  38. 2
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java
  39. 5
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java
  40. 1
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java
  41. 12
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java
  42. 8
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java
  43. 1
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java
  44. 2
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitorUtilsTest.java
  45. 2
      dolphinscheduler-common/pom.xml
  46. 2
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
  47. 3
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/WorkerLogFilter.java
  48. 2
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java
  49. 14
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java
  50. 6
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
  51. 26
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
  52. 12
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
  53. 16
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java
  54. 36
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java
  55. 2
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java
  56. 1065
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java
  57. 286
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java
  58. 785
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java
  59. 40
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/ConstantsTest.java
  60. 2
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminatorTest.java
  61. 1
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogFilterTest.java
  62. 55
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java
  63. 17
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java
  64. 3
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java
  65. 24
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java
  66. 3
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PreconditionsTest.java
  67. 3
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SchemaUtilsTest.java
  68. 2
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java
  69. 3
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtilsTest.java
  70. 210
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32Test.java
  71. 124
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32Test.java
  72. 70
      dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32Test.java
  73. 21
      dolphinscheduler-dao/pom.xml
  74. 6
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java
  75. 2
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java
  76. 1
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java
  77. 1
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java
  78. 1
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java
  79. 1
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java
  80. 4
      dolphinscheduler-dao/src/main/resources/application.properties
  81. 4
      dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java
  82. 2
      dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java
  83. 1
      dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java
  84. 2
      dolphinscheduler-remote/pom.xml
  85. 2
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java
  86. 2
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java
  87. 2
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java
  88. 2
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java
  89. 1
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java
  90. 1
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java
  91. 1
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java
  92. 1
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java
  93. 2
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java
  94. 2
      dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java
  95. 8
      dolphinscheduler-server/pom.xml
  96. 2
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java
  97. 4
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java
  98. 4
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java
  99. 16
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java
  100. 4
      dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java
Some files were not shown because too many files have changed in this diff.

10
.asf.yaml

@ -0,0 +1,10 @@
staging:
  profile: ~
  whoami: dev
  foo: trigger
publish:
  whoami: dev
github:
  description: "Dolphin Scheduler is a distributed and easy-to-extend visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing.(分布式易扩展的可视化工作流任务调度)"

18
.github/workflows/ci_backend.yml

@ -45,17 +45,29 @@ jobs:
Compile-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Compile
run: mvn -U -B -T 1C clean install -Prelease -Dmaven.compile.fork=true -Dmaven.test.skip=true
run: mvn -B clean compile package -Prelease -Dmaven.test.skip=true
License-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:

12
.github/workflows/ci_e2e.yml

@ -15,7 +15,7 @@
# limitations under the License.
#
on: ["push", "pull_request"]
on: ["pull_request"]
env:
DOCKER_DIR: ./docker
LOG_DIR: /tmp/dolphinscheduler
@ -29,9 +29,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
with:
submodules: true
- uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- uses: actions/cache@v1
with:
path: ~/.m2/repository

16
.github/workflows/ci_frontend.yml

@ -34,7 +34,13 @@ jobs:
matrix:
os: [ubuntu-latest, macos-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Set up Node.js
uses: actions/setup-node@v1
with:
@ -49,7 +55,13 @@ jobs:
License-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:

20
.github/workflows/ci_ut.yml

@ -15,7 +15,7 @@
# limitations under the License.
#
on: ["push", "pull_request"]
on: ["pull_request", "push"]
env:
DOCKER_DIR: ./docker
LOG_DIR: /tmp/dolphinscheduler
@ -29,9 +29,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
with:
submodules: true
- uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- uses: actions/cache@v1
with:
path: ~/.m2/repository
@ -48,7 +52,15 @@ jobs:
run: |
export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx3g'
mvn test -B -Dmaven.test.skip=false
- name: Upload coverage report to codecov
if: github.event_name == 'pull_request'
run: |
CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash)
- name: Git fetch unshallow
run: |
git fetch --unshallow
git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
git fetch origin
- name: Run SonarCloud Analysis
run: >
mvn verify --batch-mode

2
README.md

@ -17,7 +17,7 @@ Dolphin Scheduler Official Website
### Design features:
A distributed and easy-to-expand visual DAG workflow scheduling system. Dedicated to solving the complex dependencies in data processing, making the scheduling system `out of the box` for data processing.
A distributed and easy-to-extend visual DAG workflow scheduling system. Dedicated to solving the complex dependencies in data processing, making the scheduling system `out of the box` for data processing.
Its main objectives are as follows:
- Associate the Tasks according to the dependencies of the tasks in a DAG graph, which can visualize the running state of task in real time.

19
dockerfile/Dockerfile

@ -23,11 +23,11 @@ ENV TZ Asia/Shanghai
ENV LANG C.UTF-8
ENV DEBIAN_FRONTEND noninteractive
#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip kazoo.
#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip tini kazoo.
#If install slowly, you can replcae alpine's mirror with aliyun's mirror, Example:
#RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
RUN apk update && \
apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip && \
apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip tini && \
apk add --update procps && \
openrc boot && \
pip install kazoo
@ -63,19 +63,22 @@ RUN echo "daemon off;" >> /etc/nginx/nginx.conf && \
ADD ./conf/nginx/dolphinscheduler.conf /etc/nginx/conf.d
#7. add configuration and modify permissions and set soft links
ADD ./checkpoint.sh /root/checkpoint.sh
ADD ./startup-init-conf.sh /root/startup-init-conf.sh
ADD ./startup.sh /root/startup.sh
ADD ./conf/dolphinscheduler/*.tpl /opt/dolphinscheduler/conf/
ADD ./conf/dolphinscheduler/env/dolphinscheduler_env /opt/dolphinscheduler/conf/env/
RUN chmod +x /root/startup-init-conf.sh && \
ADD conf/dolphinscheduler/env/dolphinscheduler_env.sh /opt/dolphinscheduler/conf/env/
RUN chmod +x /root/checkpoint.sh && \
chmod +x /root/startup-init-conf.sh && \
chmod +x /root/startup.sh && \
chmod +x /opt/dolphinscheduler/conf/env/dolphinscheduler_env && \
chmod +x /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh && \
chmod +x /opt/dolphinscheduler/script/*.sh && \
chmod +x /opt/dolphinscheduler/bin/*.sh && \
chmod +x /opt/zookeeper/bin/*.sh && \
dos2unix /root/checkpoint.sh && \
dos2unix /root/startup-init-conf.sh && \
dos2unix /root/startup.sh && \
dos2unix /opt/dolphinscheduler/conf/env/dolphinscheduler_env && \
dos2unix /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh && \
dos2unix /opt/dolphinscheduler/script/*.sh && \
dos2unix /opt/dolphinscheduler/bin/*.sh && \
dos2unix /opt/zookeeper/bin/*.sh && \
@ -87,6 +90,6 @@ RUN chmod +x /root/startup-init-conf.sh && \
RUN rm -rf /var/cache/apk/*
#9. expose port
EXPOSE 2181 2888 3888 5432 12345 8888
EXPOSE 2181 2888 3888 5432 12345 50051 8888
ENTRYPOINT ["/root/startup.sh"]
ENTRYPOINT ["/sbin/tini", "--", "/root/startup.sh"]

40
dockerfile/README.md

@ -16,7 +16,7 @@ Official Website: https://dolphinscheduler.apache.org
#### You can start a dolphinscheduler instance
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test \
-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -e POSTGRESQL_DATABASE=dolphinscheduler \
-p 8888:8888 \
dolphinscheduler all
```
@ -25,13 +25,13 @@ The default postgres user `root`, postgres password `root` and database `dolphin
The default zookeeper is created in the `startup.sh`.
#### Or via Environment Variables **`POSTGRESQL_HOST`** **`POSTGRESQL_PORT`** **`ZOOKEEPER_QUORUM`**
#### Or via Environment Variables **`POSTGRESQL_HOST`** **`POSTGRESQL_PORT`** **`POSTGRESQL_DATABASE`** **`ZOOKEEPER_QUORUM`**
You can specify **existing postgres service**. Example:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 8888:8888 \
dolphinscheduler all
@ -42,7 +42,7 @@ You can specify **existing zookeeper service**. Example:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-p 8888:8888 \
dolphinscheduler all
```
@ -56,7 +56,7 @@ You can start a standalone dolphinscheduler server.
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler master-server
```
@ -66,7 +66,7 @@ dolphinscheduler master-server
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler worker-server
```
@ -75,7 +75,7 @@ dolphinscheduler worker-server
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 12345:12345 \
dolphinscheduler api-server
@ -85,7 +85,7 @@ dolphinscheduler api-server
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler alert-server
```
@ -99,7 +99,7 @@ $ docker run -dit --name dolphinscheduler \
dolphinscheduler frontend
```
**Note**: You must be specify `POSTGRESQL_HOST` `POSTGRESQL_PORT` `ZOOKEEPER_QUORUM` when start a standalone dolphinscheduler server.
**Note**: You must be specify `POSTGRESQL_HOST` `POSTGRESQL_PORT` `POSTGRESQL_DATABASE` `POSTGRESQL_USERNAME` `POSTGRESQL_PASSWORD` `ZOOKEEPER_QUORUM` when start a standalone dolphinscheduler server.
## How to build a docker image
@ -140,14 +140,36 @@ This environment variable sets the port for PostgreSQL. The default value is `54
This environment variable sets the username for PostgreSQL. The default value is `root`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`POSTGRESQL_PASSWORD`**
This environment variable sets the password for PostgreSQL. The default value is `root`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`POSTGRESQL_DATABASE`**
This environment variable sets the database for PostgreSQL. The default value is `dolphinscheduler`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DOLPHINSCHEDULER_ENV_PATH`**
This environment variable sets the runtime environment for task. The default value is `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`.
**`DOLPHINSCHEDULER_DATA_BASEDIR_PATH`**
User data directory path, self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler`
**`DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH`**
Directory path for user data download. self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler/download`
**`DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH`**
Process execute directory. self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler/exec`
**`TASK_QUEUE`**
This environment variable sets the task queue for `master-server` and `worker-serverr`. The default value is `zookeeper`.
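Putting the variables above together, a standalone worker-server start that overrides most of them could look like the sketch below (host addresses, credentials and the container name are placeholders, mirroring the earlier examples in this README):
```
$ docker run -dit --name dolphinscheduler-worker \
-e ZOOKEEPER_QUORUM="192.168.x.x:2181" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-e DOLPHINSCHEDULER_ENV_PATH="/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh" \
-e DOLPHINSCHEDULER_DATA_BASEDIR_PATH="/tmp/dolphinscheduler" \
-e DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH="/tmp/dolphinscheduler/download" \
-e DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH="/tmp/dolphinscheduler/exec" \
-e TASK_QUEUE="zookeeper" \
dolphinscheduler worker-server
```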

38
dockerfile/README_zh_CN.md

@ -16,7 +16,7 @@ Official Website: https://dolphinscheduler.apache.org
#### 你可以运行一个dolphinscheduler实例
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test \
-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -e POSTGRESQL_DATABASE=dolphinscheduler \
-p 8888:8888 \
dolphinscheduler all
```
@ -31,7 +31,7 @@ dolphinscheduler all
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 8888:8888 \
dolphinscheduler all
@ -42,7 +42,7 @@ dolphinscheduler all
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-p 8888:8888 \
dolphinscheduler all
```
@ -56,7 +56,7 @@ dolphinscheduler all
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler master-server
```
@ -66,7 +66,7 @@ dolphinscheduler master-server
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler worker-server
```
@ -75,7 +75,7 @@ dolphinscheduler worker-server
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 12345:12345 \
dolphinscheduler api-server
@ -85,7 +85,7 @@ dolphinscheduler api-server
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler alert-server
```
@ -99,7 +99,7 @@ $ docker run -dit --name dolphinscheduler \
dolphinscheduler frontend
```
**注意**: 当你运行dolphinscheduler中的部分服务时,你必须指定这些环境变量 `POSTGRESQL_HOST` `POSTGRESQL_PORT` `ZOOKEEPER_QUORUM`
**注意**: 当你运行dolphinscheduler中的部分服务时,你必须指定这些环境变量 `POSTGRESQL_HOST` `POSTGRESQL_PORT` `POSTGRESQL_DATABASE` `POSTGRESQL_USERNAME` `POSTGRESQL_PASSWORD` `ZOOKEEPER_QUORUM`
## 如何构建一个docker镜像
@ -140,14 +140,36 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些
配置`PostgreSQL`的`USERNAME`, 默认值 `root`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`POSTGRESQL_PASSWORD`**
配置`PostgreSQL`的`PASSWORD`, 默认值 `root`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`POSTGRESQL_DATABASE`**
配置`PostgreSQL`的`DATABASE`, 默认值 `dolphinscheduler`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`DOLPHINSCHEDULER_ENV_PATH`**
任务执行时的环境变量配置文件, 默认值 `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`
**`DOLPHINSCHEDULER_DATA_BASEDIR_PATH`**
用户数据目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler`
**`DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH`**
用户数据下载目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler/download`
**`DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH`**
任务执行目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler/exec`
**`TASK_QUEUE`**
配置`master-server`和`worker-serverr`的`Zookeeper`任务队列名, 默认值 `zookeeper`

27
dockerfile/checkpoint.sh

@ -0,0 +1,27 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
if [ "$(ps -ef | grep java | grep -c $1)" -eq 0 ]; then
echo "[ERROR] $1 process not exits."
exit 1
else
echo "[INFO] $1 process exits."
exit 0
fi
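The script expects a server process name as its only argument and greps the container's java processes for it. A minimal usage sketch (the container name and the process name `MasterServer` are illustrative and assumed to match the running master's command line):
```
# run the liveness check inside a running container
$ docker exec dolphinscheduler /root/checkpoint.sh MasterServer
[INFO] MasterServer process exits.
$ echo $?   # 0 when the process was found, 1 otherwise
0
```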

2
dockerfile/conf/dolphinscheduler/application.properties.tpl

@ -19,7 +19,7 @@
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
# postgre
spring.datasource.driver-class-name=org.postgresql.Driver
spring.datasource.url=jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/dolphinscheduler?characterEncoding=utf8
spring.datasource.url=jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/${POSTGRESQL_DATABASE}?characterEncoding=utf8
# mysql
#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8

12
dockerfile/conf/dolphinscheduler/common.properties.tpl

@ -38,6 +38,12 @@ dolphinscheduler.env.path=${DOLPHINSCHEDULER_ENV_PATH}
resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties
# is development state? default "false"
development.state=true
# user data directory path, self configuration, please make sure the directory exists and have read write permissions
data.basedir.path=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH}
# directory path for user data download. self configuration, please make sure the directory exists and have read write permissions
data.download.basedir.path=${DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH}
# process execute directory. self configuration, please make sure the directory exists and have read write permissions
process.exec.basepath=${DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH}
# resource upload startup type : HDFS,S3,NONE
res.upload.startup.type=NONE
@ -49,12 +55,6 @@ res.upload.startup.type=NONE
hdfs.root.user=hdfs
# data base dir, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended
data.store2hdfs.basepath=/dolphinscheduler
# user data directory path, self configuration, please make sure the directory exists and have read write permissions
data.basedir.path=/tmp/dolphinscheduler
# directory path for user data download. self configuration, please make sure the directory exists and have read write permissions
data.download.basedir.path=/tmp/dolphinscheduler/download
# process execute directory. self configuration, please make sure the directory exists and have read write permissions
process.exec.basepath=/tmp/dolphinscheduler/exec
# whether kerberos starts
hadoop.security.authentication.startup.state=false
# java.security.krb5.conf path

0
dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env → dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env.sh

2
dockerfile/conf/dolphinscheduler/quartz.properties.tpl

@ -22,7 +22,7 @@
org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
# postgre
org.quartz.dataSource.myDs.driver = org.postgresql.Driver
org.quartz.dataSource.myDs.URL = jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/dolphinscheduler?characterEncoding=utf8
org.quartz.dataSource.myDs.URL = jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/${POSTGRESQL_DATABASE}?characterEncoding=utf8
org.quartz.dataSource.myDs.user = ${POSTGRESQL_USERNAME}
org.quartz.dataSource.myDs.password = ${POSTGRESQL_PASSWORD}
org.quartz.scheduler.instanceName = DolphinScheduler

2
dockerfile/hooks/check

@ -17,7 +17,7 @@
#
echo "------ dolphinscheduler check - server - status -------"
sleep 20
server_num=$(docker top `docker container list | grep startup | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
server_num=$(docker top `docker container list | grep '/sbin/tini' | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
if [ $server_num -eq 5 ]
then
echo "Server all start successfully"

4
dockerfile/startup-init-conf.sh

@ -28,11 +28,15 @@ export POSTGRESQL_HOST=${POSTGRESQL_HOST:-"127.0.0.1"}
export POSTGRESQL_PORT=${POSTGRESQL_PORT:-"5432"}
export POSTGRESQL_USERNAME=${POSTGRESQL_USERNAME:-"root"}
export POSTGRESQL_PASSWORD=${POSTGRESQL_PASSWORD:-"root"}
export POSTGRESQL_DATABASE=${POSTGRESQL_DATABASE:-"dolphinscheduler"}
#============================================================================
# System
#============================================================================
export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"}
export DOLPHINSCHEDULER_DATA_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH:-"/tmp/dolphinscheduler"}
export DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH:-"/tmp/dolphinscheduler/download"}
export DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH=${DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH:-"/tmp/dolphinscheduler/exec"}
#============================================================================
# Zookeeper

8
dockerfile/startup.sh

@ -164,6 +164,7 @@ case "$1" in
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-worker.log
;;
(api-server)
initZK
initPostgreSQL
initApiServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-api-server.log
@ -187,6 +188,9 @@ case "$1" in
;;
esac
echo "tee begin"
exec tee ${LOGFILE}
# init directories and log files
mkdir -p ${DOLPHINSCHEDULER_LOGS} && mkdir -p /var/log/nginx/ && cat /dev/null >> ${LOGFILE}
echo "tail begin"
exec bash -c "tail -n 1 -f ${LOGFILE}"

4
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java

@ -42,8 +42,8 @@ public class EnterpriseWeChatManager {
public Map<String,Object> send(Alert alert, String token){
Map<String,Object> retMap = new HashMap<>();
retMap.put(Constants.STATUS, false);
String agentId = EnterpriseWeChatUtils.enterpriseWeChatAgentId;
String users = EnterpriseWeChatUtils.enterpriseWeChatUsers;
String agentId = EnterpriseWeChatUtils.ENTERPRISE_WE_CHAT_AGENT_ID;
String users = EnterpriseWeChatUtils.ENTERPRISE_WE_CHAT_USERS;
List<String> userList = Arrays.asList(users.split(","));
logger.info("send message {}",alert);
String msg = EnterpriseWeChatUtils.makeUserSendMsg(userList, agentId,EnterpriseWeChatUtils.markdownByAlert(alert));

1
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/impl/DefaultHTMLTemplate.java

@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.alert.template.impl;
import org.apache.dolphinscheduler.alert.template.AlertTemplate;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.alert.utils.JSONUtils;
import org.apache.dolphinscheduler.alert.utils.MailUtils;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;

38
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java

@ -43,24 +43,24 @@ public class EnterpriseWeChatUtils {
public static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatUtils.class);
private static final String enterpriseWeChatCorpId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_CORP_ID);
private static final String ENTERPRISE_WE_CHAT_CORP_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_CORP_ID);
private static final String enterpriseWeChatSecret = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET);
private static final String ENTERPRISE_WE_CHAT_SECRET = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET);
private static final String enterpriseWeChatTokenUrl = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL);
private static String enterpriseWeChatTokenUrlReplace = enterpriseWeChatTokenUrl
.replaceAll("\\$corpId", enterpriseWeChatCorpId)
.replaceAll("\\$secret", enterpriseWeChatSecret);
private static final String ENTERPRISE_WE_CHAT_TOKEN_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL);
private static final String ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE = ENTERPRISE_WE_CHAT_TOKEN_URL
.replaceAll("\\$corpId", ENTERPRISE_WE_CHAT_CORP_ID)
.replaceAll("\\$secret", ENTERPRISE_WE_CHAT_SECRET);
private static final String enterpriseWeChatPushUrl = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_PUSH_URL);
private static final String ENTERPRISE_WE_CHAT_PUSH_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_PUSH_URL);
private static final String enterpriseWeChatTeamSendMsg = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG);
private static final String ENTERPRISE_WE_CHAT_TEAM_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG);
private static final String enterpriseWeChatUserSendMsg = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG);
private static final String ENTERPRISE_WE_CHAT_USER_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG);
public static final String enterpriseWeChatAgentId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);
public static final String ENTERPRISE_WE_CHAT_AGENT_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);
public static final String enterpriseWeChatUsers = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);
public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);
/**
* get Enterprise WeChat is enable
@ -87,7 +87,7 @@ public class EnterpriseWeChatUtils {
CloseableHttpClient httpClient = HttpClients.createDefault();
try {
HttpGet httpGet = new HttpGet(enterpriseWeChatTokenUrlReplace);
HttpGet httpGet = new HttpGet(ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE);
CloseableHttpResponse response = httpClient.execute(httpGet);
try {
HttpEntity entity = response.getEntity();
@ -114,7 +114,7 @@ public class EnterpriseWeChatUtils {
* @return Enterprise WeChat send message
*/
public static String makeTeamSendMsg(String toParty, String agentId, String msg) {
return enterpriseWeChatTeamSendMsg.replaceAll("\\$toParty", toParty)
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\$toParty", toParty)
.replaceAll("\\$agentId", agentId)
.replaceAll("\\$msg", msg);
}
@ -128,7 +128,7 @@ public class EnterpriseWeChatUtils {
*/
public static String makeTeamSendMsg(Collection<String> toParty, String agentId, String msg) {
String listParty = FuncUtils.mkString(toParty, "|");
return enterpriseWeChatTeamSendMsg.replaceAll("\\$toParty", listParty)
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\$toParty", listParty)
.replaceAll("\\$agentId", agentId)
.replaceAll("\\$msg", msg);
}
@ -141,7 +141,7 @@ public class EnterpriseWeChatUtils {
* @return Enterprise WeChat send message
*/
public static String makeUserSendMsg(String toUser, String agentId, String msg) {
return enterpriseWeChatUserSendMsg.replaceAll("\\$toUser", toUser)
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\$toUser", toUser)
.replaceAll("\\$agentId", agentId)
.replaceAll("\\$msg", msg);
}
@ -155,7 +155,7 @@ public class EnterpriseWeChatUtils {
*/
public static String makeUserSendMsg(Collection<String> toUser, String agentId, String msg) {
String listUser = FuncUtils.mkString(toUser, "|");
return enterpriseWeChatUserSendMsg.replaceAll("\\$toUser", listUser)
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\$toUser", listUser)
.replaceAll("\\$agentId", agentId)
.replaceAll("\\$msg", msg);
}
@ -169,7 +169,7 @@ public class EnterpriseWeChatUtils {
* @throws IOException the IOException
*/
public static String sendEnterpriseWeChat(String charset, String data, String token) throws IOException {
String enterpriseWeChatPushUrlReplace = enterpriseWeChatPushUrl.replaceAll("\\$token", token);
String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\$token", token);
CloseableHttpClient httpClient = HttpClients.createDefault();
try {
@ -184,8 +184,8 @@ public class EnterpriseWeChatUtils {
} finally {
response.close();
}
logger.info("Enterprise WeChat send [{}], param:{}, resp:{}",
enterpriseWeChatPushUrl, data, resp);
logger.info("Enterprise WeChat send [{}], param:{}, resp:{}",
ENTERPRISE_WE_CHAT_PUSH_URL, data, resp);
return resp;
} finally {
httpClient.close();

2
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java

@ -20,7 +20,7 @@ import org.apache.dolphinscheduler.common.utils.StringUtils;
public class FuncUtils {
static public String mkString(Iterable<String> list, String split) {
public static String mkString(Iterable<String> list, String split) {
if (null == list || StringUtils.isEmpty(split)){
return null;

2
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java

@ -334,7 +334,7 @@ public class MailUtils {
* @param e the exception
*/
private static void handleException(Collection<String> receivers, Map<String, Object> retMap, Exception e) {
logger.error("Send email to {} failed {}", receivers, e);
logger.error("Send email to {} failed", receivers, e);
retMap.put(Constants.MESSAGE, "Send email to {" + String.join(",", receivers) + "} failed," + e.toString());
}

1
dolphinscheduler-alert/src/main/resources/alert.properties

@ -28,7 +28,6 @@ mail.server.port=25
mail.sender=xxx@xxx.com
mail.user=xxx@xxx.com
mail.passwd=111111
# TLS
mail.smtp.starttls.enable=true
# SSL

8
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java

@ -53,7 +53,7 @@ public class EnterpriseWeChatUtilsTest {
String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
String errmsg = JSON.parseObject(resp).getString("errmsg");
Assert.assertEquals(errmsg, "ok");
Assert.assertEquals("ok",errmsg);
} catch (IOException e) {
e.printStackTrace();
}
@ -68,7 +68,7 @@ public class EnterpriseWeChatUtilsTest {
String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
String errmsg = JSON.parseObject(resp).getString("errmsg");
Assert.assertEquals(errmsg, "ok");
Assert.assertEquals("ok",errmsg);
} catch (IOException e) {
e.printStackTrace();
}
@ -95,7 +95,7 @@ public class EnterpriseWeChatUtilsTest {
String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
String errmsg = JSON.parseObject(resp).getString("errmsg");
Assert.assertEquals(errmsg, "ok");
Assert.assertEquals("ok",errmsg);
} catch (IOException e) {
e.printStackTrace();
}
@ -110,7 +110,7 @@ public class EnterpriseWeChatUtilsTest {
String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
String errmsg = JSON.parseObject(resp).getString("errmsg");
Assert.assertEquals(errmsg, "ok");
Assert.assertEquals("ok",errmsg);
} catch (IOException e) {
e.printStackTrace();
}

2
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/FuncUtilsTest.java

@ -46,7 +46,7 @@ public class FuncUtilsTest {
logger.info(result);
//Expected result string
assertEquals(result, "user1|user2|user3");
assertEquals("user1|user2|user3", result);
//Null list expected return null
result = FuncUtils.mkString(null, split);

1
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java

@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.alert.utils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;

6
dolphinscheduler-api/pom.xml

@ -140,6 +140,12 @@
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- hadoop -->

4
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java

@ -103,7 +103,7 @@ public class DataAnalysisController extends BaseController{
@RequestParam(value="endDate", required=false) String endDate,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id",
logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id:{}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate);
return returnDataList(result);
@ -129,7 +129,7 @@ public class DataAnalysisController extends BaseController{
public Result countDefinitionByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count process definition , user:{}, project id",
logger.info("count process definition , user:{}, project id:{}",
loginUser.getUserName(), projectId);
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(loginUser, projectId);
return returnDataList(result);

4
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java

@ -391,8 +391,8 @@ public class DataSourceController extends BaseController {
try {
return dataSourceService.verifyDataSourceName(loginUser, name);
} catch (Exception e) {
logger.error(VERFIY_DATASOURCE_NAME_FAILURE.getMsg(),e);
return error(VERFIY_DATASOURCE_NAME_FAILURE.getCode(), VERFIY_DATASOURCE_NAME_FAILURE.getMsg());
logger.error(VERIFY_DATASOURCE_NAME_FAILURE.getMsg(), e);
return error(VERIFY_DATASOURCE_NAME_FAILURE.getCode(), VERIFY_DATASOURCE_NAME_FAILURE.getMsg());
}
}

4
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java

@ -149,7 +149,7 @@ public class ExecutorController extends BaseController {
) {
try {
logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}",
loginUser.getUserName(), projectName, processInstanceId, executeType.toString());
loginUser.getUserName(), projectName, processInstanceId, executeType);
Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType);
return returnDataList(result);
} catch (Exception e) {
@ -173,7 +173,7 @@ public class ExecutorController extends BaseController {
@ResponseStatus(HttpStatus.OK)
public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
logger.info("login user {}, check process definition", loginUser.getUserName(), processDefinitionId);
logger.info("login user {}, check process definition {}", loginUser.getUserName(), processDefinitionId);
try {
Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId);
return returnDataList(result);

422
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@ -16,244 +16,248 @@
*/
package org.apache.dolphinscheduler.api.enums;
import org.springframework.context.i18n.LocaleContextHolder;
import java.util.Locale;
/**
* status enum
*/
public enum Status {
SUCCESS(0, "success"),
SUCCESS(0, "success", "成功"),
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid"),
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid"),
USER_NAME_EXIST(10003, "user name already exists"),
USER_NAME_NULL(10004,"user name is null"),
HDFS_OPERATION_ERROR(10006, "hdfs operation error"),
TASK_INSTANCE_NOT_FOUND(10008, "task instance not found"),
TENANT_NAME_EXIST(10009, "tenant code already exists"),
USER_NOT_EXIST(10010, "user {0} not exists"),
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found"),
ALERT_GROUP_EXIST(10012, "alarm group already exists"),
USER_NAME_PASSWD_ERROR(10013,"user name or password error"),
LOGIN_SESSION_FAILED(10014,"create session failed!"),
DATASOURCE_EXIST(10015, "data source name already exists"),
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed"),
TENANT_NOT_EXIST(10017, "tenant not exists"),
PROJECT_NOT_FOUNT(10018, "project {0} not found "),
PROJECT_ALREADY_EXISTS(10019, "project {0} already exists"),
TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist"),
TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance"),
SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist"),
SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow updateProcessInstance operations"),
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}"),
MASTER_NOT_EXISTS(10025, "master does not exist"),
SCHEDULE_STATUS_UNKNOWN(10026, "unknown command: {0}"),
CREATE_ALERT_GROUP_ERROR(10027,"create alert group error"),
QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error"),
LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error"),
UPDATE_ALERT_GROUP_ERROR(10030,"updateProcessInstance alert group error"),
DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error"),
ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error"),
CREATE_DATASOURCE_ERROR(10033,"create datasource error"),
UPDATE_DATASOURCE_ERROR(10034,"updateProcessInstance datasource error"),
QUERY_DATASOURCE_ERROR(10035,"query datasource error"),
CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure"),
CONNECTION_TEST_FAILURE(10037,"connection test failure"),
DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure"),
VERFIY_DATASOURCE_NAME_FAILURE(10039,"verfiy datasource name failure"),
UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource"),
AUTHORIZED_DATA_SOURCE(10041,"authorized data source"),
LOGIN_SUCCESS(10042,"login success"),
USER_LOGIN_FAILURE(10043,"user login failure"),
LIST_WORKERS_ERROR(10044,"list workers error"),
LIST_MASTERS_ERROR(10045,"list masters error"),
UPDATE_PROJECT_ERROR(10046,"updateProcessInstance project error"),
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error"),
CREATE_PROJECT_ERROR(10048,"create project error"),
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error"),
DELETE_PROJECT_ERROR(10050,"delete project error"),
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error"),
QUERY_AUTHORIZED_PROJECT(10052,"query authorized project"),
QUERY_QUEUE_LIST_ERROR(10053,"query queue list error"),
CREATE_RESOURCE_ERROR(10054,"create resource error"),
UPDATE_RESOURCE_ERROR(10055,"updateProcessInstance resource error"),
QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error"),
QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging"),
DELETE_RESOURCE_ERROR(10058,"delete resource error"),
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error"),
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file online error"),
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error"),
RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error"),
CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error"),
VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error"),
UPDATE_UDF_FUNCTION_ERROR(10067,"updateProcessInstance udf function error"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error"),
QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error"),
VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error"),
DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error"),
AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error"),
UNAUTHORIZED_FILE_RESOURCE_ERROR( 10073,"unauthorized file resource error"),
UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error"),
CREATE_SCHEDULE_ERROR(10076,"create schedule error"),
UPDATE_SCHEDULE_ERROR(10077,"updateProcessInstance schedule error"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error"),
OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error"),
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error"),
QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error"),
QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error"),
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error"),
CREATE_TENANT_ERROR(10084,"create tenant error"),
QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error"),
QUERY_TENANT_LIST_ERROR(10086,"query tenant list error"),
UPDATE_TENANT_ERROR(10087,"updateProcessInstance tenant error"),
DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error"),
VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error"),
CREATE_USER_ERROR(10090,"create user error"),
QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error"),
UPDATE_USER_ERROR(10092,"updateProcessInstance user error"),
DELETE_USER_BY_ID_ERROR(10093,"delete user by id error"),
GRANT_PROJECT_ERROR(10094,"grant project error"),
GRANT_RESOURCE_ERROR(10095,"grant resource error"),
GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error"),
GRANT_DATASOURCE_ERROR(10097,"grant datasource error"),
GET_USER_INFO_ERROR(10098,"get user info error"),
USER_LIST_ERROR(10099,"user list error"),
VERIFY_USERNAME_ERROR(10100,"verify username error"),
UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error"),
AUTHORIZED_USER_ERROR(10102,"authorized user error"),
QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error"),
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error"),
CREATE_PROCESS_DEFINITION(10105,"create process definition"),
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error"),
UPDATE_PROCESS_DEFINITION_ERROR(10107,"updateProcessInstance process definition error"),
RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error"),
QUERY_PROCCESS_DEFINITION_LIST(10110,"query proccess definition list"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error"),
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error"),
UPDATE_PROCESS_INSTANCE_ERROR(10115,"updateProcessInstance process instance error"),
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance by id error"),
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error"),
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error"),
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error"),
QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query proccess definition list paging error"),
SIGN_OUT_ERROR(10123,"sign out error"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists"),
IP_IS_EMPTY(10125,"ip is empty"),
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}"),
CREATE_QUEUE_ERROR(10127, "create queue error"),
QUEUE_NOT_EXIST(10128, "queue {0} not exists"),
QUEUE_VALUE_EXIST(10129, "queue value {0} already exists"),
QUEUE_NAME_EXIST(10130, "queue name {0} already exists"),
UPDATE_QUEUE_ERROR(10131, "update queue error"),
NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required"),
VERIFY_QUEUE_ERROR(10133,"verify queue error"),
NAME_NULL(10134,"name must be not null"),
NAME_EXIST(10135, "name {0} already exists"),
SAVE_ERROR(10136, "save error"),
DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!"),
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error"),
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error"),
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end"),
DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it"),
DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it"),
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"),
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"),
USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"),
USER_NAME_NULL(10004,"user name is null", "用户名不能为空"),
HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"),
TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"),
TENANT_NAME_EXIST(10009, "tenant code already exists", "租户编码不能为空"),
USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"),
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"),
ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"),
USER_NAME_PASSWD_ERROR(10013,"user name or password error", "用户名或密码错误"),
LOGIN_SESSION_FAILED(10014,"create session failed!", "创建session失败"),
DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"),
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"),
TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"),
PROJECT_NOT_FOUNT(10018, "project {0} not found ", "项目[{0}]不存在"),
PROJECT_ALREADY_EXISTS(10019, "project {0} already exists", "项目名称[{0}]已存在"),
TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist", "任务实例[{0}]不存在"),
TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance", "任务实例[{0}]不是子流程实例"),
SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist", "调度配置定时表达式[{0}]不存在"),
SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow update operations", "调度配置上线状态不允许修改"),
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"),
MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"),
SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"),
CREATE_ALERT_GROUP_ERROR(10027,"create alert group error", "创建告警组错误"),
QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error", "查询告警组错误"),
LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error", "分页查询告警组错误"),
UPDATE_ALERT_GROUP_ERROR(10030,"update alert group error", "更新告警组错误"),
DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error", "删除告警组错误"),
ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error", "告警组授权用户错误"),
CREATE_DATASOURCE_ERROR(10033,"create datasource error", "创建数据源错误"),
UPDATE_DATASOURCE_ERROR(10034,"update datasource error", "更新数据源错误"),
QUERY_DATASOURCE_ERROR(10035,"query datasource error", "查询数据源错误"),
CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure", "建立数据源连接失败"),
CONNECTION_TEST_FAILURE(10037,"connection test failure", "测试数据源连接失败"),
DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure", "删除数据源失败"),
VERIFY_DATASOURCE_NAME_FAILURE(10039,"verify datasource name failure", "验证数据源名称失败"),
UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource", "未经授权的数据源"),
AUTHORIZED_DATA_SOURCE(10041,"authorized data source", "授权数据源失败"),
LOGIN_SUCCESS(10042,"login success", "登录成功"),
USER_LOGIN_FAILURE(10043,"user login failure", "用户登录失败"),
LIST_WORKERS_ERROR(10044,"list workers error", "查询worker列表错误"),
LIST_MASTERS_ERROR(10045,"list masters error", "查询master列表错误"),
UPDATE_PROJECT_ERROR(10046,"update project error", "更新项目信息错误"),
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error", "查询项目详细信息错误"),
CREATE_PROJECT_ERROR(10048,"create project error", "创建项目错误"),
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error", "分页查询项目列表错误"),
DELETE_PROJECT_ERROR(10050,"delete project error", "删除项目错误"),
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error", "查询未授权项目错误"),
QUERY_AUTHORIZED_PROJECT(10052,"query authorized project", "查询授权项目错误"),
QUERY_QUEUE_LIST_ERROR(10053,"query queue list error", "查询队列列表错误"),
CREATE_RESOURCE_ERROR(10054,"create resource error", "创建资源错误"),
UPDATE_RESOURCE_ERROR(10055,"update resource error", "更新资源错误"),
QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error", "查询资源列表错误"),
QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging", "分页查询资源列表错误"),
DELETE_RESOURCE_ERROR(10058,"delete resource error", "删除资源错误"),
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error", "资源名称或类型验证错误"),
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file online error", "查看资源文件错误"),
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error", "创建资源文件错误"),
RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty", "资源文件内容不能为空"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error", "更新资源文件错误"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error", "下载资源文件错误"),
CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error", "创建UDF函数错误"),
VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error", "查询UDF函数错误"),
UPDATE_UDF_FUNCTION_ERROR(10067,"update udf function error", "更新UDF函数错误"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error", "分页查询UDF函数列表错误"),
QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error", "查询数据源信息错误"),
VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error", "UDF函数名称验证错误"),
DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error", "删除UDF函数错误"),
AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error", "授权资源文件错误"),
UNAUTHORIZED_FILE_RESOURCE_ERROR( 10073,"unauthorized file resource error", "查询未授权资源错误"),
UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error", "查询未授权UDF函数错误"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error", "授权UDF函数错误"),
CREATE_SCHEDULE_ERROR(10076,"create schedule error", "创建调度配置错误"),
UPDATE_SCHEDULE_ERROR(10077,"update schedule error", "更新调度配置错误"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error", "上线调度配置错误"),
OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error", "下线调度配置错误"),
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error", "分页查询调度配置列表错误"),
QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error", "查询调度配置列表错误"),
QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error", "分页查询任务列表错误"),
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error", "分页查询任务记录错误"),
CREATE_TENANT_ERROR(10084,"create tenant error", "创建租户错误"),
QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error", "分页查询租户列表错误"),
QUERY_TENANT_LIST_ERROR(10086,"query tenant list error", "查询租户列表错误"),
UPDATE_TENANT_ERROR(10087,"update tenant error", "更新租户错误"),
DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error", "删除租户错误"),
VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error", "租户编码验证错误"),
CREATE_USER_ERROR(10090,"create user error", "创建用户错误"),
QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error", "分页查询用户列表错误"),
UPDATE_USER_ERROR(10092,"update user error", "更新用户错误"),
DELETE_USER_BY_ID_ERROR(10093,"delete user by id error", "删除用户错误"),
GRANT_PROJECT_ERROR(10094,"grant project error", "授权项目错误"),
GRANT_RESOURCE_ERROR(10095,"grant resource error", "授权资源错误"),
GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error", "授权UDF函数错误"),
GRANT_DATASOURCE_ERROR(10097,"grant datasource error", "授权数据源错误"),
GET_USER_INFO_ERROR(10098,"get user info error", "获取用户信息错误"),
USER_LIST_ERROR(10099,"user list error", "查询用户列表错误"),
VERIFY_USERNAME_ERROR(10100,"verify username error", "用户名验证错误"),
UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error", "查询未授权用户错误"),
AUTHORIZED_USER_ERROR(10102,"authorized user error", "查询授权用户错误"),
QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error", "查询任务实例日志错误"),
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error", "下载任务日志文件错误"),
CREATE_PROCESS_DEFINITION(10105,"create process definition", "创建工作流错误"),
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error", "工作流名称已存在"),
UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error", "更新工作流定义错误"),
RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error", "上线工作流错误"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error", "查询工作流详细信息错误"),
QUERY_PROCCESS_DEFINITION_LIST(10110,"query proccess definition list", "查询工作流列表错误"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error", "查询工作流树形图数据错误"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error", "查询工作流定义节点信息错误"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error", "分页查询工作流实例列表错误"),
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error", "查询任务实例列表错误"),
UPDATE_PROCESS_INSTANCE_ERROR(10115,"update process instance error", "更新工作流实例错误"),
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance by id error", "查询工作流实例错误"),
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error", "删除工作流实例错误"),
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error", "查询子流程任务实例错误"),
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error", "查询工作流自定义变量信息错误"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query proccess definition list paging error", "分页查询工作流定义列表错误"),
SIGN_OUT_ERROR(10123,"sign out error", "退出错误"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists", "租户编码已存在"),
IP_IS_EMPTY(10125,"ip is empty", "IP地址不能为空"),
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"),
CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"),
QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"),
QUEUE_VALUE_EXIST(10129, "queue value {0} already exists", "队列值[{0}]已存在"),
QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"),
UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"),
NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"),
VERIFY_QUEUE_ERROR(10133,"verify queue error", "验证队列信息错误"),
NAME_NULL(10134,"name must be not null", "名称不能为空"),
NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"),
SAVE_ERROR(10136, "save error", "保存错误"),
DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"),
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it"),
DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail "),
DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail "),
QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail ", "删除worker分组失败"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists"),
RESOURCE_NOT_EXIST(20004, "resource not exist"),
RESOURCE_EXIST(20005, "resource already exists"),
RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing"),
RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit"),
RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified"),
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"),
HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail"),
RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!"),
RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"),
RESOURCE_EXIST(20005, "resource already exists", "资源已存在"),
RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing", "资源文件后缀不支持查看"),
RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"),
RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"),
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"),
HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege"),
USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"),
PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist"),
PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists"),
PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist"),
PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} not on line"),
PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}"),
PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation"),
SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist"),
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit"),
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ..."),
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error"),
START_PROCESS_INSTANCE_ERROR(50014,"start process instance error"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error"),
CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error"),
DATA_IS_NOT_VALID(50017,"data %s not valid"),
DATA_IS_NULL(50018,"data %s is null"),
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle"),
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error"),
SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024,"delete schedule by id error"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error"),
TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available."),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error"),
IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error"),
PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist", "工作流实例[{0}]不存在"),
PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists", "工作流实例[{0}]已存在"),
PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist", "工作流定义[{0}]不存在"),
PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} not on line", "工作流定义[{0}]不是上线状态"),
PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}", "工作流实例[{0}]的状态已经是[{1}]"),
PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation", "工作流实例[{0}]的状态是[{1}],无法执行[{2}]操作"),
SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist", "子工作流实例不存在"),
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"),
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."),
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error", "查询各状态任务实例数错误"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error", "查询各状态流程实例数错误"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error", "查询各用户流程定义数错误"),
START_PROCESS_INSTANCE_ERROR(50014,"start process instance error", "运行工作流实例错误"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error", "检查工作流实例错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017,"data %s not valid", "数据[%s]无效"),
DATA_IS_NULL(50018,"data %s is null", "数据[%s]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid", "流程节点[%s]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024,"delete schedule by id error", "删除调度配置错误"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error", "批量删除工作流定义错误"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error", "导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error", "导入工作流定义错误"),
HDFS_NOT_STARTUP(60001,"hdfs not startup"),
HDFS_TERANT_RESOURCES_FILE_EXISTS(60002,"resource file exists,please delete resource first"),
HDFS_TERANT_UDFS_FILE_EXISTS(60003,"udf file exists,please delete resource first"),
HDFS_NOT_STARTUP(60001,"hdfs not startup", "hdfs未启用"),
/**
* for monitor
*/
QUERY_DATABASE_STATE_ERROR(70001,"query database state error"),
QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error"),
QUERY_DATABASE_STATE_ERROR(70001,"query database state error", "查询数据库状态错误"),
QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error", "查询zookeeper状态错误"),
CREATE_ACCESS_TOKEN_ERROR(70010,"create access token error"),
GENERATE_TOKEN_ERROR(70011,"generate token error"),
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012,"query access token list paging error"),
UPDATE_ACCESS_TOKEN_ERROR(70013,"update access token error"),
DELETE_ACCESS_TOKEN_ERROR(70014,"delete access token error"),
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist"),
CREATE_ACCESS_TOKEN_ERROR(70010,"create access token error", "创建访问token错误"),
GENERATE_TOKEN_ERROR(70011,"generate token error", "生成token错误"),
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012,"query access token list paging error", "分页查询访问token列表错误"),
UPDATE_ACCESS_TOKEN_ERROR(70013,"update access token error", "更新访问token错误"),
DELETE_ACCESS_TOKEN_ERROR(70014,"delete access token error", "删除访问token错误"),
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"),
COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error"),
COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error", "查询各状态任务实例数错误"),
QUEUE_COUNT_ERROR(90001,"queue count error"),
QUEUE_COUNT_ERROR(90001,"queue count error", "查询队列数据错误"),
KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error"),
KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error", "获取kerberos启动状态错误"),
;
private final int code;
private final String msg;
private final String enMsg;
private final String zhMsg;
private Status(int code, String msg) {
private Status(int code, String enMsg, String zhMsg) {
this.code = code;
this.msg = msg;
this.enMsg = enMsg;
this.zhMsg = zhMsg;
}
public int getCode() {
@ -261,6 +265,10 @@ public enum Status {
}
public String getMsg() {
return this.msg;
if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(LocaleContextHolder.getLocale().getLanguage())) {
return this.zhMsg;
} else {
return this.enMsg;
}
}
}
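For illustration only, not part of this patch: a minimal, self-contained sketch of the locale-switching pattern the new getMsg() follows, assuming spring-context is on the classpath for LocaleContextHolder (the StatusTest change further below relies on the same class). The enum name and messages here are invented for the example.

import org.springframework.context.i18n.LocaleContextHolder;
import java.util.Locale;

public class LocaleMessageSketch {

    // A tiny bilingual enum mirroring the Status pattern: one English and one Chinese message per code.
    enum Msg {
        SUCCESS(0, "success", "成功");

        private final int code;
        private final String enMsg;
        private final String zhMsg;

        Msg(int code, String enMsg, String zhMsg) {
            this.code = code;
            this.enMsg = enMsg;
            this.zhMsg = zhMsg;
        }

        public int getCode() {
            return code;
        }

        // Return the message matching the locale bound to the current thread, defaulting to English.
        public String getMsg() {
            if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(LocaleContextHolder.getLocale().getLanguage())) {
                return zhMsg;
            }
            return enMsg;
        }
    }

    public static void main(String[] args) {
        LocaleContextHolder.setLocale(Locale.US);
        System.out.println(Msg.SUCCESS.getMsg());   // prints: success
        LocaleContextHolder.setLocale(Locale.SIMPLIFIED_CHINESE);
        System.out.println(Msg.SUCCESS.getMsg());   // prints: 成功
    }
}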

15
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java

@ -259,10 +259,7 @@ public class ExecutorService extends BaseService{
// checkTenantExists();
Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(),
processDefinition.getUserId());
if(tenant == null){
return false;
}
return true;
return tenant != null;
}
/**
@ -298,6 +295,7 @@ public class ExecutorService extends BaseService{
if (executionStatus.typeIsPause()|| executionStatus.typeIsCancel()) {
checkResult = true;
}
break;
default:
break;
}
@ -369,7 +367,7 @@ public class ExecutorService extends BaseService{
* @return check result code
*/
public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) {
Map<String, Object> result = new HashMap<String, Object>();
Map<String, Object> result = new HashMap<>();
if (processDefineId == 0){
logger.error("process definition id is null");
@ -378,10 +376,9 @@ public class ExecutorService extends BaseService{
List<Integer> ids = new ArrayList<>();
processService.recurseFindSubProcessId(processDefineId, ids);
Integer[] idArray = ids.toArray(new Integer[ids.size()]);
if (ids.size() > 0){
List<ProcessDefinition> processDefinitionList;
processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (processDefinitionList != null && processDefinitionList.size() > 0){
if (!ids.isEmpty()){
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (processDefinitionList != null){
for (ProcessDefinition processDefinition : processDefinitionList){
/**
* if there is no online process, exit directly

1
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java

@ -28,7 +28,6 @@ import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java

@ -121,7 +121,7 @@ public class ZooKeeperState {
private class SendThread extends Thread {
private String cmd;
public String ret = "";
private String ret = "";
public SendThread(String cmd) {
this.cmd = cmd;

1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java

@ -28,7 +28,6 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java

@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import com.alibaba.fastjson.JSONObject;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java

@ -29,8 +29,6 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import javax.ws.rs.POST;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java

@ -18,13 +18,11 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import com.alibaba.fastjson.JSONObject;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

5
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java

@ -18,9 +18,6 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
@ -31,9 +28,7 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.junit.Assert.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java

@ -29,7 +29,6 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.junit.Assert.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;

12
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java

@ -16,7 +16,12 @@
*/
package org.apache.dolphinscheduler.api.enums;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.context.i18n.LocaleContextHolder;
import java.util.Locale;
import static org.junit.Assert.*;
public class StatusTest {
@ -29,6 +34,11 @@ public class StatusTest {
@Test
public void testGetMsg() {
assertEquals("success", Status.SUCCESS.getMsg());
LocaleContextHolder.setLocale(Locale.US);
Assert.assertEquals("success", Status.SUCCESS.getMsg());
LocaleContextHolder.setLocale(Locale.SIMPLIFIED_CHINESE);
Assert.assertEquals("成功", Status.SUCCESS.getMsg());
}
}

8
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java

@ -175,6 +175,14 @@ public class TenantServiceTest {
logger.info(result.toString());
List<Tenant> tenantList = (List<Tenant>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(tenantList));
Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(getList());
Map<String, Object> successRes = tenantService.queryTenantList("1");
Assert.assertEquals(Status.SUCCESS,successRes.get(Constants.STATUS));
Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(null);
Map<String, Object> tenantNotExistRes = tenantService.queryTenantList("1");
Assert.assertEquals(Status.TENANT_NOT_EXIST,tenantNotExistRes.get(Constants.STATUS));
}
@Test

1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java

@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.api.utils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitorUtilsTest.java

@ -28,7 +28,7 @@ public class ZookeeperMonitorUtilsTest {
@Test
public void testGetMasterLsit(){
public void testGetMasterList(){
ZookeeperMonitor zookeeperMonitor = new ZookeeperMonitor();

2
dolphinscheduler-common/pom.xml

@ -25,7 +25,7 @@
</parent>
<artifactId>dolphinscheduler-common</artifactId>
<name>dolphinscheduler-common</name>
<url>http://maven.apache.org</url>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@ -746,7 +746,7 @@ public final class Constants {
* application regex
*/
public static final String APPLICATION_REGEX = "application_\\d+_\\d+";
public static final String PID = "pid";
public static final String PID = OSUtils.isWindows() ? "handle" : "pid";
/**
* month_begin
*/

3
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/WorkerLogFilter.java

@ -20,9 +20,6 @@ import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.filter.Filter;
import ch.qos.logback.core.spi.FilterReply;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import java.util.Arrays;
/**
* worker log filter

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java

@ -335,7 +335,7 @@ public abstract class AbstractShell {
try{
entry.getValue().destroy();
} catch (Exception e) {
e.printStackTrace();
logger.error("Destroy All Processes error", e);
}
}

14
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java

@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@ -207,12 +208,15 @@ public class FlinkParameters extends AbstractParameters {
@Override
public List<String> getResourceFilesList() {
if(resourceList !=null ) {
this.resourceList.add(mainJar);
return resourceList.stream()
.map(p -> p.getRes()).collect(Collectors.toList());
if(resourceList != null ) {
List<String> resourceFiles = resourceList.stream()
.map(ResourceInfo::getRes).collect(Collectors.toList());
if(mainJar != null) {
resourceFiles.add(mainJar.getRes());
}
return resourceFiles;
}
return null;
return Collections.emptyList();
}
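Not part of the patch: the old getResourceFilesList() mutated resourceList by adding mainJar into it and returned null when the list was absent; the new version builds a fresh list and returns an empty one instead. A standalone sketch of that pattern, with plain strings standing in for ResourceInfo:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ResourceListSketch {

    // Never return null, never mutate the input, and append the main jar only when it is present.
    static List<String> resourceFiles(List<String> resourceList, String mainJar) {
        if (resourceList != null) {
            List<String> files = new ArrayList<>(resourceList);
            if (mainJar != null) {
                files.add(mainJar);
            }
            return files;
        }
        return Collections.emptyList();
    }

    public static void main(String[] args) {
        System.out.println(resourceFiles(null, "app.jar"));                       // []
        System.out.println(resourceFiles(Arrays.asList("udf.jar"), "app.jar"));   // [udf.jar, app.jar]
    }
}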

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java

@ -44,7 +44,7 @@ public class FileUtils {
String fileSuffix = "";
if (StringUtils.isNotEmpty(filename)) {
int lastIndex = filename.lastIndexOf(".");
int lastIndex = filename.lastIndexOf('.');
if (lastIndex > 0) {
fileSuffix = filename.substring(lastIndex + 1);
}
@ -325,10 +325,8 @@ public class FileUtils {
}
} else {
File parent = file.getParentFile();
if (parent != null) {
if (!parent.mkdirs() && !parent.isDirectory()) {
if (parent != null && !parent.mkdirs() && !parent.isDirectory()) {
throw new IOException("Directory '" + parent + "' could not be created");
}
}
}
return new FileOutputStream(file, append);
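Illustration only, not part of the patch: the two behaviours touched above, suffix extraction via lastIndexOf('.') and lazy parent-directory creation before opening a stream, in one runnable sketch.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class FileUtilsSketch {

    // Same suffix rule as above: everything after the last '.', or empty when there is no usable dot.
    static String suffix(String filename) {
        String fileSuffix = "";
        if (filename != null && !filename.isEmpty()) {
            int lastIndex = filename.lastIndexOf('.');
            if (lastIndex > 0) {
                fileSuffix = filename.substring(lastIndex + 1);
            }
        }
        return fileSuffix;
    }

    // Same combined condition as above: try to create the parent directory and fail only if it
    // could not be created and still does not exist.
    static OutputStream open(File file, boolean append) throws IOException {
        File parent = file.getParentFile();
        if (parent != null && !parent.mkdirs() && !parent.isDirectory()) {
            throw new IOException("Directory '" + parent + "' could not be created");
        }
        return new FileOutputStream(file, append);
    }

    public static void main(String[] args) throws IOException {
        System.out.println(suffix("job.sh"));    // prints: sh
        System.out.println(suffix("README"));    // prints an empty line
        try (OutputStream out = open(new File("build/tmp/demo.txt"), false)) {
            out.write("hello".getBytes());
        }
    }
}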

26
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java

@ -32,7 +32,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@ -46,16 +48,14 @@ public class HadoopUtils implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
private static volatile HadoopUtils instance = new HadoopUtils();
private static volatile Configuration configuration;
private static HadoopUtils instance = new HadoopUtils();
private static Configuration configuration;
private static FileSystem fs;
private String hdfsUser;
private HadoopUtils(){
if(StringUtils.isEmpty(hdfsUser)){
hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
}
hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
init();
initHdfsPath();
}
@ -129,7 +129,6 @@ public class HadoopUtils implements Closeable {
if (fs == null) {
if(StringUtils.isNotEmpty(hdfsUser)){
//UserGroupInformation ugi = UserGroupInformation.createProxyUser(hdfsUser,UserGroupInformation.getLoginUser());
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
@ -196,7 +195,7 @@ public class HadoopUtils implements Closeable {
if(StringUtils.isBlank(hdfsFilePath)){
logger.error("hdfs file path:{} is blank",hdfsFilePath);
return null;
return new byte[0];
}
FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
@ -218,7 +217,7 @@ public class HadoopUtils implements Closeable {
if (StringUtils.isBlank(hdfsFilePath)){
logger.error("hdfs file path:{} is blank",hdfsFilePath);
return null;
return Collections.emptyList();
}
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))){
@ -293,7 +292,7 @@ public class HadoopUtils implements Closeable {
if (dstPath.exists()) {
if (dstPath.isFile()) {
if (overwrite) {
dstPath.delete();
Files.delete(dstPath.toPath());
}
} else {
logger.error("destination file must be a file");
@ -378,7 +377,7 @@ public class HadoopUtils implements Closeable {
String responseContent = HttpUtils.get(applicationUrl);
JSONObject jsonObject = JSONObject.parseObject(responseContent);
JSONObject jsonObject = JSON.parseObject(responseContent);
String result = jsonObject.getJSONObject("app").getString("finalStatus");
switch (result) {
@ -525,8 +524,6 @@ public class HadoopUtils implements Closeable {
*/
private static final class YarnHAAdminUtils extends RMAdminCLI {
private static final Logger logger = LoggerFactory.getLogger(YarnHAAdminUtils.class);
/**
* get active resourcemanager
*
@ -585,8 +582,7 @@ public class HadoopUtils implements Closeable {
JSONObject jsonObject = JSON.parseObject(retStr);
//get ResourceManager state
String state = jsonObject.getJSONObject("clusterInfo").getString("haState");
return state;
return jsonObject.getJSONObject("clusterInfo").getString("haState");
}
}
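Not part of the patch: the HadoopUtils changes above adopt the convention of returning an empty byte array or an empty list, rather than null, when the requested HDFS path is blank. A small generic sketch of why that helps callers:

import java.util.Collections;
import java.util.List;

public class EmptyInsteadOfNullSketch {

    // Blank input yields an empty byte array rather than null (mirrors the first guard changed above).
    static byte[] readBytes(String path) {
        if (path == null || path.trim().isEmpty()) {
            return new byte[0];
        }
        return ("contents of " + path).getBytes();                // stand-in for the real HDFS read
    }

    // Blank input yields an empty list rather than null (mirrors the second guard changed above).
    static List<String> readLines(String path) {
        if (path == null || path.trim().isEmpty()) {
            return Collections.emptyList();
        }
        return Collections.singletonList("line from " + path);    // stand-in for the real HDFS read
    }

    public static void main(String[] args) {
        System.out.println(readBytes("  ").length);   // prints: 0, no null check needed
        for (String line : readLines("")) {           // the loop body simply never runs
            System.out.println(line);
        }
    }
}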

12
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java

@ -81,17 +81,15 @@ public class HttpUtils {
logger.error(e.getMessage(),e);
}
if (httpget != null && !httpget.isAborted()) {
if (!httpget.isAborted()) {
httpget.releaseConnection();
httpget.abort();
}
if (httpclient != null) {
try {
httpclient.close();
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
try {
httpclient.close();
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
}
return responseContent;
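As a side note, not part of the patch: CloseableHttpClient and CloseableHttpResponse both implement Closeable in HttpClient 4.3+, so the cleanup the diff performs by hand can also be expressed with try-with-resources. A sketch under that assumption:

import java.io.IOException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class HttpGetSketch {

    // try-with-resources closes the response and the client even when an exception is thrown.
    static String get(String url) {
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(new HttpGet(url))) {
            if (response.getStatusLine().getStatusCode() == 200) {
                return EntityUtils.toString(response.getEntity(), "UTF-8");
            }
            return null;
        } catch (IOException e) {
            return null;
        }
    }

    public static void main(String[] args) {
        System.out.println(get("http://example.com"));
    }
}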

16
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java

@ -400,8 +400,7 @@ public class OSUtils {
* @return true if mac
*/
public static boolean isMacOS() {
String os = System.getProperty("os.name");
return os.startsWith("Mac");
return getOSName().startsWith("Mac");
}
@ -409,9 +408,16 @@ public class OSUtils {
* whether is windows
* @return true if windows
*/
public static boolean isWindows() {
String os = System.getProperty("os.name");
return os.startsWith("Windows");
public static boolean isWindows() {
return getOSName().startsWith("Windows");
}
/**
* get current OS name
* @return current OS name
*/
public static String getOSName() {
return System.getProperty("os.name");
}
/**

36
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java

@ -119,10 +119,15 @@ public class ParameterUtils {
*/
public static String curingGlobalParams(Map<String,String> globalParamMap, List<Property> globalParamList,
CommandType commandType, Date scheduleTime){
Map<String, String> globalMap = new HashMap<>();
if(globalParamMap!= null){
globalMap.putAll(globalParamMap);
}
if (globalParamList == null || globalParamList.isEmpty()) {
return null;
}
Map<String, String> globalMap = new HashMap<>();
if (globalParamMap!= null){
globalMap.putAll(globalParamMap);
}
Map<String,String> allParamMap = new HashMap<>();
//If it is a complement, a complement time needs to be passed in, according to the task type
Map<String,String> timeParams = BusinessTimeUtils
@ -132,9 +137,7 @@ public class ParameterUtils {
allParamMap.putAll(timeParams);
}
if (globalMap != null) {
allParamMap.putAll(globalMap);
}
allParamMap.putAll(globalMap);
Set<Map.Entry<String, String>> entries = allParamMap.entrySet();
@ -146,22 +149,15 @@ public class ParameterUtils {
resolveMap.put(entry.getKey(),str);
}
}
globalMap.putAll(resolveMap);
if (globalMap != null){
globalMap.putAll(resolveMap);
}
if (globalParamList != null && globalParamList.size() > 0){
for (Property property : globalParamList){
String val = globalMap.get(property.getProp());
if (val != null){
property.setValue(val);
}
for (Property property : globalParamList){
String val = globalMap.get(property.getProp());
if (val != null){
property.setValue(val);
}
return JSONObject.toJSONString(globalParamList);
}
return null;
return JSONObject.toJSONString(globalParamList);
}

2
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java

@ -125,7 +125,7 @@ public class PropertyUtils {
* @param key property name
* @return property value
*/
public static Boolean getBoolean(String key) {
public static boolean getBoolean(String key) {
String value = properties.getProperty(key.trim());
if(null != value){
return Boolean.parseBoolean(value);

1065
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java

File diff suppressed because it is too large

286
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java

@ -0,0 +1,286 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils.process;
import com.sun.jna.platform.win32.Kernel32Util;
import java.util.*;
final class ProcessEnvironmentForWin32 extends HashMap<String,String> {
private static final long serialVersionUID = -8017839552603542824L;
private static String validateName(String name) {
// An initial `=' indicates a magic Windows variable name -- OK
if (name.indexOf('=', 1) != -1 ||
name.indexOf('\u0000') != -1)
throw new IllegalArgumentException
("Invalid environment variable name: \"" + name + "\"");
return name;
}
private static String validateValue(String value) {
if (value.indexOf('\u0000') != -1)
throw new IllegalArgumentException
("Invalid environment variable value: \"" + value + "\"");
return value;
}
private static String nonNullString(Object o) {
if (o == null)
throw new NullPointerException();
return (String) o;
}
public String put(String key, String value) {
return super.put(validateName(key), validateValue(value));
}
public String get(Object key) {
return super.get(nonNullString(key));
}
public boolean containsKey(Object key) {
return super.containsKey(nonNullString(key));
}
public boolean containsValue(Object value) {
return super.containsValue(nonNullString(value));
}
public String remove(Object key) {
return super.remove(nonNullString(key));
}
private static class CheckedEntry implements Entry<String,String> {
private final Entry<String,String> e;
public CheckedEntry(Entry<String,String> e) {this.e = e;}
public String getKey() { return e.getKey();}
public String getValue() { return e.getValue();}
public String setValue(String value) {
return e.setValue(validateValue(value));
}
public String toString() { return getKey() + "=" + getValue();}
public boolean equals(Object o) {return e.equals(o);}
public int hashCode() {return e.hashCode();}
}
private static class CheckedEntrySet extends AbstractSet<Entry<String,String>> {
private final Set<Entry<String,String>> s;
public CheckedEntrySet(Set<Entry<String,String>> s) {this.s = s;}
public int size() {return s.size();}
public boolean isEmpty() {return s.isEmpty();}
public void clear() { s.clear();}
public Iterator<Entry<String,String>> iterator() {
return new Iterator<Entry<String,String>>() {
Iterator<Entry<String,String>> i = s.iterator();
public boolean hasNext() { return i.hasNext();}
public Entry<String,String> next() {
return new CheckedEntry(i.next());
}
public void remove() { i.remove();}
};
}
private static Entry<String,String> checkedEntry(Object o) {
@SuppressWarnings("unchecked")
Entry<String,String> e = (Entry<String,String>) o;
nonNullString(e.getKey());
nonNullString(e.getValue());
return e;
}
public boolean contains(Object o) {return s.contains(checkedEntry(o));}
public boolean remove(Object o) {return s.remove(checkedEntry(o));}
}
private static class CheckedValues extends AbstractCollection<String> {
private final Collection<String> c;
public CheckedValues(Collection<String> c) {this.c = c;}
public int size() {return c.size();}
public boolean isEmpty() {return c.isEmpty();}
public void clear() { c.clear();}
public Iterator<String> iterator() {return c.iterator();}
public boolean contains(Object o) {return c.contains(nonNullString(o));}
public boolean remove(Object o) {return c.remove(nonNullString(o));}
}
private static class CheckedKeySet extends AbstractSet<String> {
private final Set<String> s;
public CheckedKeySet(Set<String> s) {this.s = s;}
public int size() {return s.size();}
public boolean isEmpty() {return s.isEmpty();}
public void clear() { s.clear();}
public Iterator<String> iterator() {return s.iterator();}
public boolean contains(Object o) {return s.contains(nonNullString(o));}
public boolean remove(Object o) {return s.remove(nonNullString(o));}
}
public Set<String> keySet() {
return new CheckedKeySet(super.keySet());
}
public Collection<String> values() {
return new CheckedValues(super.values());
}
public Set<Entry<String,String>> entrySet() {
return new CheckedEntrySet(super.entrySet());
}
private static final class NameComparator implements Comparator<String> {
public int compare(String s1, String s2) {
// We can't use String.compareToIgnoreCase since it
// canonicalizes to lower case, while Windows
// canonicalizes to upper case! For example, "_" should
// sort *after* "Z", not before.
int n1 = s1.length();
int n2 = s2.length();
int min = Math.min(n1, n2);
for (int i = 0; i < min; i++) {
char c1 = s1.charAt(i);
char c2 = s2.charAt(i);
if (c1 != c2) {
c1 = Character.toUpperCase(c1);
c2 = Character.toUpperCase(c2);
if (c1 != c2)
// No overflow because of numeric promotion
return c1 - c2;
}
}
return n1 - n2;
}
}
private static final class EntryComparator implements Comparator<Entry<String,String>> {
public int compare(Entry<String,String> e1,
Entry<String,String> e2) {
return nameComparator.compare(e1.getKey(), e2.getKey());
}
}
// Allow `=' as first char in name, e.g. =C:=C:\DIR
static final int MIN_NAME_LENGTH = 1;
private static final NameComparator nameComparator;
private static final EntryComparator entryComparator;
private static final ProcessEnvironmentForWin32 theEnvironment;
private static final Map<String,String> theUnmodifiableEnvironment;
private static final Map<String,String> theCaseInsensitiveEnvironment;
static {
nameComparator = new NameComparator();
entryComparator = new EntryComparator();
theEnvironment = new ProcessEnvironmentForWin32();
theUnmodifiableEnvironment = Collections.unmodifiableMap(theEnvironment);
theEnvironment.putAll(environmentBlock());
theCaseInsensitiveEnvironment = new TreeMap<>(nameComparator);
theCaseInsensitiveEnvironment.putAll(theEnvironment);
}
private ProcessEnvironmentForWin32() {
super();
}
private ProcessEnvironmentForWin32(int capacity) {
super(capacity);
}
// Only for use by System.getenv(String)
static String getenv(String name) {
// The original implementation used a native call to _wgetenv,
// but it turns out that _wgetenv is only consistent with
// GetEnvironmentStringsW (for non-ASCII) if `wmain' is used
// instead of `main', even in a process created using
// CREATE_UNICODE_ENVIRONMENT. Instead we perform the
// case-insensitive comparison ourselves. At least this
// guarantees that System.getenv().get(String) will be
// consistent with System.getenv(String).
return theCaseInsensitiveEnvironment.get(name);
}
// Only for use by System.getenv()
static Map<String,String> getenv() {
return theUnmodifiableEnvironment;
}
// Only for use by ProcessBuilder.environment()
@SuppressWarnings("unchecked")
static Map<String,String> environment() {
return (Map<String,String>) theEnvironment.clone();
}
// Only for use by ProcessBuilder.environment(String[] envp)
static Map<String,String> emptyEnvironment(int capacity) {
return new ProcessEnvironmentForWin32(capacity);
}
private static Map<String, String> environmentBlock() {
return Kernel32Util.getEnvironmentVariables();
}
// Only for use by ProcessImpl.start()
String toEnvironmentBlock() {
// Sort Unicode-case-insensitively by name
List<Entry<String,String>> list = new ArrayList<>(entrySet());
Collections.sort(list, entryComparator);
StringBuilder sb = new StringBuilder(size()*30);
int cmp = -1;
// Some versions of MSVCRT.DLL require SystemRoot to be set.
// So, we make sure that it is always set, even if not provided
// by the caller.
final String SYSTEMROOT = "SystemRoot";
for (Entry<String,String> e : list) {
String key = e.getKey();
String value = e.getValue();
if (cmp < 0 && (cmp = nameComparator.compare(key, SYSTEMROOT)) > 0) {
// Not set, so add it here
addToEnvIfSet(sb, SYSTEMROOT);
}
addToEnv(sb, key, value);
}
if (cmp < 0) {
// Got to end of list and still not found
addToEnvIfSet(sb, SYSTEMROOT);
}
if (sb.length() == 0) {
// Environment was empty and SystemRoot not set in parent
sb.append('\u0000');
}
// Block is double NUL terminated
sb.append('\u0000');
return sb.toString();
}
// add the environment variable to the child, if it exists in parent
private static void addToEnvIfSet(StringBuilder sb, String name) {
String s = getenv(name);
if (s != null)
addToEnv(sb, name, s);
}
private static void addToEnv(StringBuilder sb, String name, String val) {
sb.append(name).append('=').append(val).append('\u0000');
}
static String toEnvironmentBlock(Map<String,String> map) {
return map == null ? null : ((ProcessEnvironmentForWin32)map).toEnvironmentBlock();
}
}
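Illustration only: toEnvironmentBlock() above emits the Win32 CreateProcess environment-block layout, name=value pairs each terminated by a NUL, sorted case-insensitively by name, with one extra NUL closing the block. A standalone sketch of that layout (String.CASE_INSENSITIVE_ORDER only approximates the upper-casing NameComparator above):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class EnvBlockSketch {

    // Builds the same wire format: "K1=V1\0K2=V2\0...\0" with a double NUL at the end.
    static String toBlock(Map<String, String> env) {
        TreeMap<String, String> sorted = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        sorted.putAll(env);
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, String> e : sorted.entrySet()) {
            sb.append(e.getKey()).append('=').append(e.getValue()).append('\u0000');
        }
        if (sb.length() == 0) {
            sb.append('\u0000');   // an empty environment still needs one NUL before the terminator
        }
        sb.append('\u0000');       // the block is double-NUL terminated
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, String> env = new HashMap<>();
        env.put("SystemRoot", "C:\\Windows");
        env.put("PATH", "C:\\bin");
        // Print the NULs as '|' so the layout is visible.
        System.out.println(toBlock(env).replace('\u0000', '|'));   // PATH=C:\bin|SystemRoot=C:\Windows||
    }
}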

785
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java

@ -0,0 +1,785 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils.process;
import com.sun.jna.Pointer;
import com.sun.jna.platform.win32.*;
import com.sun.jna.ptr.IntByReference;
import java.lang.reflect.Field;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import sun.security.action.GetPropertyAction;
import java.io.*;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.sun.jna.platform.win32.WinBase.STILL_ACTIVE;
import static java.util.Objects.requireNonNull;
public class ProcessImplForWin32 extends Process {
private static final Field FD_HANDLE;
static {
if (!OSUtils.isWindows()) {
throw new RuntimeException("ProcessImplForWin32 can be only initialized in " +
"Windows environment, but current OS is " + OSUtils.getOSName());
}
try {
FD_HANDLE = requireNonNull(FileDescriptor.class.getDeclaredField("handle"));
FD_HANDLE.setAccessible(true);
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
private static final int PIPE_SIZE = 4096 + 24;
private static final int HANDLE_STORAGE_SIZE = 6;
private static final int OFFSET_READ = 0;
private static final int OFFSET_WRITE = 1;
private static final WinNT.HANDLE JAVA_INVALID_HANDLE_VALUE = new WinNT.HANDLE(Pointer.createConstant(-1));
private static void setHandle(FileDescriptor obj, long handle) {
try {
FD_HANDLE.set(obj, handle);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
private static long getHandle(FileDescriptor obj) {
try {
return (Long) FD_HANDLE.get(obj);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
/**
* Open a file for writing. If {@code append} is {@code true} then the file
* is opened for atomic append directly and a FileOutputStream constructed
* with the resulting handle. This is because a FileOutputStream created
* to append to a file does not open the file in a manner that guarantees
* that writes by the child process will be atomic.
*/
private static FileOutputStream newFileOutputStream(File f, boolean append)
throws IOException
{
if (append) {
String path = f.getPath();
SecurityManager sm = System.getSecurityManager();
if (sm != null)
sm.checkWrite(path);
long handle = openForAtomicAppend(path);
final FileDescriptor fd = new FileDescriptor();
setHandle(fd, handle);
return AccessController.doPrivileged(
new PrivilegedAction<FileOutputStream>() {
public FileOutputStream run() {
return new FileOutputStream(fd);
}
}
);
} else {
return new FileOutputStream(f);
}
}
// System-dependent portion of ProcessBuilderForWindows.start()
static Process start(String username,
String password,
String cmdarray[],
java.util.Map<String,String> environment,
String dir,
ProcessBuilderForWin32.Redirect[] redirects,
boolean redirectErrorStream)
throws IOException
{
String envblock = ProcessEnvironmentForWin32.toEnvironmentBlock(environment);
FileInputStream f0 = null;
FileOutputStream f1 = null;
FileOutputStream f2 = null;
try {
long[] stdHandles;
if (redirects == null) {
stdHandles = new long[] { -1L, -1L, -1L };
} else {
stdHandles = new long[3];
if (redirects[0] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[0] = -1L;
else if (redirects[0] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[0] = getHandle(FileDescriptor.in);
else {
f0 = new FileInputStream(redirects[0].file());
stdHandles[0] = getHandle(f0.getFD());
}
if (redirects[1] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[1] = -1L;
else if (redirects[1] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[1] = getHandle(FileDescriptor.out);
else {
f1 = newFileOutputStream(redirects[1].file(),
redirects[1].append());
stdHandles[1] = getHandle(f1.getFD());
}
if (redirects[2] == ProcessBuilderForWin32.Redirect.PIPE)
stdHandles[2] = -1L;
else if (redirects[2] == ProcessBuilderForWin32.Redirect.INHERIT)
stdHandles[2] = getHandle(FileDescriptor.err);
else {
f2 = newFileOutputStream(redirects[2].file(),
redirects[2].append());
stdHandles[2] = getHandle(f2.getFD());
}
}
return new ProcessImplForWin32(username, password, cmdarray, envblock, dir, stdHandles, redirectErrorStream);
} finally {
// In theory, close() can throw IOException
// (although it is rather unlikely to happen here)
try { if (f0 != null) f0.close(); }
finally {
try { if (f1 != null) f1.close(); }
finally { if (f2 != null) f2.close(); }
}
}
}
private static class LazyPattern {
// Escape-support version:
// "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)";
private static final Pattern PATTERN =
Pattern.compile("[^\\s\"]+|\"[^\"]*\"");
};
/* Parses the command string parameter into the executable name and
* program arguments.
*
* The command string is broken into tokens. The token separator is a space
* or quota character. The space inside quotation is not a token separator.
* There are no escape sequences.
*/
private static String[] getTokensFromCommand(String command) {
ArrayList<String> matchList = new ArrayList<>(8);
Matcher regexMatcher = ProcessImplForWin32.LazyPattern.PATTERN.matcher(command);
while (regexMatcher.find())
matchList.add(regexMatcher.group());
return matchList.toArray(new String[matchList.size()]);
}
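    // Illustration, not part of this file: the LazyPattern regex keeps quoted segments intact while
    // splitting on whitespace, so a command line such as
    //     cmd.exe /c "echo hello world"
    // tokenizes to [cmd.exe, /c, "echo hello world"]. A quick standalone check:
    //     Matcher m = Pattern.compile("[^\\s\"]+|\"[^\"]*\"").matcher("cmd.exe /c \"echo hello world\"");
    //     while (m.find()) { System.out.println(m.group()); }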
private static final int VERIFICATION_CMD_BAT = 0;
private static final int VERIFICATION_WIN32 = 1;
private static final int VERIFICATION_WIN32_SAFE = 2; // inside quotes not allowed
private static final int VERIFICATION_LEGACY = 3;
// See Command shell overview for documentation of special characters.
// https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-xp/bb490954(v=technet.10)
private static final char ESCAPE_VERIFICATION[][] = {
// We guarantee the only command file execution for implicit [cmd.exe] run.
// http://technet.microsoft.com/en-us/library/bb490954.aspx
{' ', '\t', '<', '>', '&', '|', '^'},
{' ', '\t', '<', '>'},
{' ', '\t', '<', '>'},
{' ', '\t'}
};
private static String createCommandLine(int verificationType,
final String executablePath,
final String cmd[])
{
StringBuilder cmdbuf = new StringBuilder(80);
cmdbuf.append(executablePath);
for (int i = 1; i < cmd.length; ++i) {
cmdbuf.append(' ');
String s = cmd[i];
if (needsEscaping(verificationType, s)) {
cmdbuf.append('"');
if (verificationType == VERIFICATION_WIN32_SAFE) {
// Insert the argument, adding '\' to quote any interior quotes
int length = s.length();
for (int j = 0; j < length; j++) {
char c = s.charAt(j);
if (c == DOUBLEQUOTE) {
int count = countLeadingBackslash(verificationType, s, j);
while (count-- > 0) {
cmdbuf.append(BACKSLASH); // double the number of backslashes
}
cmdbuf.append(BACKSLASH); // backslash to quote the quote
}
cmdbuf.append(c);
}
} else {
cmdbuf.append(s);
}
// The code protects the [java.exe] and console command line
// parser, that interprets the [\"] combination as an escape
// sequence for the ["] char.
// http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
//
// If the argument is an FS path, doubling of the tail [\]
// char is not a problem for non-console applications.
//
// The [\"] sequence is not an escape sequence for the [cmd.exe]
// command line parser. The case of the [""] tail escape
// sequence could not be realized due to the argument validation
// procedure.
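// For example, the argument [C:\Program Files\] is emitted as ["C:\Program Files\\"]
// so that its trailing backslash does not escape the closing quote.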
int count = countLeadingBackslash(verificationType, s, s.length());
while (count-- > 0) {
cmdbuf.append(BACKSLASH); // double the number of backslashes
}
cmdbuf.append('"');
} else {
cmdbuf.append(s);
}
}
return cmdbuf.toString();
}
/**
* Return the argument without quotes (1st and last) if present, else the arg.
* @param str a string
* @return the string without 1st and last quotes
*/
private static String unQuote(String str) {
int len = str.length();
return (len >= 2 && str.charAt(0) == DOUBLEQUOTE && str.charAt(len - 1) == DOUBLEQUOTE)
? str.substring(1, len - 1)
: str;
}
private static boolean needsEscaping(int verificationType, String arg) {
// Switch off MS heuristic for internal ["].
// Please, use the explicit [cmd.exe] call
// if you need the internal ["].
// Example: "cmd.exe", "/C", "Extended_MS_Syntax"
// For [.exe] or [.com] file the unpaired/internal ["]
// in the argument is not a problem.
String unquotedArg = unQuote(arg);
boolean argIsQuoted = !arg.equals(unquotedArg);
boolean embeddedQuote = unquotedArg.indexOf(DOUBLEQUOTE) >= 0;
switch (verificationType) {
case VERIFICATION_CMD_BAT:
if (embeddedQuote) {
throw new IllegalArgumentException("Argument has embedded quote, " +
"use the explicit CMD.EXE call.");
}
break; // fall through to the quoting check below
case VERIFICATION_WIN32_SAFE:
if (argIsQuoted && embeddedQuote) {
throw new IllegalArgumentException("Malformed argument has embedded quote: "
+ unquotedArg);
}
break;
default:
break;
}
if (!argIsQuoted) {
char testEscape[] = ESCAPE_VERIFICATION[verificationType];
for (int i = 0; i < testEscape.length; ++i) {
if (arg.indexOf(testEscape[i]) >= 0) {
return true;
}
}
}
return false;
}
private static String getExecutablePath(String path)
throws IOException
{
String name = unQuote(path);
if (name.indexOf(DOUBLEQUOTE) >= 0) {
throw new IllegalArgumentException("Executable name has embedded quote, " +
"split the arguments: " + name);
}
// Win32 CreateProcess requires path to be normalized
File fileToRun = new File(name);
// From the [CreateProcess] function documentation:
//
// "If the file name does not contain an extension, .exe is appended.
// Therefore, if the file name extension is .com, this parameter
// must include the .com extension. If the file name ends in
// a period (.) with no extension, or if the file name contains a path,
// .exe is not appended."
//
// "If the file name !does not contain a directory path!,
// the system searches for the executable file in the following
// sequence:..."
//
// In practice ANY non-existent path is extended by [.exe] extension
// in the [CreateProcess] function with the only exception:
// the path ends by (.)
return fileToRun.getPath();
}
/**
* An executable is any program that is an EXE or that has no extension,
* in which case the Windows CreateProcess call will look for .exe.
* The comparison is case insensitive based on the name.
* @param executablePath the executable file
* @return true if the path ends in .exe or does not have an extension.
*/
private boolean isExe(String executablePath) {
File file = new File(executablePath);
String upName = file.getName().toUpperCase(Locale.ROOT);
return (upName.endsWith(".EXE") || upName.indexOf('.') < 0);
}
// Old version that can be bypassed
private boolean isShellFile(String executablePath) {
String upPath = executablePath.toUpperCase();
return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT"));
}
private String quoteString(String arg) {
StringBuilder argbuf = new StringBuilder(arg.length() + 2);
return argbuf.append('"').append(arg).append('"').toString();
}
// Count backslashes before start index of string.
// .bat files don't include backslashes as part of the quote
private static int countLeadingBackslash(int verificationType,
CharSequence input, int start) {
if (verificationType == VERIFICATION_CMD_BAT)
return 0;
int j;
for (j = start - 1; j >= 0 && input.charAt(j) == BACKSLASH; j--) {
// just scanning backwards
}
return (start - 1) - j; // number of BACKSLASHES
}
private static final char DOUBLEQUOTE = '\"';
private static final char BACKSLASH = '\\';
private WinNT.HANDLE handle;
private OutputStream stdin_stream;
private InputStream stdout_stream;
private InputStream stderr_stream;
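// Builds the final command-line string (legacy or verified mode, depending on the
// jdk.lang.Process.allowAmbiguousCommands property and whether a SecurityManager
// is installed), starts the child process under the supplied user via create(),
// and wraps the returned native handles into the stdin/stdout/stderr streams.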
private ProcessImplForWin32(
String username,
String password,
String cmd[],
final String envblock,
final String path,
final long[] stdHandles,
final boolean redirectErrorStream)
throws IOException
{
String cmdstr;
final SecurityManager security = System.getSecurityManager();
GetPropertyAction action = new GetPropertyAction("jdk.lang.Process.allowAmbiguousCommands",
(security == null) ? "true" : "false");
final boolean allowAmbiguousCommands = !"false".equalsIgnoreCase(action.run());
if (allowAmbiguousCommands && security == null) {
// Legacy mode.
// Normalize path if possible.
String executablePath = new File(cmd[0]).getPath();
// No worry about internal, unpaired ["], and redirection/piping.
if (needsEscaping(VERIFICATION_LEGACY, executablePath) )
executablePath = quoteString(executablePath);
cmdstr = createCommandLine(
//legacy mode doesn't worry about extended verification
VERIFICATION_LEGACY,
executablePath,
cmd);
} else {
String executablePath;
try {
executablePath = getExecutablePath(cmd[0]);
} catch (IllegalArgumentException e) {
// Workaround for the calls like
// Runtime.getRuntime().exec("\"C:\\Program Files\\foo\" bar")
// No chance to avoid CMD/BAT injection, except to do the work
// right from the beginning. Otherwise we have too many corner
// cases from
// Runtime.getRuntime().exec(String[] cmd [, ...])
// calls with internal ["] and escape sequences.
// Restore original command line.
StringBuilder join = new StringBuilder();
// terminal space in command line is ok
for (String s : cmd)
join.append(s).append(' ');
// Parse the command line again.
cmd = getTokensFromCommand(join.toString());
executablePath = getExecutablePath(cmd[0]);
// Check new executable name once more
if (security != null)
security.checkExec(executablePath);
}
// Quotation protects from interpretation of the [path] argument as
// start of longer path with spaces. Quotation has no influence to
// [.exe] extension heuristic.
boolean isShell = allowAmbiguousCommands ? isShellFile(executablePath)
: !isExe(executablePath);
cmdstr = createCommandLine(
// We need the extended verification procedures
isShell ? VERIFICATION_CMD_BAT
: (allowAmbiguousCommands ? VERIFICATION_WIN32 : VERIFICATION_WIN32_SAFE),
quoteString(executablePath),
cmd);
}
handle = create(username, password, cmdstr, envblock, path, stdHandles, redirectErrorStream);
AccessController.doPrivileged(
new PrivilegedAction<Void>() {
public Void run() {
if (stdHandles[0] == -1L)
stdin_stream = ProcessBuilderForWin32.NullOutputStream.INSTANCE;
else {
FileDescriptor stdin_fd = new FileDescriptor();
setHandle(stdin_fd, stdHandles[0]);
stdin_stream = new BufferedOutputStream(
new FileOutputStream(stdin_fd));
}
if (stdHandles[1] == -1L)
stdout_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE;
else {
FileDescriptor stdout_fd = new FileDescriptor();
setHandle(stdout_fd, stdHandles[1]);
stdout_stream = new BufferedInputStream(
new FileInputStream(stdout_fd));
}
if (stdHandles[2] == -1L)
stderr_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE;
else {
FileDescriptor stderr_fd = new FileDescriptor();
setHandle(stderr_fd, stdHandles[2]);
stderr_stream = new FileInputStream(stderr_fd);
}
return null; }});
}
public OutputStream getOutputStream() {
return stdin_stream;
}
public InputStream getInputStream() {
return stdout_stream;
}
public InputStream getErrorStream() {
return stderr_stream;
}
protected void finalize() {
closeHandle(handle);
}
public int exitValue() {
int exitCode = getExitCodeProcess(handle);
if (exitCode == STILL_ACTIVE)
throw new IllegalThreadStateException("process has not exited");
return exitCode;
}
public int waitFor() throws InterruptedException {
waitForInterruptibly(handle);
if (Thread.interrupted())
throw new InterruptedException();
return exitValue();
}
@Override
public boolean waitFor(long timeout, TimeUnit unit)
throws InterruptedException
{
if (getExitCodeProcess(handle) != STILL_ACTIVE) return true;
if (timeout <= 0) return false;
long remainingNanos = unit.toNanos(timeout);
long deadline = System.nanoTime() + remainingNanos ;
do {
// Round up to next millisecond
long msTimeout = TimeUnit.NANOSECONDS.toMillis(remainingNanos + 999_999L);
waitForTimeoutInterruptibly(handle, msTimeout);
if (Thread.interrupted())
throw new InterruptedException();
if (getExitCodeProcess(handle) != STILL_ACTIVE) {
return true;
}
remainingNanos = deadline - System.nanoTime();
} while (remainingNanos > 0);
return (getExitCodeProcess(handle) != STILL_ACTIVE);
}
public void destroy() { terminateProcess(handle); }
public Process destroyForcibly() {
destroy();
return this;
}
public boolean isAlive() {
return isProcessAlive(handle);
}
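// Prepares one of the child's standard handles: if the Java side already holds a
// concrete handle (file redirect or inherited handle) it is passed through as-is;
// otherwise an anonymous pipe is created, the child receives one end and this
// process keeps the other. The child's end is marked inheritable in either case.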
private static boolean initHolder(WinNT.HANDLEByReference pjhandles,
WinNT.HANDLEByReference[] pipe,
int offset,
WinNT.HANDLEByReference phStd) {
if (!pjhandles.getValue().equals(JAVA_INVALID_HANDLE_VALUE)) {
phStd.setValue(pjhandles.getValue());
pjhandles.setValue(JAVA_INVALID_HANDLE_VALUE);
} else {
if (!Kernel32.INSTANCE.CreatePipe(pipe[0], pipe[1], null, PIPE_SIZE)) {
throw new Win32Exception(Kernel32.INSTANCE.GetLastError());
} else {
WinNT.HANDLE thisProcessEnd = offset == OFFSET_READ ? pipe[1].getValue() : pipe[0].getValue();
phStd.setValue(pipe[offset].getValue());
pjhandles.setValue(thisProcessEnd);
}
}
Kernel32.INSTANCE.SetHandleInformation(phStd.getValue(), Kernel32.HANDLE_FLAG_INHERIT, Kernel32.HANDLE_FLAG_INHERIT);
return true;
}
private static void releaseHolder(boolean complete, WinNT.HANDLEByReference[] pipe, int offset) {
closeHandle(pipe[offset].getValue());
if (complete) {
closeHandle(pipe[offset == OFFSET_READ ? OFFSET_WRITE : OFFSET_READ].getValue());
}
}
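// prepareIOEHandleState/restoreIOEHandleState temporarily clear the inherit flag
// on the handles collected in stdIOE while the child is being created, so that
// only the handles explicitly wired into STARTUPINFO leak into the child, and
// re-apply the flag afterwards.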
private static void prepareIOEHandleState(WinNT.HANDLE[] stdIOE, Boolean[] inherit) {
for(int i = 0; i < HANDLE_STORAGE_SIZE; ++i) {
WinNT.HANDLE hstd = stdIOE[i];
if (!Kernel32.INVALID_HANDLE_VALUE.equals(hstd)) {
inherit[i] = Boolean.TRUE;
Kernel32.INSTANCE.SetHandleInformation(hstd, Kernel32.HANDLE_FLAG_INHERIT, 0);
}
}
}
private static void restoreIOEHandleState(WinNT.HANDLE[] stdIOE, Boolean[] inherit) {
for (int i = HANDLE_STORAGE_SIZE - 1; i >= 0; --i) {
if (!Kernel32.INVALID_HANDLE_VALUE.equals(stdIOE[i])) {
Kernel32.INSTANCE.SetHandleInformation(stdIOE[i], Kernel32.HANDLE_FLAG_INHERIT, inherit[i] ? Kernel32.HANDLE_FLAG_INHERIT : 0);
}
}
}
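// Creates the child process under the given user via CreateProcessWithLogonW,
// wiring the three standard handles into STARTUPINFO (stderr is aliased to stdout
// when redirectErrorStream is true). The child-side pipe ends are closed once the
// process has been created, or both ends if creation failed.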
private static WinNT.HANDLE processCreate(String username,
String password,
String cmd,
final String envblock,
final String path,
final WinNT.HANDLEByReference[] stdHandles,
final boolean redirectErrorStream) {
WinNT.HANDLE ret = new WinNT.HANDLE(Pointer.createConstant(0));
WinNT.HANDLE[] stdIOE = new WinNT.HANDLE[] {
Kernel32.INVALID_HANDLE_VALUE, Kernel32.INVALID_HANDLE_VALUE, Kernel32.INVALID_HANDLE_VALUE,
stdHandles[0].getValue(), stdHandles[1].getValue(), stdHandles[2].getValue()
};
stdIOE[0] = Kernel32.INSTANCE.GetStdHandle(Kernel32.STD_INPUT_HANDLE);
stdIOE[1] = Kernel32.INSTANCE.GetStdHandle(Kernel32.STD_OUTPUT_HANDLE);
stdIOE[2] = Kernel32.INSTANCE.GetStdHandle(Kernel32.STD_ERROR_HANDLE);
Boolean[] inherit = new Boolean[] {
Boolean.FALSE, Boolean.FALSE, Boolean.FALSE,
Boolean.FALSE, Boolean.FALSE, Boolean.FALSE
};
prepareIOEHandleState(stdIOE, inherit);
// input
WinNT.HANDLEByReference hStdInput = new WinNT.HANDLEByReference();
WinNT.HANDLEByReference[] pipeIn = new WinNT.HANDLEByReference[] {
new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE) };
// output
WinNT.HANDLEByReference hStdOutput = new WinNT.HANDLEByReference();
WinNT.HANDLEByReference[] pipeOut = new WinNT.HANDLEByReference[] {
new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE) };
// error
WinNT.HANDLEByReference hStdError = new WinNT.HANDLEByReference();
WinNT.HANDLEByReference[] pipeError = new WinNT.HANDLEByReference[] {
new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE) };
boolean success;
if (initHolder(stdHandles[0], pipeIn, OFFSET_READ, hStdInput)) {
if (initHolder(stdHandles[1], pipeOut, OFFSET_WRITE, hStdOutput)) {
WinBase.STARTUPINFO si = new WinBase.STARTUPINFO();
si.hStdInput = hStdInput.getValue();
si.hStdOutput = hStdOutput.getValue();
if (redirectErrorStream) {
si.hStdError = si.hStdOutput;
stdHandles[2].setValue(JAVA_INVALID_HANDLE_VALUE);
success = true;
} else {
success = initHolder(stdHandles[2], pipeError, OFFSET_WRITE, hStdError);
si.hStdError = hStdError.getValue();
}
if (success) {
WTypes.LPSTR lpEnvironment = envblock == null ? new WTypes.LPSTR() : new WTypes.LPSTR(envblock);
Kernel32.PROCESS_INFORMATION pi = new WinBase.PROCESS_INFORMATION();
si.dwFlags = Kernel32.STARTF_USESTDHANDLES;
if (!Advapi32.INSTANCE.CreateProcessWithLogonW(
username
, null
, password
, Advapi32.LOGON_WITH_PROFILE
, null
, cmd
, Kernel32.CREATE_NO_WINDOW
, lpEnvironment.getPointer()
, path
, si
, pi)) {
throw new Win32Exception(Kernel32.INSTANCE.GetLastError());
} else {
closeHandle(pi.hThread);
ret = pi.hProcess;
}
}
releaseHolder(ret.getPointer().equals(Pointer.createConstant(0)), pipeError, OFFSET_WRITE);
releaseHolder(ret.getPointer().equals(Pointer.createConstant(0)), pipeOut, OFFSET_WRITE);
}
releaseHolder(ret.getPointer().equals(Pointer.createConstant(0)), pipeIn, OFFSET_READ);
}
restoreIOEHandleState(stdIOE, inherit);
return ret;
}
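// Converts the long-valued handles used by the Java layer into JNA
// HANDLEByReference wrappers for processCreate, then writes the (possibly
// replaced) handle values back into stdHandles for the caller.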
private static synchronized WinNT.HANDLE create(String username,
String password,
String cmd,
final String envblock,
final String path,
final long[] stdHandles,
final boolean redirectErrorStream) {
WinNT.HANDLE ret = new WinNT.HANDLE(Pointer.createConstant(0));
WinNT.HANDLEByReference[] handles = new WinNT.HANDLEByReference[stdHandles.length];
for (int i = 0; i < stdHandles.length; i++) {
handles[i] = new WinNT.HANDLEByReference(new WinNT.HANDLE(Pointer.createConstant(stdHandles[i])));
}
if (cmd != null) {
if (username != null && password != null) {
ret = processCreate(username, password, cmd, envblock, path, handles, redirectErrorStream);
}
}
for (int i = 0; i < stdHandles.length; i++) {
stdHandles[i] = handles[i].getPointer().getLong(0);
}
return ret;
}
private static int getExitCodeProcess(WinNT.HANDLE handle) {
IntByReference exitStatus = new IntByReference();
if (!Kernel32.INSTANCE.GetExitCodeProcess(handle, exitStatus)) {
throw new Win32Exception(Kernel32.INSTANCE.GetLastError());
}
return exitStatus.getValue();
}
private static void terminateProcess(WinNT.HANDLE handle) {
Kernel32.INSTANCE.TerminateProcess(handle, 1);
}
private static boolean isProcessAlive(WinNT.HANDLE handle) {
IntByReference exitStatus = new IntByReference();
Kernel32.INSTANCE.GetExitCodeProcess(handle, exitStatus);
return exitStatus.getValue() == STILL_ACTIVE;
}
private static void closeHandle(WinNT.HANDLE handle) {
Kernel32Util.closeHandle(handle);
}
/**
* Opens a file for atomic append. The file is created if it doesn't
* already exist.
*
* @param path the file to open or create
* @return the native HANDLE
*/
private static long openForAtomicAppend(String path) throws IOException {
int access = Kernel32.GENERIC_READ | Kernel32.GENERIC_WRITE;
int sharing = Kernel32.FILE_SHARE_READ | Kernel32.FILE_SHARE_WRITE;
int disposition = Kernel32.OPEN_ALWAYS;
int flagsAndAttributes = Kernel32.FILE_ATTRIBUTE_NORMAL;
if (path == null || path.isEmpty()) {
return -1;
} else {
WinNT.HANDLE handle = Kernel32.INSTANCE.CreateFile(path, access, sharing, null, disposition, flagsAndAttributes, null);
if (handle == Kernel32.INVALID_HANDLE_VALUE) {
throw new Win32Exception(Kernel32.INSTANCE.GetLastError());
}
return handle.getPointer().getLong(0);
}
}
private static void waitForInterruptibly(WinNT.HANDLE handle) {
int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, Kernel32.INFINITE);
if (result == Kernel32.WAIT_FAILED) {
throw new Win32Exception(Kernel32.INSTANCE.GetLastError());
}
}
private static void waitForTimeoutInterruptibly(WinNT.HANDLE handle, long timeout) {
int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, (int) timeout);
if (result == Kernel32.WAIT_FAILED) {
throw new Win32Exception(Kernel32.INSTANCE.GetLastError());
}
}
}

40
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/ConstantsTest.java

@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.junit.Assert;
import org.junit.Test;
/**
* Constants Test
*/
public class ConstantsTest {
/**
* Test PID via env
*/
@Test
public void testPID() {
if (OSUtils.isWindows()) {
Assert.assertEquals(Constants.PID, "handle");
} else {
Assert.assertEquals(Constants.PID, "pid");
}
}
}

2
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminatorTest.java

@ -27,8 +27,6 @@ import org.slf4j.Marker;
import java.util.Map;
import static org.junit.Assert.*;
public class TaskLogDiscriminatorTest {
/**

1
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogFilterTest.java

@ -21,7 +21,6 @@ import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.IThrowableProxy;
import ch.qos.logback.classic.spi.LoggerContextVO;
import ch.qos.logback.core.spi.FilterReply;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.junit.Assert;
import org.junit.Test;

55
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java

@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.task;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.junit.Assert;
import org.junit.Test;
import java.util.LinkedList;
import java.util.List;
public class FlinkParametersTest {
@Test
public void getResourceFilesList() {
FlinkParameters flinkParameters = new FlinkParameters();
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertTrue(flinkParameters.getResourceFilesList().isEmpty());
ResourceInfo mainResource = new ResourceInfo();
mainResource.setRes("testFlinkMain-1.0.0-SNAPSHOT.jar");
flinkParameters.setMainJar(mainResource);
List<ResourceInfo> resourceInfos = new LinkedList<>();
ResourceInfo resourceInfo1 = new ResourceInfo();
resourceInfo1.setRes("testFlinkParameters1.jar");
resourceInfos.add(resourceInfo1);
flinkParameters.setResourceList(resourceInfos);
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertEquals(2, flinkParameters.getResourceFilesList().size());
ResourceInfo resourceInfo2 = new ResourceInfo();
resourceInfo2.setRes("testFlinkParameters2.jar");
resourceInfos.add(resourceInfo2);
flinkParameters.setResourceList(resourceInfos);
Assert.assertNotNull(flinkParameters.getResourceFilesList());
Assert.assertEquals(3, flinkParameters.getResourceFilesList().size());
}
}

17
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java

@ -30,29 +30,32 @@ public class FileUtilsTest {
@Test
public void suffix() {
Assert.assertEquals(FileUtils.suffix("ninfor.java"),"java");
Assert.assertEquals("java", FileUtils.suffix("ninfor.java"));
Assert.assertEquals("", FileUtils.suffix(null));
Assert.assertEquals("", FileUtils.suffix(""));
Assert.assertEquals("", FileUtils.suffix("ninfor-java"));
}
@Test
public void testGetDownloadFilename() {
PowerMockito.mockStatic(DateUtils.class);
PowerMockito.when(DateUtils.getCurrentTime(YYYYMMDDHHMMSS)).thenReturn("20190101101059");
Assert.assertEquals(FileUtils.getDownloadFilename("test"),
"/tmp/dolphinscheduler/download/20190101101059/test");
Assert.assertEquals("/tmp/dolphinscheduler/download/20190101101059/test",
FileUtils.getDownloadFilename("test"));
}
@Test
public void testGetUploadFilename() {
Assert.assertEquals(FileUtils.getUploadFilename("aaa","bbb"),
"/tmp/dolphinscheduler/aaa/resources/bbb");
Assert.assertEquals("/tmp/dolphinscheduler/aaa/resources/bbb",
FileUtils.getUploadFilename("aaa","bbb"));
}
@Test
public void testGetProcessExecDir() {
String dir = FileUtils.getProcessExecDir(1,2,3, 4);
Assert.assertEquals(dir, "/tmp/dolphinscheduler/exec/process/1/2/3/4");
Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3/4", dir);
dir = FileUtils.getProcessExecDir(1,2,3);
Assert.assertEquals(dir, "/tmp/dolphinscheduler/exec/process/1/2/3");
Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3", dir);
}
@Test

3
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java

@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -39,7 +38,7 @@ public class HttpUtilsTest {
String result = HttpUtils.get("https://github.com/manifest.json");
Assert.assertNotNull(result);
JSONObject jsonObject = JSON.parseObject(result);
Assert.assertEquals(jsonObject.getString("name"), "GitHub");
Assert.assertEquals("GitHub", jsonObject.getString("name"));
result = HttpUtils.get("https://123.333.111.33/ccc");
Assert.assertNull(result);

24
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java

@ -39,16 +39,20 @@ public class OSUtilsTest {
@Test
public void testOSMetric(){
double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize();
Assert.assertTrue(availablePhysicalMemorySize > 0.0f);
double totalMemorySize = OSUtils.totalMemorySize();
Assert.assertTrue(totalMemorySize > 0.0f);
double loadAverage = OSUtils.loadAverage();
logger.info("loadAverage {}", loadAverage);
double memoryUsage = OSUtils.memoryUsage();
Assert.assertTrue(memoryUsage > 0.0f);
double cpuUsage = OSUtils.cpuUsage();
Assert.assertTrue(cpuUsage > 0.0f);
if (!OSUtils.isWindows()) {
double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize();
Assert.assertTrue(availablePhysicalMemorySize > 0.0f);
double totalMemorySize = OSUtils.totalMemorySize();
Assert.assertTrue(totalMemorySize > 0.0f);
double loadAverage = OSUtils.loadAverage();
logger.info("loadAverage {}", loadAverage);
double memoryUsage = OSUtils.memoryUsage();
Assert.assertTrue(memoryUsage > 0.0f);
double cpuUsage = OSUtils.cpuUsage();
Assert.assertTrue(cpuUsage > 0.0f);
} else {
// TODO windows ut
}
}
@Test

3
dolphinscheduler-service/src/test/java/utils/PreconditionsTest.java → dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PreconditionsTest.java

@ -14,9 +14,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils;
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

3
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SchemaUtilsTest.java

@ -19,15 +19,12 @@ package org.apache.dolphinscheduler.common.utils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

2
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java

@ -19,8 +19,6 @@ package org.apache.dolphinscheduler.common.utils;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
public class StringUtilsTest {
@Test
public void testIsNotEmpty() {

3
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtilsTest.java

@ -19,9 +19,6 @@ package org.apache.dolphinscheduler.common.utils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.LoggerFactory;

210
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32Test.java

@ -0,0 +1,210 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils.process;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@RunWith(PowerMockRunner.class)
@PrepareForTest(OSUtils.class)
public class ProcessBuilderForWin32Test {
private static final Logger logger = LoggerFactory.getLogger(ProcessBuilderForWin32Test.class);
@Before
public void before() {
PowerMockito.mockStatic(OSUtils.class);
PowerMockito.when(OSUtils.isWindows()).thenReturn(true);
}
@Test
public void testCreateProcessBuilderForWin32() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
Assert.assertNotNull(builder);
builder = new ProcessBuilderForWin32("net");
Assert.assertNotNull(builder);
builder = new ProcessBuilderForWin32(Collections.singletonList("net"));
Assert.assertNotNull(builder);
builder = new ProcessBuilderForWin32((List<String>) null);
Assert.assertNotNull(builder);
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testBuildUser() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.user("test", StringUtils.EMPTY);
Assert.assertNotNull(builder);
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testBuildCommand() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.command(Collections.singletonList("net"));
Assert.assertNotEquals(0, builder.command().size());
builder = new ProcessBuilderForWin32();
builder.command("net");
Assert.assertNotEquals(0, builder.command().size());
builder = new ProcessBuilderForWin32();
builder.command((List<String>) null);
Assert.assertNotEquals(0, builder.command().size());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testEnvironment() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
Assert.assertNotNull(builder.environment());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.environment(new String[]{ "a=123" });
Assert.assertNotEquals(0, builder.environment().size());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testDirectory() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.directory(new File("/tmp"));
Assert.assertNotNull(builder.directory());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testStream() {
try {
InputStream in = ProcessBuilderForWin32.NullInputStream.INSTANCE;
Assert.assertNotNull(in);
Assert.assertEquals(-1, in.read());
Assert.assertEquals(0, in.available());
OutputStream out = ProcessBuilderForWin32.NullOutputStream.INSTANCE;
Assert.assertNotNull(out);
out.write(new byte[] {1});
} catch (Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testRedirect() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.redirectInput(new File("/tmp"));
Assert.assertNotNull(builder.redirectInput());
Assert.assertNotNull(builder.redirectInput().file());
builder.redirectOutput(new File("/tmp"));
Assert.assertNotNull(builder.redirectOutput());
Assert.assertNotNull(builder.redirectOutput().file());
builder.redirectError(new File("/tmp"));
Assert.assertNotNull(builder.redirectError());
Assert.assertNotNull(builder.redirectError().file());
builder.redirectInput(builder.redirectOutput());
builder.redirectOutput(builder.redirectInput());
builder.redirectError(builder.redirectInput());
Assert.assertNotNull(ProcessBuilderForWin32.Redirect.PIPE.type());
Assert.assertNotNull(ProcessBuilderForWin32.Redirect.PIPE.toString());
Assert.assertNotNull(ProcessBuilderForWin32.Redirect.INHERIT.type());
Assert.assertNotNull(ProcessBuilderForWin32.Redirect.INHERIT.toString());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testRedirectErrorStream() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.redirectErrorStream(true);
Assert.assertTrue(builder.redirectErrorStream());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void runCmdViaUser() {
try {
ProcessBuilderForWin32 builder = new ProcessBuilderForWin32();
builder.user("test123", StringUtils.EMPTY);
List<String> commands = new ArrayList<>();
commands.add("cmd.exe");
commands.add("/c");
commands.add("net user");
builder.command(commands);
Process process = builder.start();
BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream(), Charset.forName("GBK")));
String line;
StringBuilder sb = new StringBuilder();
while ((line = inReader.readLine()) != null) {
sb.append(line);
}
logger.info("net user: {}", sb.toString());
Assert.assertNotEquals(StringUtils.EMPTY, sb.toString());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
}

124
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32Test.java

@ -0,0 +1,124 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils.process;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
@RunWith(PowerMockRunner.class)
@PrepareForTest({OSUtils.class, ProcessEnvironmentForWin32.class})
public class ProcessEnvironmentForWin32Test {
private static final Logger logger = LoggerFactory.getLogger(ProcessEnvironmentForWin32Test.class);
@Before
public void before() {
try {
PowerMockito.mockStatic(OSUtils.class);
PowerMockito.when(OSUtils.isWindows()).thenReturn(true);
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testPutAndGet() {
try {
ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0);
processEnvironmentForWin32.put("a", "123");
Assert.assertEquals("123", processEnvironmentForWin32.get("a"));
Assert.assertTrue(processEnvironmentForWin32.containsKey("a"));
Assert.assertTrue(processEnvironmentForWin32.containsValue("123"));
Assert.assertEquals("123", processEnvironmentForWin32.remove("a"));
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
try {
ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0);
processEnvironmentForWin32.put("b=", "123");
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
try {
ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0);
processEnvironmentForWin32.put("b", "\u0000");
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
try {
ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0);
processEnvironmentForWin32.get(null);
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testEntrySet() {
try {
ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0);
processEnvironmentForWin32.clear();
processEnvironmentForWin32.put("a", "123");
Assert.assertEquals(0, processEnvironmentForWin32.entrySet().size());
Assert.assertTrue(processEnvironmentForWin32.entrySet().isEmpty());
for (Map.Entry<String, String> entry : processEnvironmentForWin32.entrySet()) {
Assert.assertNotNull(entry);
Assert.assertNotNull(entry.getKey());
Assert.assertNotNull(entry.getValue());
Assert.assertNotNull(entry.setValue("123"));
}
processEnvironmentForWin32.clear();
Set<String> keys = processEnvironmentForWin32.keySet();
Assert.assertEquals(0, keys.size());
Assert.assertTrue(keys.isEmpty());
processEnvironmentForWin32.clear();
Collection<String> values = processEnvironmentForWin32.values();
Assert.assertEquals(0, values.size());
Assert.assertTrue(values.isEmpty());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
@Test
public void testToEnvironmentBlock() {
try {
ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0);
Assert.assertNotNull(processEnvironmentForWin32.toEnvironmentBlock());
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
}

70
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32Test.java

@ -0,0 +1,70 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils.process;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.security.action.GetPropertyAction;
@RunWith(PowerMockRunner.class)
@PrepareForTest({OSUtils.class, GetPropertyAction.class})
public class ProcessImplForWin32Test {
private static final Logger logger = LoggerFactory.getLogger(ProcessImplForWin32Test.class);
@Before
public void before() {
PowerMockito.mockStatic(OSUtils.class);
PowerMockito.mockStatic(GetPropertyAction.class);
PowerMockito.when(OSUtils.isWindows()).thenReturn(true);
}
@Test
public void testStart() {
try {
Process process = ProcessImplForWin32.start(
"test123", StringUtils.EMPTY, new String[]{"net"},
null, null, null, false);
Assert.assertNotNull(process);
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
try {
Process process = ProcessImplForWin32.start(
"test123", StringUtils.EMPTY, new String[]{"net"},
null, null, new ProcessBuilderForWin32.Redirect[]{
ProcessBuilderForWin32.Redirect.PIPE,
ProcessBuilderForWin32.Redirect.PIPE,
ProcessBuilderForWin32.Redirect.PIPE
}, false);
Assert.assertNotNull(process);
} catch (Error | Exception e) {
logger.error(e.getMessage());
}
}
}

21
dolphinscheduler-dao/pom.xml

@ -25,7 +25,7 @@
</parent>
<artifactId>dolphinscheduler-dao</artifactId>
<name>${project.artifactId}</name>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
@ -44,6 +44,12 @@
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
@ -71,6 +77,14 @@
<artifactId>log4j-api</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@ -78,7 +92,10 @@
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>

6
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java

@ -144,9 +144,11 @@ public class AlertDao extends AbstractBaseDao {
* @param taskId taskId
* @param taskName taskName
*/
public void sendTaskTimeoutAlert(int alertgroupId,String receivers,String receiversCc,int taskId,String taskName){
public void sendTaskTimeoutAlert(int alertgroupId,String receivers,String receiversCc, int processInstanceId,
String processInstanceName, int taskId,String taskName){
Alert alert = new Alert();
String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]",taskId,taskName);
String content = String.format("[{'process instance id':'%d','process instance name':'%s','task id':'%d','task name':'%s'," +
"'event':'timeout','warnLevel':'middle'}]", processInstanceId, processInstanceName, taskId, taskName);
alert.setTitle("Task Timeout Warn");
alert.setShowType(ShowType.TABLE);
alert.setContent(content);

2
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java

@ -39,7 +39,7 @@ public class HiveDataSource extends BaseDataSource {
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) {
if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) {
jdbcUrl += "/";
}

1
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java

@ -29,7 +29,6 @@ import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

1
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java

@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.dao.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import org.apache.dolphinscheduler.common.enums.UdfType;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;

1
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java

@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.dao.upgrade;
import org.apache.dolphinscheduler.common.utils.ConnectionUtils;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

1
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java

@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.dao.upgrade;
import org.apache.dolphinscheduler.common.utils.ConnectionUtils;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

4
dolphinscheduler-dao/src/main/resources/application.properties

@ -23,6 +23,10 @@ spring.datasource.url=jdbc:postgresql://localhost:5432/dolphinscheduler
# mysql
#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8
# h2
#spring.datasource.driver-class-name=org.h2.Driver
#spring.datasource.url=jdbc:h2:file:../sql/h2;AUTO_SERVER=TRUE
spring.datasource.username=test
spring.datasource.password=test

4
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java

@ -67,7 +67,7 @@ public class ErrorCommandMapperTest {
//update
errorCommand.setUpdateTime(new Date());
int update = errorCommandMapper.updateById(errorCommand);
Assert.assertEquals(update, 1);
Assert.assertEquals(1,update);
errorCommandMapper.deleteById(errorCommand.getId());
}
@ -79,7 +79,7 @@ public class ErrorCommandMapperTest {
ErrorCommand errorCommand = insertOne();
int delete = errorCommandMapper.deleteById(errorCommand.getId());
Assert.assertEquals(delete, 1);
Assert.assertEquals(1,delete);
}
/**

2
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java

@ -17,8 +17,6 @@
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.junit.Assert;
import org.junit.Test;

1
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java

@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.Queue;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

2
dolphinscheduler-remote/pom.xml

@ -12,8 +12,6 @@
<artifactId>dolphinscheduler-remote</artifactId>
<name>dolphinscheduler-remote</name>
<!-- FIXME change it to the project's website -->
<url>http://www.example.com</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

2
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java

@ -145,7 +145,7 @@ public class NettyRemotingServer {
try {
future = serverBootstrap.bind(serverConfig.getListenPort()).sync();
} catch (Exception e) {
logger.error("NettyRemotingServer bind fail {}, exit", e);
logger.error("NettyRemotingServer bind fail {}, exit",e.getMessage(), e);
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
if (future.isSuccess()) {

2
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java

@ -16,8 +16,6 @@
*/
package org.apache.dolphinscheduler.remote.command;
import com.sun.org.apache.regexp.internal.RE;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;

2
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java

@ -1 +1 @@
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.List; import java.util.concurrent.atomic.AtomicLong; /** * execute task request command */ public class ExecuteTaskRequestCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * application name */ private String applicationName; /** * group name */ private String groupName; /** * task name */ private String taskName; /** * connector port */ private int connectorPort; /** * description info */ private String description; /** * class name */ private String className; /** * method name */ private String methodName; /** * parameters */ private String params; /** * shard itemds */ private List<Integer> shardItems; public List<Integer> getShardItems() { return shardItems; } public void setShardItems(List<Integer> shardItems) { this.shardItems = shardItems; } public String getParams() { return params; } public void setParams(String params) { this.params = params; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public String getApplicationName() { return applicationName; } public void setApplicationName(String applicationName) { this.applicationName = applicationName; } public String getGroupName() { return groupName; } public void setGroupName(String groupName) { this.groupName = groupName; } public String getTaskName() { return taskName; } public void setTaskName(String taskName) { this.taskName = taskName; } public int getConnectorPort() { return connectorPort; } public void setConnectorPort(int connectorPort) { this.connectorPort = connectorPort; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getClassName() { return className; } public void setClassName(String className) { this.className = className; } public String getMethodName() { return methodName; } public void setMethodName(String methodName) { this.methodName = methodName; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_REQUEST); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.List; /** * execute task request command */ public class ExecuteTaskRequestCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * application name */ private String applicationName; /** * group name */ private String groupName; /** * task name */ private String taskName; /** * connector port */ private int connectorPort; /** * description info */ private String description; /** * class name */ private String className; /** * method name */ private String methodName; /** * parameters */ private String params; /** * shard itemds */ private List<Integer> shardItems; public List<Integer> getShardItems() { return shardItems; } public void setShardItems(List<Integer> shardItems) { this.shardItems = shardItems; } public String getParams() { return params; } public void setParams(String params) { this.params = params; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public String getApplicationName() { return applicationName; } public void setApplicationName(String applicationName) { this.applicationName = applicationName; } public String getGroupName() { return groupName; } public void setGroupName(String groupName) { this.groupName = groupName; } public String getTaskName() { return taskName; } public void setTaskName(String taskName) { this.taskName = taskName; } public int getConnectorPort() { return connectorPort; } public void setConnectorPort(int connectorPort) { this.connectorPort = connectorPort; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getClassName() { return className; } public void setClassName(String className) { this.className = className; } public String getMethodName() { return methodName; } public void setMethodName(String methodName) { this.methodName = methodName; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_REQUEST); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } }

2
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java

@ -1 +1 @@
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.concurrent.atomic.AtomicLong; /** * execute task response command */ public class ExecuteTaskResponseCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * return result */ private Object result; /** * received time */ private long receivedTime; /** * execute count */ private int executeCount; /** * execute time */ private long executeTime; public String getAttemptId() { return attemptId; } public void setAttemptId(String attemptId) { this.attemptId = attemptId; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public Object getResult() { return result; } public void setResult(Object result) { this.result = result; } public long getReceivedTime() { return receivedTime; } public void setReceivedTime(long receivedTime) { this.receivedTime = receivedTime; } public int getExecuteCount() { return executeCount; } public void setExecuteCount(int executeCount) { this.executeCount = executeCount; } public long getExecuteTime() { return executeTime; } public void setExecuteTime(long executeTime) { this.executeTime = executeTime; } public Command convert2Command(long opaque){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_RESPONSE); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; /** * execute task response command */ public class ExecuteTaskResponseCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * return result */ private Object result; /** * received time */ private long receivedTime; /** * execute count */ private int executeCount; /** * execute time */ private long executeTime; public String getAttemptId() { return attemptId; } public void setAttemptId(String attemptId) { this.attemptId = attemptId; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public Object getResult() { return result; } public void setResult(Object result) { this.result = result; } public long getReceivedTime() { return receivedTime; } public void setReceivedTime(long receivedTime) { this.receivedTime = receivedTime; } public int getExecuteCount() { return executeCount; } public void setExecuteCount(int executeCount) { this.executeCount = executeCount; } public long getExecuteTime() { return executeTime; } public void setExecuteTime(long executeTime) { this.executeTime = executeTime; } public Command convert2Command(long opaque){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_RESPONSE); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } }

1
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java

@ -21,7 +21,6 @@ import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* ping machine

1
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java

@ -22,7 +22,6 @@ import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* get log bytes request command

1
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java

@ -22,7 +22,6 @@ import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* roll view log request command

1
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java

@ -22,7 +22,6 @@ import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* view log request command

2
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java

@ -107,7 +107,7 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter {
*/
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("exceptionCaught : {}", cause);
logger.error("exceptionCaught : {}",cause.getMessage(), cause);
nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel()));
ctx.channel().close();
}

2
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java

@ -140,7 +140,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter {
*/
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("exceptionCaught : {}", cause);
logger.error("exceptionCaught : {}",cause.getMessage(), cause);
ctx.channel().close();
}
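Both handler hunks (and the MasterExecThread and AlertManager hunks further down) apply the same SLF4J parameterized-logging pattern. A minimal standalone sketch of the behaviour, with an illustrative logger and message:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {
    private static final Logger logger = LoggerFactory.getLogger(LoggingSketch.class);

    void handle(Throwable cause) {
        // Before: resolves to error(String, Throwable), so the "{}" is never filled
        // (the stack trace is still printed, but the message ends with a literal "{}").
        logger.error("exceptionCaught : {}", cause);

        // After: cause.getMessage() fills the "{}" placeholder, and the trailing Throwable
        // is still recognised by SLF4J, so its stack trace is logged as well.
        logger.error("exceptionCaught : {}", cause.getMessage(), cause);

        // Placeholders and arguments must match; with no "{}" the argument is silently dropped,
        // which is what the MasterExecThread hunk below fixes. "node-A" is illustrative.
        logger.error("task instance {} cannot find, please check it!", "node-A");
    }
}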

8
dolphinscheduler-server/pom.xml

@ -25,7 +25,7 @@
</parent>
<artifactId>dolphinscheduler-server</artifactId>
<name>dolphinscheduler-server</name>
<url>http://maven.apache.org</url>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@ -86,6 +86,12 @@
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java

@ -565,7 +565,7 @@ public class MasterExecThread implements Runnable {
TaskInstance taskInstance = completeTaskList.get(nodeName);
if(taskInstance == null){
logger.error("task instance cannot find, please check it!", nodeName);
logger.error("task instance {} cannot find, please check it!", nodeName);
return conditionTaskList;
}

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java

@ -128,7 +128,9 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
// process define
ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
// send warn mail
alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),processDefine.getReceiversCc(),taskInstance.getId(),taskInstance.getName());
alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),
processDefine.getReceiversCc(), processInstance.getId(), processInstance.getName(),
taskInstance.getId(),taskInstance.getName());
checkTimeout = false;
}
}
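Judging purely from the widened call site above, the alert method now receives the process instance id and name alongside the task details. A hypothetical reconstruction of the expected shape; parameter names and types are assumptions and the real AlertDao may differ:

// Hypothetical interface inferred from the call site; not the actual AlertDao API.
interface TaskTimeoutAlertSender {
    void sendTaskTimeoutAlert(int warningGroupId,
                              String receivers,
                              String receiversCc,
                              int processInstanceId,
                              String processInstanceName,
                              int taskId,
                              String taskName);
}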

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java

@ -133,6 +133,8 @@ public class AlertManager {
continue;
}
LinkedHashMap<String, String> failedTaskMap = new LinkedHashMap();
failedTaskMap.put("process instance id", String.valueOf(processInstance.getId()));
failedTaskMap.put("process instance name", processInstance.getName());
failedTaskMap.put("task id", String.valueOf(task.getId()));
failedTaskMap.put("task name", task.getName());
failedTaskMap.put("task type", task.getTaskType());
@ -193,7 +195,7 @@ public class AlertManager {
logger.info("add alert to db , alert : {}", alert.toString());
}catch (Exception e){
logger.error("send alert failed! " + e);
logger.error("send alert failed:{} ", e.getMessage());
}
}

16
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java

@ -17,12 +17,11 @@
package org.apache.dolphinscheduler.server.utils;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
@ -32,12 +31,7 @@ import java.util.List;
* spark args utils
*/
public class FlinkArgsUtils {
/**
* logger of FlinkArgsUtils
*/
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(FlinkArgsUtils.class);
private static final String LOCAL_DEPLOY_MODE = "local";
/**
* build args
* @param param flink parameters
@ -52,7 +46,7 @@ public class FlinkArgsUtils {
deployMode = tmpDeployMode;
}
if (!"local".equals(deployMode)) {
if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
args.add(Constants.FLINK_RUN_MODE); //-m
args.add(Constants.FLINK_YARN_CLUSTER); //yarn-cluster
@ -113,12 +107,12 @@ public class FlinkArgsUtils {
String queue = param.getQueue();
if (StringUtils.isNotEmpty(others)) {
if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals("local")) {
if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) {
args.add(Constants.FLINK_QUEUE);
args.add(param.getQueue());
}
args.add(others);
} else if (StringUtils.isNotEmpty(queue) && !deployMode.equals("local")) {
} else if (StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) {
args.add(Constants.FLINK_QUEUE);
args.add(param.getQueue());
}
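The FlinkArgsUtils hunk replaces the repeated "local" literal with the LOCAL_DEPLOY_MODE constant and drops the unused logger; writing the comparison as LOCAL_DEPLOY_MODE.equals(deployMode) also keeps it null-safe. A minimal standalone sketch of the resulting branch, with constant values assumed from the inline comments in the hunk (-m, yarn-cluster):

import java.util.ArrayList;
import java.util.List;

public class FlinkDeployModeSketch {
    private static final String LOCAL_DEPLOY_MODE = "local";
    private static final String FLINK_RUN_MODE = "-m";               // assumed value of Constants.FLINK_RUN_MODE
    private static final String FLINK_YARN_CLUSTER = "yarn-cluster"; // assumed value of Constants.FLINK_YARN_CLUSTER

    static List<String> deployModeArgs(String deployMode) {
        List<String> args = new ArrayList<>();
        // the constant is never null, so a null deployMode simply falls into the cluster branch
        if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
            args.add(FLINK_RUN_MODE);
            args.add(FLINK_YARN_CLUSTER);
        }
        return args;
    }
}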

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java

@ -296,9 +296,7 @@ public class TaskScheduleThread implements Runnable {
if (baseParam != null) {
List<String> projectResourceFiles = baseParam.getResourceFilesList();
if (projectResourceFiles != null) {
projectFiles.addAll(projectResourceFiles);
}
projectFiles.addAll(projectResourceFiles);
}
return new ArrayList<>(projectFiles);

Some files were not shown because too many files have changed in this diff.
