
Merge remote-tracking branch 'upstream/dev' into dev

pull/3/MERGE
dailidong 4 years ago
commit 27139913c0
  1. 4
      .github/workflows/ci_e2e.yml
  2. 27
      .github/workflows/ci_ut.yml
  3. 1
      .gitignore
  4. 2
      CONTRIBUTING.md
  5. 2
      docker/build/Dockerfile
  6. 4
      docker/build/README.md
  7. 4
      docker/build/README_zh_CN.md
  8. 5
      docker/build/conf/dolphinscheduler/worker.properties.tpl
  9. 2
      docker/build/conf/zookeeper/zoo.cfg
  10. 1
      docker/build/startup-init-conf.sh
  11. 1
      docker/docker-swarm/docker-compose.yml
  12. 1
      docker/docker-swarm/docker-stack.yml
  13. 1
      docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml
  14. 6
      docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml
  15. 6
      docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml
  16. 11
      docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml
  17. 1
      docker/kubernetes/dolphinscheduler/values.yaml
  18. 89
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java
  19. 7
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java
  20. 233
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java
  21. 4
      dolphinscheduler-api/pom.xml
  22. 6
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java
  23. 30
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java
  24. 10
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java
  25. 25
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java
  26. 218
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java
  27. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java
  28. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java
  29. 36
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java
  30. 56
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java
  31. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java
  32. 3
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilter.java
  33. 331
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
  34. 5
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java
  35. 14
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java
  36. 124
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java
  37. 12
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java
  38. 54
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java
  39. 17
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java
  40. 317
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java
  41. 92
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java
  42. 12
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java
  43. 93
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java
  44. 8
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java
  45. 1359
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java
  46. 70
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java
  47. 175
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java
  48. 326
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java
  49. 8
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java
  50. 17
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
  51. 6
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java
  52. 110
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java
  53. 55
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java
  54. 273
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java
  55. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java
  56. 121
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java
  57. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java
  58. 186
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java
  59. 384
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java
  60. 146
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java
  61. 1731
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java
  62. 181
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java
  63. 443
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java
  64. 158
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java
  65. 331
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java
  66. 10
      dolphinscheduler-api/src/main/resources/i18n/messages.properties
  67. 10
      dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
  68. 11
      dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
  69. 258
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java
  70. 83
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java
  71. 23
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java
  72. 40
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java
  73. 3
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilterTest.java
  74. 105
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java
  75. 2
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java
  76. 50
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java
  77. 70
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java
  78. 218
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java
  79. 63
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java
  80. 2
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java
  81. 44
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java
  82. 910
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java
  83. 274
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java
  84. 115
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java
  85. 203
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java
  86. 32
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java
  87. 11
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java
  88. 58
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java
  89. 131
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java
  90. 100
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java
  91. 5
      dolphinscheduler-common/pom.xml
  92. 15
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
  93. 23
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertEvent.java
  94. 23
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertWarnLevel.java
  95. 4
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java
  96. 51
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java
  97. 6
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java
  98. 50
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java
  99. 26
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java
  100. 65
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java
Some files were not shown because too many files have changed in this diff.

4
.github/workflows/ci_e2e.yml

@ -58,7 +58,9 @@ jobs:
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
sudo dpkg -i google-chrome*.deb
sudo apt-get install -f -y
wget -N https://chromedriver.storage.googleapis.com/83.0.4103.39/chromedriver_linux64.zip
google-chrome -version
googleVersion=`google-chrome -version | awk '{print $3}'`
wget -N https://chromedriver.storage.googleapis.com/${googleVersion}/chromedriver_linux64.zip
unzip chromedriver_linux64.zip
sudo mv -f chromedriver /usr/local/share/chromedriver
sudo ln -s /usr/local/share/chromedriver /usr/local/bin/chromedriver

27
.github/workflows/ci_ut.yml

@ -91,3 +91,30 @@ jobs:
mkdir -p ${LOG_DIR}
docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml logs dolphinscheduler-postgresql > ${LOG_DIR}/db.txt
continue-on-error: true
Checkstyle:
name: Check code style
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: check code style
env:
WORKDIR: ./
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CHECKSTYLE_CONFIG: style/checkstyle.xml
REVIEWDOG_VERSION: v0.10.2
run: |
wget -O - -q https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.22/checkstyle-8.22-all.jar > /opt/checkstyle.jar
wget -O - -q https://raw.githubusercontent.com/reviewdog/reviewdog/master/install.sh | sh -s -- -b /opt ${REVIEWDOG_VERSION}
java -jar /opt/checkstyle.jar "${WORKDIR}" -c "${CHECKSTYLE_CONFIG}" -f xml \
| /opt/reviewdog -f=checkstyle \
-reporter="${INPUT_REPORTER:-github-pr-check}" \
-filter-mode="${INPUT_FILTER_MODE:-added}" \
-fail-on-error="${INPUT_FAIL_ON_ERROR:-false}"

1
.gitignore

@ -19,6 +19,7 @@ third-party-dependencies.txt
*.iws
*.tgz
.*.swp
.factorypath
.vim
.tmp
**/node_modules

2
CONTRIBUTING.md

@ -56,7 +56,7 @@ If remote branch has a new branch `DEV-1.0`, you need to synchronize this branch
```
git checkout -b dev-1.0 upstream/dev-1.0
git push --set-upstream origin dev1.0
git push --set-upstream origin dev-1.0
```
## Create your feature branch

2
docker/build/Dockerfile

@ -27,7 +27,7 @@ ENV DEBIAN_FRONTEND noninteractive
#If install slowly, you can replace alpine's mirror with aliyun's mirror, Example:
#RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
RUN apk update && \
apk add dos2unix shadow bash openrc python python3 sudo vim wget iputils net-tools openssh-server py2-pip tini && \
apk --update add --no-cache dos2unix shadow bash openrc python2 python3 sudo vim wget iputils net-tools openssh-server py-pip tini && \
apk add --update procps && \
openrc boot && \
pip install kazoo

4
docker/build/README.md

@ -238,6 +238,10 @@ This environment variable sets max cpu load avg for `worker-server`. The default
This environment variable sets reserved memory for `worker-server`. The default value is `0.1`.
**`WORKER_WEIGHT`**
This environment variable sets weight for `worker-server`. The default value is `100`.
**`WORKER_LISTEN_PORT`**
This environment variable sets port for `worker-server`. The default value is `1234`.

4
docker/build/README_zh_CN.md

@ -238,6 +238,10 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些
配置`worker-server`的保留内存,默认值 `0.1`
**`WORKER_WEIGHT`**
配置`worker-server`的权重,默认值`100`。
**`WORKER_LISTEN_PORT`**
配置`worker-server`的端口,默认值 `1234`

5
docker/build/conf/dolphinscheduler/worker.properties.tpl

@ -34,4 +34,7 @@ worker.reserved.memory=${WORKER_RESERVED_MEMORY}
#worker.listen.port=${WORKER_LISTEN_PORT}
# default worker group
#worker.group=${WORKER_GROUP}
#worker.groups=${WORKER_GROUP}
# default worker weight
#worker.weight=${WORKER_WEIGHT}

2
docker/build/conf/zookeeper/zoo.cfg

@ -43,3 +43,5 @@ clientPort=2181
# Purge task interval in hours
# Set to "0" to disable auto purge feature
#autopurge.purgeInterval=1
#Four Letter Words commands:stat,ruok,conf,isro
4lw.commands.whitelist=*

1
docker/build/startup-init-conf.sh

@ -74,6 +74,7 @@ export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"}
export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"}
export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"}
export WORKER_GROUP=${WORKER_GROUP:-"default"}
export WORKER_WEIGHT=${WORKER_WEIGHT:-"100"}
#============================================================================
# Alert Server

1
docker/docker-swarm/docker-compose.yml

@ -187,6 +187,7 @@ services:
WORKER_MAX_CPULOAD_AVG: "100"
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUP: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432

1
docker/docker-swarm/docker-stack.yml

@ -187,6 +187,7 @@ services:
WORKER_MAX_CPULOAD_AVG: "100"
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUP: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432

1
docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml

@ -31,6 +31,7 @@ data:
WORKER_RESERVED_MEMORY: {{ .Values.worker.configmap.WORKER_RESERVED_MEMORY | quote }}
WORKER_LISTEN_PORT: {{ .Values.worker.configmap.WORKER_LISTEN_PORT | quote }}
WORKER_GROUP: {{ .Values.worker.configmap.WORKER_GROUP | quote }}
WORKER_WEIGHT: {{ .Values.worker.configmap.WORKER_WEIGHT | quote }}
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: {{ include "dolphinscheduler.worker.base.dir" . | quote }}
dolphinscheduler_env.sh: |-
{{- range .Values.worker.configmap.DOLPHINSCHEDULER_ENV }}

6
docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml

@ -162,6 +162,12 @@ spec:
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperQuorum }}
{{- end }}
- name: ZOOKEEPER_ROOT
{{- if .Values.zookeeper.enabled }}
value: "/dolphinscheduler"
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperRoot }}
{{- end }}
- name: RESOURCE_STORAGE_TYPE
valueFrom:
configMapKeyRef:

6
docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml

@ -228,6 +228,12 @@ spec:
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperQuorum }}
{{- end }}
- name: ZOOKEEPER_ROOT
{{- if .Values.zookeeper.enabled }}
value: "/dolphinscheduler"
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperRoot }}
{{- end }}
- name: RESOURCE_STORAGE_TYPE
valueFrom:
configMapKeyRef:

11
docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml

@ -162,6 +162,11 @@ spec:
configMapKeyRef:
name: {{ include "dolphinscheduler.fullname" . }}-worker
key: WORKER_GROUP
- name: WORKER_WEIGHT
valueFrom:
configMapKeyRef:
name: {{ include "dolphinscheduler.fullname" . }}-worker
key: WORKER_WEIGHT
- name: DOLPHINSCHEDULER_DATA_BASEDIR_PATH
valueFrom:
configMapKeyRef:
@ -225,6 +230,12 @@ spec:
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperQuorum }}
{{- end }}
- name: ZOOKEEPER_ROOT
{{- if .Values.zookeeper.enabled }}
value: "/dolphinscheduler"
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperRoot }}
{{- end }}
- name: RESOURCE_STORAGE_TYPE
valueFrom:
configMapKeyRef:

1
docker/kubernetes/dolphinscheduler/values.yaml

@ -201,6 +201,7 @@ worker:
WORKER_RESERVED_MEMORY: "0.1"
WORKER_LISTEN_PORT: "1234"
WORKER_GROUP: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
DOLPHINSCHEDULER_ENV:
- "export HADOOP_HOME=/opt/soft/hadoop"

89
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java

@ -14,13 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.alert.utils;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.plugin.model.AlertData;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
@ -29,11 +30,17 @@ import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Enterprise WeChat utils
@ -41,25 +48,21 @@ import java.util.*;
public class EnterpriseWeChatUtils {
public static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatUtils.class);
public static final String ENTERPRISE_WE_CHAT_AGENT_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);
public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);
private static final String ENTERPRISE_WE_CHAT_CORP_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_CORP_ID);
private static final String ENTERPRISE_WE_CHAT_SECRET = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET);
private static final String ENTERPRISE_WE_CHAT_TOKEN_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL);
private static final String ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE = ENTERPRISE_WE_CHAT_TOKEN_URL == null ? null : ENTERPRISE_WE_CHAT_TOKEN_URL
.replaceAll("\\{corpId\\}", ENTERPRISE_WE_CHAT_CORP_ID)
.replaceAll("\\{secret\\}", ENTERPRISE_WE_CHAT_SECRET);
.replaceAll("\\{corpId}", ENTERPRISE_WE_CHAT_CORP_ID)
.replaceAll("\\{secret}", ENTERPRISE_WE_CHAT_SECRET);
private static final String ENTERPRISE_WE_CHAT_PUSH_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_PUSH_URL);
private static final String ENTERPRISE_WE_CHAT_TEAM_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG);
private static final String ENTERPRISE_WE_CHAT_USER_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG);
public static final String ENTERPRISE_WE_CHAT_AGENT_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);
public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);
private static final String agentIdRegExp = "\\{agentId}";
private static final String msgRegExp = "\\{msg}";
private static final String userRegExp = "\\{toUser}";
/**
* get Enterprise WeChat is enable
@ -120,9 +123,9 @@ public class EnterpriseWeChatUtils {
* @return Enterprise WeChat send message
*/
public static String makeTeamSendMsg(String toParty, String agentId, String msg) {
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty\\}", toParty)
.replaceAll("\\{agentId\\}", agentId)
.replaceAll("\\{msg\\}", msg);
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty}", toParty)
.replaceAll(agentIdRegExp, agentId)
.replaceAll(msgRegExp, msg);
}
/**
@ -135,9 +138,9 @@ public class EnterpriseWeChatUtils {
*/
public static String makeTeamSendMsg(Collection<String> toParty, String agentId, String msg) {
String listParty = FuncUtils.mkString(toParty, "|");
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty\\}", listParty)
.replaceAll("\\{agentId\\}", agentId)
.replaceAll("\\{msg\\}", msg);
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty}", listParty)
.replaceAll(agentIdRegExp, agentId)
.replaceAll(msgRegExp, msg);
}
/**
@ -149,9 +152,9 @@ public class EnterpriseWeChatUtils {
* @return Enterprise WeChat send message
*/
public static String makeUserSendMsg(String toUser, String agentId, String msg) {
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser\\}", toUser)
.replaceAll("\\{agentId\\}", agentId)
.replaceAll("\\{msg\\}", msg);
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser}", toUser)
.replaceAll(agentIdRegExp, agentId)
.replaceAll(msgRegExp, msg);
}
/**
@ -164,9 +167,9 @@ public class EnterpriseWeChatUtils {
*/
public static String makeUserSendMsg(Collection<String> toUser, String agentId, String msg) {
String listUser = FuncUtils.mkString(toUser, "|");
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser\\}", listUser)
.replaceAll("\\{agentId\\}", agentId)
.replaceAll("\\{msg\\}", msg);
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll(userRegExp, listUser)
.replaceAll(agentIdRegExp, agentId)
.replaceAll(msgRegExp, msg);
}
/**
@ -179,7 +182,7 @@ public class EnterpriseWeChatUtils {
* @throws IOException the IOException
*/
public static String sendEnterpriseWeChat(String charset, String data, String token) throws IOException {
String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\{token\\}", token);
String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\{token}", token);
CloseableHttpClient httpClient = HttpClients.createDefault();
try {
@ -215,13 +218,13 @@ public class EnterpriseWeChatUtils {
if (null != mapItemsList) {
for (LinkedHashMap mapItems : mapItemsList) {
Set<Map.Entry<String, String>> entries = mapItems.entrySet();
Iterator<Map.Entry<String, String>> iterator = entries.iterator();
Set<Map.Entry<String, Object>> entries = mapItems.entrySet();
Iterator<Map.Entry<String, Object>> iterator = entries.iterator();
StringBuilder t = new StringBuilder(String.format("`%s`%s", title, Constants.MARKDOWN_ENTER));
while (iterator.hasNext()) {
Map.Entry<String, String> entry = iterator.next();
Map.Entry<String, Object> entry = iterator.next();
t.append(Constants.MARKDOWN_QUOTE);
t.append(entry.getKey()).append(":").append(entry.getValue());
t.append(Constants.MARKDOWN_ENTER);
@ -241,23 +244,24 @@ public class EnterpriseWeChatUtils {
*/
public static String markdownText(String title, String content) {
if (StringUtils.isNotEmpty(content)) {
List<String> list;
try {
list = JSONUtils.toList(content, String.class);
} catch (Exception e) {
logger.error("json format exception", e);
return null;
}
List<LinkedHashMap> mapItemsList = JSONUtils.toList(content, LinkedHashMap.class);
if (null != mapItemsList) {
StringBuilder contents = new StringBuilder(100);
contents.append(String.format("`%s`%n", title));
for (String str : list) {
for (LinkedHashMap mapItems : mapItemsList) {
Set<Map.Entry<String, Object>> entries = mapItems.entrySet();
Iterator<Map.Entry<String, Object>> iterator = entries.iterator();
while (iterator.hasNext()) {
Map.Entry<String, Object> entry = iterator.next();
contents.append(Constants.MARKDOWN_QUOTE);
contents.append(str);
contents.append(entry.getKey()).append(":").append(entry.getValue());
contents.append(Constants.MARKDOWN_ENTER);
}
}
return contents.toString();
}
}
return null;
@ -278,4 +282,5 @@ public class EnterpriseWeChatUtils {
return result;
}
}
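
The rework above brings the text-alert path of `markdownText` in line with `markdownTable`: instead of joining a list of plain strings, it now renders the key/value pairs of each JSON object in the alert content. Below is a minimal, self-contained sketch of that behaviour, assuming Jackson on the classpath and using plain `>` / newline stand-ins for `Constants.MARKDOWN_QUOTE` / `Constants.MARKDOWN_ENTER`; it illustrates the new logic rather than reproducing the project code. (The placeholder-regex cleanup in the same file, e.g. `\{agentId\}` becoming `\{agentId}`, is behaviour-preserving, since a closing brace needs no escaping in a Java regular expression.)

```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class MarkdownTextSketch {

    // Stand-ins for Constants.MARKDOWN_QUOTE and Constants.MARKDOWN_ENTER
    private static final String MARKDOWN_QUOTE = ">";
    private static final String MARKDOWN_ENTER = "\n";

    static String markdownText(String title, String content) throws Exception {
        // The patch parses the alert content as a list of maps instead of a list of
        // plain strings, then emits one "key:value" quote line per map entry.
        List<LinkedHashMap<String, Object>> items = new ObjectMapper()
                .readValue(content, new TypeReference<List<LinkedHashMap<String, Object>>>() { });
        StringBuilder contents = new StringBuilder(String.format("`%s`%n", title));
        for (Map<String, Object> item : items) {
            for (Map.Entry<String, Object> entry : item.entrySet()) {
                contents.append(MARKDOWN_QUOTE)
                        .append(entry.getKey()).append(":").append(entry.getValue())
                        .append(MARKDOWN_ENTER);
            }
        }
        return contents.toString();
    }

    public static void main(String[] args) throws Exception {
        String content = "[{\"id\":\"69\",\"name\":\"UserBehavior-0--1193959466\",\"State\":\"SUCCESS\"}]";
        System.out.println(markdownText("Mysql Exception", content));
    }
}
```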

7
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java

@ -59,7 +59,7 @@ public class MailUtils {
public static final String STARTTLS_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
public static final String SSL_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);
public static final Boolean SSL_ENABLE = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
public static final String SSL_TRUST = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);
@ -213,6 +213,7 @@ public class MailUtils {
/**
* get session
*
* @return the new Session
*/
private static Session getSession() {
@ -222,8 +223,10 @@ public class MailUtils {
props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE);
props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, MAIL_PROTOCOL);
props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, STARTTLS_ENABLE);
props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, SSL_ENABLE);
if (SSL_ENABLE) {
props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, "true");
props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, SSL_TRUST);
}
Authenticator auth = new Authenticator() {
@Override
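
The `SSL_ENABLE` change above reads the setting as a `Boolean` instead of a raw string and only applies the SSL-related properties when it is true, so a missing or false value no longer writes a bad value into the mail session. A minimal sketch of the resulting session setup, assuming the JavaMail API on the classpath and hypothetical constants in place of the `PropertyUtils` lookups:

```java
import java.util.Properties;
import javax.mail.Authenticator;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;

public class MailSessionSketch {

    // Hypothetical configuration values standing in for the PropertyUtils lookups
    private static final String SMTP_HOST = "smtp.example.com";
    private static final String SMTP_PORT = "25";
    private static final boolean SSL_ENABLE = false;   // now read as a Boolean, not a String
    private static final String SSL_TRUST = "smtp.example.com";

    static Session getSession() {
        Properties props = new Properties();
        props.setProperty("mail.smtp.host", SMTP_HOST);
        props.setProperty("mail.smtp.port", SMTP_PORT);
        props.setProperty("mail.smtp.auth", "true");
        props.setProperty("mail.transport.protocol", "smtp");
        props.setProperty("mail.smtp.starttls.enable", "true");
        // SSL properties are only applied when SSL is actually enabled,
        // instead of always forwarding the raw property string.
        if (SSL_ENABLE) {
            props.setProperty("mail.smtp.ssl.enable", "true");
            props.setProperty("mail.smtp.ssl.trust", SSL_TRUST);
        }
        return Session.getInstance(props, new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                return new PasswordAuthentication("user", "password");
            }
        });
    }
}
```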

233
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java

@ -14,26 +14,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.alert.utils;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.plugin.model.AlertData;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.io.IOException;
import java.util.*;
import org.apache.dolphinscheduler.common.utils.*;
/**
* Please manually modify the configuration file before testing.
* file: alert.properties
@ -52,14 +54,18 @@ public class EnterpriseWeChatUtilsTest {
private static final String toParty = "wwc99134b6fc1edb6";
private static final String enterpriseWechatSecret = "Uuv2KFrkdf7SeKOsTDCpsTkpawXBMNRhFy6VKX5FV";
private static final String enterpriseWechatAgentId = "1000004";
private static final String enterpriseWechatUsers="LiGang,journey";
private static final String enterpriseWechatUsers = "LiGang,journey";
private static final String msg = "hello world";
private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"{toParty}\\\",\\\"agentid\\\":\\\"{agentId}\\\",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"{msg}\\\"},\\\"safe\\\":\\\"0\\\"}";
private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"{toUser}\\\",\\\"agentid\\\":\\\"{agentId}\\\",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"{msg}\\\"}}";
private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"{toParty}\\\",\\\"agentid\\\":\\\"{agentId}\\\""
+
",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"{msg}\\\"},\\\"safe\\\":\\\"0\\\"}";
private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"{toUser}\\\",\\\"agentid\\\":\\\"{agentId}\\\""
+
",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"{msg}\\\"}}";
@Before
public void init(){
public void init() {
PowerMockito.mockStatic(PropertyUtils.class);
Mockito.when(PropertyUtils.getBoolean(Constants.ENTERPRISE_WECHAT_ENABLE)).thenReturn(true);
Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG)).thenReturn(enterpriseWechatUserSendMsg);
@ -67,14 +73,13 @@ public class EnterpriseWeChatUtilsTest {
}
@Test
public void testIsEnable(){
public void testIsEnable() {
Boolean weChartEnable = EnterpriseWeChatUtils.isEnable();
Assert.assertTrue(weChartEnable);
}
@Test
public void testMakeTeamSendMsg1(){
public void testMakeTeamSendMsg1() {
String sendMsg = EnterpriseWeChatUtils.makeTeamSendMsg(toParty, enterpriseWechatSecret, msg);
Assert.assertTrue(sendMsg.contains(toParty));
Assert.assertTrue(sendMsg.contains(enterpriseWechatSecret));
@ -82,9 +87,8 @@ public class EnterpriseWeChatUtilsTest {
}
@Test
public void testMakeTeamSendMsg2(){
public void testMakeTeamSendMsg2() {
List<String> parties = new ArrayList<>();
parties.add(toParty);
parties.add("test1");
@ -96,7 +100,7 @@ public class EnterpriseWeChatUtilsTest {
}
@Test
public void tesMakeUserSendMsg1(){
public void tesMakeUserSendMsg1() {
String sendMsg = EnterpriseWeChatUtils.makeUserSendMsg(enterpriseWechatUsers, enterpriseWechatAgentId, msg);
Assert.assertTrue(sendMsg.contains(enterpriseWechatUsers));
@ -105,7 +109,7 @@ public class EnterpriseWeChatUtilsTest {
}
@Test
public void tesMakeUserSendMsg2(){
public void tesMakeUserSendMsg2() {
List<String> users = new ArrayList<>();
users.add("user1");
users.add("user2");
@ -118,7 +122,7 @@ public class EnterpriseWeChatUtilsTest {
}
@Test
public void testMarkdownByAlertForText(){
public void testMarkdownByAlertForText() {
Alert alertForText = createAlertForText();
AlertData alertData = new AlertData();
alertData.setTitle(alertForText.getTitle())
@ -129,7 +133,7 @@ public class EnterpriseWeChatUtilsTest {
}
@Test
public void testMarkdownByAlertForTable(){
public void testMarkdownByAlertForTable() {
Alert alertForText = createAlertForTable();
AlertData alertData = new AlertData();
alertData.setTitle(alertForText.getTitle())
@ -139,17 +143,26 @@ public class EnterpriseWeChatUtilsTest {
Assert.assertNotNull(result);
}
private Alert createAlertForText(){
String content ="[\"id:69\"," +
"\"name:UserBehavior-0--1193959466\"," +
"\"Job name: Start workflow\"," +
"\"State: SUCCESS\"," +
"\"Recovery:NO\"," +
"\"Run time: 1\"," +
"\"Start time: 2018-08-06 10:31:34.0\"," +
"\"End time: 2018-08-06 10:31:49.0\"," +
"\"Host: 192.168.xx.xx\"," +
"\"Notify group :4\"]";
private Alert createAlertForText() {
String content = "[{\"id\":\"69\","
+
"\"name\":\"UserBehavior-0--1193959466\","
+
"\"Job name\":\"Start workflow\","
+
"\"State\":\"SUCCESS\","
+
"\"Recovery\":\"NO\","
+
"\"Run time\":\"1\","
+
"\"Start time\": \"2018-08-06 10:31:34.0\","
+
"\"End time\": \"2018-08-06 10:31:49.0\","
+
"\"Host\": \"192.168.xx.xx\","
+
"\"Notify group\" :\"4\"}]";
Alert alert = new Alert();
alert.setTitle("Mysql Exception");
@ -161,18 +174,18 @@ public class EnterpriseWeChatUtilsTest {
return alert;
}
private String list2String(){
private String list2String() {
LinkedHashMap<String, Object> map1 = new LinkedHashMap<>();
map1.put("mysql service name","mysql200");
map1.put("mysql address","192.168.xx.xx");
map1.put("port","3306");
map1.put("no index of number","80");
map1.put("database client connections","190");
map1.put("mysql service name", "mysql200");
map1.put("mysql address", "192.168.xx.xx");
map1.put("port", "3306");
map1.put("no index of number", "80");
map1.put("database client connections", "190");
LinkedHashMap<String, Object> map2 = new LinkedHashMap<>();
map2.put("mysql service name","mysql210");
map2.put("mysql address","192.168.xx.xx");
map2.put("mysql service name", "mysql210");
map2.put("mysql address", "192.168.xx.xx");
map2.put("port", "3306");
map2.put("no index of number", "10");
map2.put("database client connections", "90");
@ -184,11 +197,11 @@ public class EnterpriseWeChatUtilsTest {
return mapjson;
}
private Alert createAlertForTable(){
private Alert createAlertForTable() {
Alert alert = new Alert();
alert.setTitle("Mysql Exception");
alert.setShowType(ShowType.TABLE);
String content= list2String();
String content = list2String();
alert.setContent(content);
alert.setAlertType(AlertType.EMAIL);
alert.setAlertGroupId(1);
@ -196,77 +209,75 @@ public class EnterpriseWeChatUtilsTest {
}
// @Test
// public void testSendSingleTeamWeChat() {
// try {
// String token = EnterpriseWeChatUtils.getToken();
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// @Test
// public void testSendMultiTeamWeChat() {
//
// try {
// String token = EnterpriseWeChatUtils.getToken();
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// @Test
// public void testSendSingleUserWeChat() {
// try {
// String token = EnterpriseWeChatUtils.getToken();
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" +
// ">**matter details** \n" +
// ">matter:<font color='info'>meeting</font> <br>" +
// ">organizer:@miglioguan \n" +
// ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" +
// "> \n" +
// ">meeting room:<font color='info'>Guangzhou TIT 1st Floor 301</font> \n" +
// ">date:<font color='warning'>May 18, 2018</font> \n" +
// ">time:<font color='comment'>9:00-11:00 am</font> \n" +
// "> \n" +
// ">please attend the meeting on time\n" +
// "> \n" +
// ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\"");
//
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// @Test
// public void testSendMultiUserWeChat() {
// try {
// String token = EnterpriseWeChatUtils.getToken();
//
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
// @Test
// public void testSendSingleTeamWeChat() {
// try {
// String token = EnterpriseWeChatUtils.getToken();
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// @Test
// public void testSendMultiTeamWeChat() {
//
// try {
// String token = EnterpriseWeChatUtils.getToken();
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// @Test
// public void testSendSingleUserWeChat() {
// try {
// String token = EnterpriseWeChatUtils.getToken();
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" +
// ">**matter details** \n" +
// ">matter:<font color='info'>meeting</font> <br>" +
// ">organizer:@miglioguan \n" +
// ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" +
// "> \n" +
// ">meeting room:<font color='info'>Guangzhou TIT 1st Floor 301</font> \n" +
// ">date:<font color='warning'>May 18, 2018</font> \n" +
// ">time:<font color='comment'>9:00-11:00 am</font> \n" +
// "> \n" +
// ">please attend the meeting on time\n" +
// "> \n" +
// ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\"");
//
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// @Test
// public void testSendMultiUserWeChat() {
// try {
// String token = EnterpriseWeChatUtils.getToken();
//
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
}
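
The escaped template strings near the top of this test correspond to the `ENTERPRISE_WE_CHAT_TEAM_SEND_MSG` / `ENTERPRISE_WE_CHAT_USER_SEND_MSG` templates that `makeTeamSendMsg` and `makeUserSendMsg` fill in via `replaceAll`. A small sketch of that substitution with a hypothetical template (the real templates are read from `alert.properties`):

```java
public class WeChatMsgTemplateSketch {

    // Hypothetical template in the shape used by the team-send-msg setting
    private static final String TEAM_SEND_MSG =
            "{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\","
                    + "\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}";

    static String makeTeamSendMsg(String toParty, String agentId, String msg) {
        // Only the opening brace needs escaping in the Java regex; the closing one is literal.
        return TEAM_SEND_MSG.replaceAll("\\{toParty}", toParty)
                .replaceAll("\\{agentId}", agentId)
                .replaceAll("\\{msg}", msg);
    }

    public static void main(String[] args) {
        System.out.println(makeTeamSendMsg("wwc99134b6fc1edb6", "1000004", "hello world"));
    }
}
```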

4
dolphinscheduler-api/pom.xml

@ -152,6 +152,10 @@
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
</exclusion>
</exclusions>
</dependency>

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java

@ -220,11 +220,7 @@ public class ServiceModelToSwagger2MapperImpl extends ServiceModelToSwagger2Mapp
if (resourceListing == null) {
return null;
}
ApiInfo info = resourceListing.getInfo();
if (info == null) {
return null;
}
return info;
return resourceListing.getInfo();
}
protected List<Tag> tagSetToTagList(Set<springfox.documentation.service.Tag> set) {

30
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java

@ -17,6 +17,12 @@
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_ACCESS_TOKEN_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_ACCESS_TOKEN_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GENERATE_TOKEN_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ACCESSTOKEN_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ACCESS_TOKEN_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
@ -24,20 +30,26 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
* access token controller

10
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java

@ -100,7 +100,7 @@ public class DataSourceController extends BaseController {
@RequestParam(value = "other") String other) {
logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {}, port: {}, database : {}, principal: {}, userName : {}, connectType: {}, other: {}",
loginUser.getUserName(), name, note, type, host, port, database, principal, userName, connectType, other);
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other);
Map<String, Object> result = dataSourceService.createDataSource(loginUser, name, note, type, parameter);
return returnDataList(result);
}
@ -155,7 +155,7 @@ public class DataSourceController extends BaseController {
@RequestParam(value = "other") String other) {
logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, connectType: {}, other: {}",
loginUser.getUserName(), name, note, type, connectType, other);
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other);
Map<String, Object> dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
return returnDataList(dataSource);
}
@ -280,7 +280,7 @@ public class DataSourceController extends BaseController {
@RequestParam(value = "other") String other) {
logger.info("login user {}, connect datasource: {}, note: {}, type: {}, connectType: {}, other: {}",
loginUser.getUserName(), name, note, type, connectType, other);
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other);
Boolean isConnection = dataSourceService.checkConnection(type, parameter);
Result result = new Result();
@ -310,7 +310,7 @@ public class DataSourceController extends BaseController {
@RequestParam("id") int id) {
logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id);
Boolean isConnection = dataSourceService.connectionTest(loginUser, id);
Boolean isConnection = dataSourceService.connectionTest(id);
Result result = new Result();
if (isConnection) {
@ -361,7 +361,7 @@ public class DataSourceController extends BaseController {
logger.info("login user {}, verfiy datasource name: {}",
loginUser.getUserName(), name);
return dataSourceService.verifyDataSourceName(loginUser, name);
return dataSourceService.verifyDataSourceName(name);
}

25
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java

@ -17,25 +17,34 @@
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
@ -70,7 +79,7 @@ public class LoggerController extends BaseController {
@GetMapping(value = "/detail")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_INSTANCE_LOG_ERROR)
public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
public Result<String> queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskInstanceId") int taskInstanceId,
@RequestParam(value = "skipLineNum") int skipNum,
@RequestParam(value = "limit") int limit) {

218
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java

@ -14,32 +14,65 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import com.fasterxml.jackson.core.JsonProcessingException;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_COPY_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_MOVE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_DEFINITION;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_VERSIONS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/**
@ -55,6 +88,9 @@ public class ProcessDefinitionController extends BaseController {
@Autowired
private ProcessDefinitionService processDefinitionService;
@Autowired
private ProcessDefinitionVersionService processDefinitionVersionService;
/**
* create process definition
*
@ -86,8 +122,8 @@ public class ProcessDefinitionController extends BaseController {
@RequestParam(value = "connects", required = true) String connects,
@RequestParam(value = "description", required = false) String description) throws JsonProcessingException {
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " +
"process_definition_json: {}, desc: {} locations:{}, connects:{}",
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, "
+ "process_definition_json: {}, desc: {} locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, json, description, locations, connects);
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json,
description, locations, connects);
@ -99,23 +135,61 @@ public class ProcessDefinitionController extends BaseController {
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return copy result code
*/
@ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "copyProcessDefinition", notes = "COPY_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
})
@PostMapping(value = "/copy")
@ResponseStatus(HttpStatus.OK)
@ApiException(COPY_PROCESS_DEFINITION_ERROR)
@ApiException(BATCH_COPY_PROCESS_DEFINITION_ERROR)
public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId) throws JsonProcessingException {
logger.info("copy process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
Map<String, Object> result = processDefinitionService.copyProcessDefinition(loginUser, projectName, processId);
return returnDataList(result);
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
}
/**
* move process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return move result code
*/
@ApiOperation(value = "moveProcessDefinition", notes = "MOVE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
})
@PostMapping(value = "/move")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_MOVE_PROCESS_DEFINITION_ERROR)
public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
}
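
Both new endpoints pass every user-supplied value through `StringUtils.replaceNRTtoUnderline` before logging it, which prevents a crafted parameter from forging extra log lines. A minimal sketch of such a sanitizer, under the assumption (the utility itself is not shown in this diff) that it maps newline, carriage-return and tab characters to underscores:

```java
public final class LogSanitizerSketch {

    private LogSanitizerSketch() {
    }

    // Assumed behaviour of the project helper: map \n, \r and \t to "_" and pass null through.
    public static String replaceNRTtoUnderline(String source) {
        return source == null ? null : source.replaceAll("[\n\r\t]", "_");
    }

    public static void main(String[] args) {
        String processDefinitionIds = "3,4\ninjected log line";
        System.out.println("batch copy process definition, ids:"
                + replaceNRTtoUnderline(processDefinitionIds));
    }
}
```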
/**
@ -156,7 +230,7 @@ public class ProcessDefinitionController extends BaseController {
* @return update result code
*/
@ApiOperation(value = "updateProcessDefinition", notes= "UPDATE_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "updateProcessDefinition", notes = "UPDATE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ -177,14 +251,97 @@ public class ProcessDefinitionController extends BaseController {
@RequestParam(value = "connects", required = false) String connects,
@RequestParam(value = "description", required = false) String description) {
logger.info("login user {}, update process define, project name: {}, process define name: {}, "
+ "process_definition_json: {}, desc: {}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects);
Map<String, Object> result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name,
processDefinitionJson, description, locations, connects);
return returnDataList(result);
}
/**
* query process definition version paging list info
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param pageNo the process definition version list current page number
* @param pageSize the process definition version list page size
* @param processDefinitionId the process definition id
* @return the process definition version list
*/
@ApiOperation(value = "queryProcessDefinitionVersions", notes = "QUERY_PROCESS_DEFINITION_VERSIONS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/versions")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_VERSIONS_ERROR)
public Result queryProcessDefinitionVersions(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "pageNo") int pageNo,
@RequestParam(value = "pageSize") int pageSize,
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
Map<String, Object> result = processDefinitionVersionService.queryProcessDefinitionVersions(loginUser
, projectName, pageNo, pageSize, processDefinitionId);
return returnDataList(result);
}
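The version list is paged through pageNo/pageSize request parameters, and the service is expected to reject values below 1 (see the QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR status added further down in this change). A small sketch of the assumed paging arithmetic:

public class PagingCheck {

    /** First row index of the requested page; both parameters are assumed to be 1-based. */
    static int offset(int pageNo, int pageSize) {
        if (pageNo < 1 || pageSize < 1) {
            throw new IllegalArgumentException("pageNo and pageSize must both be >= 1");
        }
        return (pageNo - 1) * pageSize;
    }

    public static void main(String[] args) {
        System.out.println(offset(3, 10));   // 20
    }
}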
/**
* switch certain process definition version
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param processDefinitionId the process definition id
* @param version the version user want to switch
* @return switch version result code
*/
@ApiOperation(value = "switchProcessDefinitionVersion", notes = "SWITCH_PROCESS_DEFINITION_VERSION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
})
@GetMapping(value = "/version/switch")
@ResponseStatus(HttpStatus.OK)
@ApiException(SWITCH_PROCESS_DEFINITION_VERSION_ERROR)
public Result switchProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "version") long version) {
Map<String, Object> result = processDefinitionService.switchProcessDefinitionVersion(loginUser, projectName
, processDefinitionId, version);
return returnDataList(result);
}
/**
* delete the certain process definition version by version and process definition id
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param processDefinitionId process definition id
* @param version the process definition version user want to delete
* @return delete version result code
*/
@ApiOperation(value = "deleteProcessDefinitionVersion", notes = "DELETE_PROCESS_DEFINITION_VERSION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
})
@GetMapping(value = "/version/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR)
public Result deleteProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "version") long version) {
Map<String, Object> result = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(loginUser, projectName, processDefinitionId, version);
return returnDataList(result);
}
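Both version endpoints are exposed as plain GET requests with query parameters. A hedged client-side sketch for the delete call; host, context path, project name and the sessionId header are assumptions about a concrete deployment, not values from this diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeleteVersionExample {
    public static void main(String[] args) throws Exception {
        URI uri = URI.create("http://localhost:12345/dolphinscheduler/projects/test-project/process/version/delete"
                + "?processDefinitionId=100&version=2");
        HttpRequest request = HttpRequest.newBuilder(uri)
                .header("sessionId", "<session-id>")   // assumed auth header
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}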
/**
* release process definition
*
@ -194,8 +351,7 @@ public class ProcessDefinitionController extends BaseController {
* @param releaseState release state
* @return release result code
*/
@ApiOperation(value = "releaseProcessDefinition", notes = "RELEASE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ -223,7 +379,7 @@ public class ProcessDefinitionController extends BaseController {
* @param processId process definition id
* @return process definition detail
*/
@ApiOperation(value = "queryProcessDefinitionById", notes = "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@ -271,7 +427,7 @@ public class ProcessDefinitionController extends BaseController {
* @param userId user id
* @return process definition page
*/
@ApiOperation(value = "queryProcessDefinitionListPaging", notes = "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"),
@ -365,7 +521,7 @@ public class ProcessDefinitionController extends BaseController {
public Result getNodeListByDefinitionIdList(
@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIdList") String processDefinitionIdList) {
logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}",
loginUser.getUserName(), projectName, processDefinitionIdList);
@ -420,7 +576,7 @@ public class ProcessDefinitionController extends BaseController {
logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processDefinitionIds)) {
String[] processDefinitionIdArray = processDefinitionIds.split(",");
@ -457,7 +613,7 @@ public class ProcessDefinitionController extends BaseController {
* @param response response
*/
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes = "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
})

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java

@ -370,7 +370,7 @@ public class ProcessInstanceController extends BaseController {
logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}",
loginUser.getUserName(), projectName, processInstanceIds);
// task queue
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processInstanceIds)) {
String[] processInstanceIdArray = processInstanceIds.split(",");

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java

@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
@ -226,6 +227,25 @@ public class ProjectController extends BaseController {
return returnDataList(result);
}
/**
* query user created project
*
* @param loginUser login user
* @return projects which the user create
*/
@ApiOperation(value = "queryProjectCreatedByUser", notes = "QUERY_USER_CREATED_PROJECT_NOTES")
@GetMapping(value = "/login-user-created-project")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_USER_CREATED_PROJECT_ERROR)
public Result queryProjectCreatedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, query authorized project by user id: {}.",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(String.valueOf(loginUser.getId())));
Map<String, Object> result = projectService.queryProjectCreatedByUser(loginUser);
return returnDataList(result);
}
/**
* import process definition
*

36
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java

@ -14,8 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_TENANT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_TENANT_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TENANT_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TENANT_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TENANT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_TENANT_CODE_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
@ -24,20 +31,26 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
@ -195,7 +208,6 @@ public class TenantController extends BaseController {
return returnDataList(result);
}
/**
* verify tenant code
*
@ -211,12 +223,10 @@ public class TenantController extends BaseController {
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_TENANT_CODE_ERROR)
public Result verifyTenantCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "tenantCode") String tenantCode) {
logger.info("login user {}, verify tenant code: {}",
loginUser.getUserName(), tenantCode);
return tenantService.verifyTenantCode(tenantCode);
}
}

56
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java

@ -35,10 +35,12 @@ import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* user controller
@ -432,14 +434,54 @@ public class UsersController extends BaseController {
@RequestParam(value = "userPassword") String userPassword,
@RequestParam(value = "repeatPassword") String repeatPassword,
@RequestParam(value = "email") String email) throws Exception {
userName = ParameterUtils.handleEscapes(userName);
userPassword = ParameterUtils.handleEscapes(userPassword);
repeatPassword = ParameterUtils.handleEscapes(repeatPassword);
email = ParameterUtils.handleEscapes(email);
logger.info("user self-register, userName: {}, userPassword {}, repeatPassword {}, email {}",
userName, Constants.PASSWORD_DEFAULT, Constants.PASSWORD_DEFAULT, email);
Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
return returnDataList(result);
}
/**
* user activate
*
* @param userName user name
*/
@ApiOperation(value = "activateUser", notes = "ACTIVATE_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"),
})
@PostMapping("/activate")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_USER_ERROR)
public Result<Object> activateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userName") String userName) {
userName = ParameterUtils.handleEscapes(userName);
logger.info("login user {}, activate user, userName: {}",
loginUser.getUserName(), userName);
Map<String, Object> result = usersService.activateUser(loginUser, userName);
return returnDataList(result);
}
/**
* user batch activate
*
* @param userNames user names
*/
@ApiOperation(value = "batchActivateUser", notes = "BATCH_ACTIVATE_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userNames", value = "USER_NAMES", type = "String"),
})
@PostMapping("/batch/activate")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_USER_ERROR)
public Result<Object> batchActivateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestBody List<String> userNames) {
List<String> formatUserNames = userNames.stream().map(ParameterUtils::handleEscapes).collect(Collectors.toList());
logger.info(" activate userNames: {}", formatUserNames);
Map<String, Object> result = usersService.batchActivateUser(loginUser, formatUserNames);
return returnDataList(result);
}
}
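Unlike the form-parameter endpoints elsewhere in this controller, batchActivateUser binds a JSON array from the request body (@RequestBody List<String>). A hedged JDK 11+ client sketch; the /dolphinscheduler/users prefix, port and sessionId header are deployment assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class BatchActivateExample {
    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:12345/dolphinscheduler/users/batch/activate"))
                .header("Content-Type", "application/json")
                .header("sessionId", "<session-id>")                    // assumed auth header
                .POST(HttpRequest.BodyPublishers.ofString("[\"userA\",\"userB\"]"))   // JSON array body
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}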

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java

@ -42,9 +42,10 @@ public class TaskCountDto {
countTaskDtos(taskInstanceStateCounts);
}
private void countTaskDtos(List<ExecuteStatusCount> taskInstanceStateCounts) {
int submittedSuccess = 0;
int runningExecution = 0;
int delayExecution = 0;
int readyPause = 0;
int pause = 0;
int readyStop = 0;
@ -55,15 +56,18 @@ public class TaskCountDto {
int kill = 0;
int waittingThread = 0;
for (ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts) {
ExecutionStatus status = taskInstanceStateCount.getExecutionStatus();
totalCount += taskInstanceStateCount.getCount();
switch (status) {
case SUBMITTED_SUCCESS:
submittedSuccess += taskInstanceStateCount.getCount();
break;
case RUNNING_EXECUTION:
runningExecution += taskInstanceStateCount.getCount();
break;
case DELAY_EXECUTION:
delayExecution += taskInstanceStateCount.getCount();
break;
case READY_PAUSE:
readyPause += taskInstanceStateCount.getCount();
@ -99,7 +103,8 @@ public class TaskCountDto {
}
this.taskCountDtos = new ArrayList<>();
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submittedSuccess));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXECUTION, runningExecution));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.DELAY_EXECUTION, delayExecution));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, readyPause));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, readyStop));
@ -111,8 +116,7 @@ public class TaskCountDto {
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waittingThread));
}
public List<TaskStateCount> getTaskCountDtos() {
return taskCountDtos;
}
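countTaskDtos keeps one local counter per ExecutionStatus, which is why adding DELAY_EXECUTION required a new variable, a new switch case and a new TaskStateCount entry. An alternative sketch that accumulates counts in an EnumMap, using simplified stand-ins for ExecutionStatus and ExecuteStatusCount rather than the project's real types:

import java.util.Arrays;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;

public class StatusCountSketch {

    enum ExecutionStatus { SUBMITTED_SUCCESS, RUNNING_EXECUTION, DELAY_EXECUTION, SUCCESS, FAILURE }

    static class ExecuteStatusCount {
        final ExecutionStatus status;
        final int count;

        ExecuteStatusCount(ExecutionStatus status, int count) {
            this.status = status;
            this.count = count;
        }
    }

    static Map<ExecutionStatus, Integer> count(List<ExecuteStatusCount> rows) {
        Map<ExecutionStatus, Integer> totals = new EnumMap<>(ExecutionStatus.class);
        for (ExecuteStatusCount row : rows) {
            totals.merge(row.status, row.count, Integer::sum);   // add to the running total for this status
        }
        return totals;
    }

    public static void main(String[] args) {
        System.out.println(count(Arrays.asList(
                new ExecuteStatusCount(ExecutionStatus.RUNNING_EXECUTION, 2),
                new ExecuteStatusCount(ExecutionStatus.DELAY_EXECUTION, 1),
                new ExecuteStatusCount(ExecutionStatus.RUNNING_EXECUTION, 3))));
    }
}

With this shape, a new status only needs a new enum constant rather than another counter variable.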

3
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilter.java

@ -57,11 +57,10 @@ public class ResourceFilter implements IFilter {
* @return file filtered by suffix
*/
public Set<Resource> fileFilter(){
return resourceList.stream().filter(t -> {
String alias = t.getAlias();
return alias.endsWith(suffix);
}).collect(Collectors.toSet());
}
/**

331
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@ -14,12 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.enums;
import java.util.Locale;
import org.springframework.context.i18n.LocaleContextHolder;
/**
* status enum
*/
@ -32,15 +33,15 @@ public enum Status {
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"),
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"),
USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"),
USER_NAME_NULL(10004, "user name is null", "用户名不能为空"),
HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"),
TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"),
TENANT_NAME_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"),
USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"),
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"),
ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"),
USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"),
LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"),
DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"),
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"),
TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"),
@ -53,105 +54,105 @@ public enum Status {
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"),
MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"),
SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"),
CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"),
QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"),
LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"),
UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"),
DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"),
ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"),
CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"),
UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"),
QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"),
CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"),
CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"),
DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"),
VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", "验证数据源名称失败"),
UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"),
AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"),
LOGIN_SUCCESS(10042, "login success", "登录成功"),
USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"),
LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"),
LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"),
UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"),
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error", "查询项目详细信息错误"),
CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"),
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error", "分页查询项目列表错误"),
DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"),
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"),
QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"),
QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"),
CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"),
UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"),
QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"),
QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"),
DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"),
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error", "资源名称或类型验证错误"),
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"),
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"),
RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"),
CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"),
VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"),
UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"),
QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"),
VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"),
DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"),
AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"),
AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"),
UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"),
CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"),
UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"),
OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"),
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", "分页查询调度配置列表错误"),
QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"),
QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"),
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"),
CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"),
QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"),
QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"),
UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"),
DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"),
VERIFY_TENANT_CODE_ERROR(10089, "verify tenant code error", "租户编码验证错误"),
CREATE_USER_ERROR(10090, "create user error", "创建用户错误"),
QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"),
UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"),
DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"),
GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"),
GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"),
GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"),
GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"),
GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"),
USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"),
VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"),
UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"),
AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"),
QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"),
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"),
CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"),
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流名称已存在"),
UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"),
RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"),
QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"),
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"),
UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"),
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"),
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"),
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"),
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"),
SIGN_OUT_ERROR(10123, "sign out error", "退出错误"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124, "tenant code has already exists", "租户编码已存在"),
IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"),
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"),
CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"),
QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"),
@ -159,24 +160,41 @@ public enum Status {
QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"),
UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"),
NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"),
VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"),
NAME_NULL(10134, "name must be not null", "名称不能为空"),
NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"),
SAVE_ERROR(10136, "save error", "保存错误"),
DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"),
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"),
USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"),
COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"),
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152
, "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"),
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153
, "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"),
QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"),
QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR(10155
, "query process definition versions error: [page number:{0}] < 1 or [page size:{1}] < 1", "查询工作流历史版本出错:[pageNo:{0}] < 1 或 [pageSize:{1}] < 1"),
DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"),
QUERY_USER_CREATED_PROJECT_ERROR(10157, "query user created project error", "查询用户创建的项目错误"),
PROCESS_DEFINITION_IDS_IS_EMPTY(10158, "process definition ids is empty", "工作流IDS不能为空"),
BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"),
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"),
QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"),
DELETE_PROCESS_DEFINITION_BY_ID_FAIL(10162, "delete process definition by id fail, for there are {0} process instances in executing using it", "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"),
@ -188,10 +206,10 @@ public enum Status {
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"),
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"),
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"),
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
@ -208,52 +226,51 @@ public enum Status {
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"),
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."),
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"),
START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "检查工作流实例错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022, "delete process definition by id error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026, "batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"),
BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"),
HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"),
/**
* for monitor
*/
QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"),
QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"),
CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"),
GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"),
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012, "query access token list paging error", "分页查询访问token列表错误"),
UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"),
DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"),
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"),
COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"),
NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"),
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"),
QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"),
KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"),
;
private final int code;
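Each status carries a numeric code plus an English and a Chinese message, and the message returned is chosen from the request locale via Spring's LocaleContextHolder. A minimal sketch of that pattern; field and method names are assumptions rather than the project's exact ones, and the sketch needs spring-context on the classpath.

import java.util.Locale;
import org.springframework.context.i18n.LocaleContextHolder;

public enum DemoStatus {
    SUCCESS(0, "success", "成功"),
    REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效");

    private final int code;
    private final String enMsg;
    private final String zhMsg;

    DemoStatus(int code, String enMsg, String zhMsg) {
        this.code = code;
        this.enMsg = enMsg;
        this.zhMsg = zhMsg;
    }

    public int getCode() {
        return code;
    }

    public String getMsg() {
        // fall back to English unless the current request locale is Simplified Chinese
        if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(LocaleContextHolder.getLocale().getLanguage())) {
            return zhMsg;
        }
        return enMsg;
    }
}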

5
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java

@ -18,17 +18,18 @@ package org.apache.dolphinscheduler.api.exceptions;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.method.HandlerMethod;
/**
* Exception Handler
*/
@RestControllerAdvice
@ResponseBody
public class ApiExceptionHandler {
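Switching from @ControllerAdvice to @RestControllerAdvice means handler return values are written straight to the response body. A minimal, self-contained sketch of that pattern; the exception type and payload below are illustrative and not the project's ApiException handling.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

@RestControllerAdvice
public class DemoExceptionAdvice {

    private static final Logger logger = LoggerFactory.getLogger(DemoExceptionAdvice.class);

    @ExceptionHandler(RuntimeException.class)
    public String handleRuntime(RuntimeException e) {
        logger.error("unhandled api error", e);
        // simplified JSON error payload; the return value is serialized to the body directly
        return "{\"code\":1,\"msg\":\"" + e.getMessage() + "\"}";
    }
}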

14
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java

@ -16,32 +16,28 @@
*/
package org.apache.dolphinscheduler.api.interceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.security.Authenticator;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.servlet.HandlerInterceptor;
/**
* login interceptor, must login first
*/
public class LoginHandlerInterceptor implements HandlerInterceptor {
private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class);
@Autowired
private SessionService sessionService;
@Autowired
private UserMapper userMapper;

124
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java

@ -16,35 +16,14 @@
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
/**
* access token service
*/
public interface AccessTokenService {
/**
* query access token list
@ -55,123 +34,44 @@ public class AccessTokenService extends BaseService {
* @param pageSize page size
* @return token list for page number and page size
*/
Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
/**
* create token
*
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return create result code
*/
Map<String, Object> createToken(int userId, String expireTime, String token);
/**
* generate token
*
* @param userId token for user
* @param expireTime token expire time
* @return token string
*/
Map<String, Object> generateToken(int userId, String expireTime);
/**
* delete access token
*
* @param loginUser login user
* @param id token id
* @return delete result code
*/
Map<String, Object> delAccessTokenById(User loginUser, int id);
/**
* update token by id
*
* @param id token id
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return update result code
*/
public Map<String, Object> updateToken(int id,int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setUpdateTime(new Date());
accessTokenMapper.updateById(accessToken);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> updateToken(int id, int userId, String expireTime, String token);
}
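A minimal caller sketch for the new AccessTokenService interface, assuming standard Spring MVC wiring; the controller class, mapping path and parameter names below are illustrative and not part of this change, only the service method signature comes from the interface above.

```java
import java.util.Map;

import org.apache.dolphinscheduler.api.service.AccessTokenService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

// Illustrative sketch only: a caller wired against the new AccessTokenService
// interface. Controller name, mapping and parameters are hypothetical.
@RestController
public class AccessTokenDemoController {

    @Autowired
    private AccessTokenService accessTokenService;

    @PostMapping("/access-token/generate")
    public Map<String, Object> generate(@RequestParam("userId") int userId,
                                        @RequestParam("expireTime") String expireTime) {
        // the returned map carries the token under Constants.DATA_LIST plus the
        // status fields written by putMsg(...) in the implementation
        return accessTokenService.generateToken(userId, expireTime);
    }
}
```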

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java

@ -55,7 +55,7 @@ public class AlertGroupService extends BaseService{
*/
public HashMap<String, Object> queryAlertgroup() {
HashMap<String, Object> result = new HashMap<>(5);
HashMap<String, Object> result = new HashMap<>();
List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList();
result.put(Constants.DATA_LIST, alertGroups);
putMsg(result, Status.SUCCESS);
@ -74,7 +74,7 @@ public class AlertGroupService extends BaseService{
*/
public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -101,7 +101,7 @@ public class AlertGroupService extends BaseService{
* @return create result code
*/
public Map<String, Object> createAlertgroup(User loginUser, String groupName, AlertType groupType, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (checkAdmin(loginUser, result)){
return result;
@ -138,7 +138,7 @@ public class AlertGroupService extends BaseService{
* @return update result code
*/
public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, AlertType groupType, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)){
return result;
@ -179,7 +179,7 @@ public class AlertGroupService extends BaseService{
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> delAlertgroupById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
@ -209,7 +209,7 @@ public class AlertGroupService extends BaseService{
* @return grant result code
*/
public Map<String, Object> grantUser(User loginUser, int alertgroupId, String userIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
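The hunks above mostly swap new HashMap<>(5) for new HashMap<>(). A small illustrative sketch of HashMap's documented sizing rule shows why the explicit capacity of 5 changes nothing meaningful: the table length is rounded up to the next power of two, so a requested capacity of 5 behaves like 8 and still resizes after 6 entries, while the no-arg constructor defaults to 16.

```java
// Illustrative sketch: mirrors java.util.HashMap's documented sizing behaviour
// (table length = next power of two >= requested capacity; resize once the
// size exceeds length * 0.75). Not part of the diff above.
public final class HashMapCapacityDemo {

    private static int tableLength(int requestedCapacity) {
        int n = 1;
        while (n < requestedCapacity) {
            n <<= 1;
        }
        return n;
    }

    public static void main(String[] args) {
        for (int requested : new int[]{5, 16}) {
            int length = tableLength(requested);
            System.out.printf("requested=%d tableLength=%d resizeThreshold=%d%n",
                    requested, length, (int) (length * 0.75f));
        }
        // requested=5  tableLength=8  resizeThreshold=6
        // requested=16 tableLength=16 resizeThreshold=12  (the no-arg default)
    }
}
```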

54
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java

@ -1,54 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import java.util.List;
/**
* base DAG service
*/
public class BaseDAGService extends BaseService{
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
public static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
}

17
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java

@ -16,6 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.text.MessageFormat;
import java.util.Map;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -24,11 +30,6 @@ import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.text.MessageFormat;
import java.util.Map;
/**
* base service
*/
@ -96,6 +97,7 @@ public class BaseService {
/**
* get cookie info by name
*
* @param request request
* @param name 'sessionId'
* @return get cookie info
@ -115,10 +117,11 @@ public class BaseService {
/**
* create tenant dir if not exists
*
* @param tenantCode tenant code
* @throws Exception if hdfs operation exception
*/
protected void createTenantDirIfNotExists(String tenantCode)throws Exception{
protected void createTenantDirIfNotExists(String tenantCode) throws Exception {
String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
@ -129,7 +132,7 @@ public class BaseService {
HadoopUtils.getInstance().mkdir(udfsPath);
}
protected boolean hasPerm(User operateUser, int createUserId){
protected boolean hasPerm(User operateUser, int createUserId) {
return operateUser.getId() == createUserId || isAdmin(operateUser);
}
}

317
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java

@ -17,57 +17,14 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.apache.dolphinscheduler.dao.entity.User;
import java.text.MessageFormat;
import java.util.*;
import java.util.Map;
/**
* data analysis service
*/
@Service
public class DataAnalysisService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class);
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefinitionMapper;
@Autowired
CommandMapper commandMapper;
@Autowired
ErrorCommandMapper errorCommandMapper;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
ProcessService processService;
public interface DataAnalysisService {
/**
* statistical task instance status data
@ -78,46 +35,7 @@ public class DataAnalysisService extends BaseService{
* @param endDate end date
* @return task state count data
*/
public Map<String,Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> taskInstanceStateCounts =
taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds);
if (taskInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
}
Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate);
/**
* statistical process instance status data
@ -128,37 +46,7 @@ public class DataAnalysisService extends BaseService{
* @param endDate end date
* @return process instance state count data
*/
public Map<String,Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> processInstanceStateCounts =
processInstanceMapper.countInstanceStateByUser(start, end,
projectIdArray);
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate);
/**
* statistics of the process definition quantities of a certain person
@ -167,20 +55,7 @@ public class DataAnalysisService extends BaseService{
* @param projectId project id
* @return definition count data
*/
public Map<String,Object> countDefinitionByUser(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>();
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
loginUser.getId(), projectIdArray,isAdmin(loginUser));
DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> countDefinitionByUser(User loginUser, int projectId);
/**
* statistical command status data
@ -191,189 +66,15 @@ public class DataAnalysisService extends BaseService{
* @param endDate end date
* @return command state count data
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
if (startDate != null && endDate != null){
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
// count command state
List<CommandCount> commandStateCounts =
commandMapper.countCommandState(
loginUser.getId(),
start,
end,
projectIdArray);
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
//
Map<CommandType,Map<String,Integer>> dataMap = new HashMap<>();
Map<String,Integer> commonCommand = new HashMap<>();
commonCommand.put("commandState",0);
commonCommand.put("errorCommandState",0);
// init data map
/**
* START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS,
START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD;
*/
dataMap.put(CommandType.START_PROCESS,commonCommand);
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS,commonCommand);
dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS,commonCommand);
dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,commonCommand);
dataMap.put(CommandType.START_FAILURE_TASK_PROCESS,commonCommand);
dataMap.put(CommandType.COMPLEMENT_DATA,commonCommand);
dataMap.put(CommandType.SCHEDULER,commonCommand);
dataMap.put(CommandType.REPEAT_RUNNING,commonCommand);
dataMap.put(CommandType.PAUSE,commonCommand);
dataMap.put(CommandType.STOP,commonCommand);
dataMap.put(CommandType.RECOVER_WAITTING_THREAD,commonCommand);
// put command state
for (CommandCount executeStatusCount : commandStateCounts){
Map<String,Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
commandStateCountsMap.put("commandState", executeStatusCount.getCount());
dataMap.put(executeStatusCount.getCommandType(),commandStateCountsMap);
}
// put error command state
for (CommandCount errorExecutionStatus : errorCommandStateCounts){
Map<String,Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getCommandType(),errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
Iterator<Map.Entry<CommandType, Map<String, Integer>>> iterator = dataMap.entrySet().iterator();
while (iterator.hasNext()){
Map.Entry<CommandType, Map<String, Integer>> next = iterator.next();
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"),
next.getValue().get("commandState"),next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
private Integer[] getProjectIdsArrays(User loginUser, int projectId) {
List<Integer> projectIds = new ArrayList<>();
if(projectId !=0){
projectIds.add(projectId);
}else if(loginUser.getUserType() == UserType.GENERAL_USER){
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if(projectIds.size() ==0 ){
projectIds.add(0);
}
}
return projectIds.toArray(new Integer[projectIds.size()]);
}
Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate);
/**
* count queue state
*
* @param loginUser login user
* @param projectId project id
* @return queue state count data
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String,Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER){
dataMap.put("taskQueue",tasksQueueList.size());
dataMap.put("taskKill",tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i =0;
for (String taskQueueStr : tasksQueueList){
if (StringUtils.isNotEmpty(taskQueueStr)){
String[] splits = taskQueueStr.split("_");
if (splits.length >= 4){
tasksQueueIds[i++] = Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList){
if (StringUtils.isNotEmpty(taskKillStr)){
String[] splits = taskKillStr.split("-");
if (splits.length == 2){
tasksKillIds[i++] = Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
if (tasksQueueIds.length != 0){
taskQueueCount = taskInstanceMapper.countTask(
projectIds,
tasksQueueIds);
}
if (tasksKillIds.length != 0){
taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds);
}
dataMap.put("taskQueue",taskQueueCount);
dataMap.put("taskKill",taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> countQueueState(User loginUser, int projectId);
private boolean checkProject(User loginUser, int projectId, Map<String, Object> result){
if(projectId != 0){
Project project = projectMapper.selectById(projectId);
return projectService.hasProjectAndPerm(loginUser, project, result);
}
return true;
}
}
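One subtlety in the countCommandState body removed above: the initialisation loop maps every CommandType key to the same mutable commonCommand instance, which is only safe because each later update copies the map before writing to it. A hedged alternative sketch, not what this PR does, gives each key its own counter map up front.

```java
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.common.enums.CommandType;

// Illustrative sketch only: one independent counter map per command type,
// instead of sharing a single mutable commonCommand instance.
public final class CommandStateCountersSketch {

    static Map<CommandType, Map<String, Integer>> initCounters() {
        Map<CommandType, Map<String, Integer>> dataMap = new EnumMap<>(CommandType.class);
        CommandType[] tracked = {
                CommandType.START_PROCESS, CommandType.START_CURRENT_TASK_PROCESS,
                CommandType.RECOVER_TOLERANCE_FAULT_PROCESS, CommandType.RECOVER_SUSPENDED_PROCESS,
                CommandType.START_FAILURE_TASK_PROCESS, CommandType.COMPLEMENT_DATA,
                CommandType.SCHEDULER, CommandType.REPEAT_RUNNING, CommandType.PAUSE,
                CommandType.STOP, CommandType.RECOVER_WAITTING_THREAD
        };
        for (CommandType commandType : tracked) {
            Map<String, Integer> counters = new HashMap<>();
            counters.put("commandState", 0);
            counters.put("errorCommandState", 0);
            dataMap.put(commandType, counters);
        }
        // updates can now mutate dataMap.get(commandType) in place, without the
        // defensive copy used in the original update loops
        return dataMap;
    }
}
```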

92
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java

@ -53,7 +53,7 @@ import static org.apache.dolphinscheduler.common.utils.PropertyUtils.getString;
* datasource service
*/
@Service
public class DataSourceService extends BaseService{
public class DataSourceService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class);
@ -65,7 +65,6 @@ public class DataSourceService extends BaseService{
public static final String PRINCIPAL = "principal";
public static final String DATABASE = "database";
public static final String USER_NAME = "userName";
public static final String PASSWORD = Constants.PASSWORD;
public static final String OTHER = "other";
@ -88,7 +87,7 @@ public class DataSourceService extends BaseService{
*/
public Map<String, Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
// check name can use or not
if (checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
@ -148,13 +147,13 @@ public class DataSourceService extends BaseService{
return result;
}
if(!hasPerm(loginUser, dataSource.getUserId())){
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
//check name can use or not
if(!name.trim().equals(dataSource.getName()) && checkName(name)){
if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
@ -190,15 +189,13 @@ public class DataSourceService extends BaseService{
private boolean checkName(String name) {
List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
if (queryDataSource != null && queryDataSource.size() > 0) {
return true;
}
return false;
return queryDataSource != null && queryDataSource.size() > 0;
}
/**
* updateProcessInstance datasource
*
* @param id datasource id
* @return data source detail
*/
@ -222,9 +219,9 @@ public class DataSourceService extends BaseService{
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
DbConnectType connectType = null;
String hostSeperator = Constants.DOUBLE_SLASH;
if(DbType.ORACLE.equals(dataSource.getType())){
if (DbType.ORACLE.equals(dataSource.getType())) {
connectType = ((OracleDataSource) datasourceForm).getConnectType();
if(DbConnectType.ORACLE_SID.equals(connectType)){
if (DbConnectType.ORACLE_SID.equals(connectType)) {
hostSeperator = Constants.AT_SIGN;
}
}
@ -233,7 +230,7 @@ public class DataSourceService extends BaseService{
String other = datasourceForm.getOther();
String address = datasourceForm.getAddress();
String[] hostsPorts = getHostsAndPort(address,hostSeperator);
String[] hostsPorts = getHostsAndPort(address, hostSeperator);
// ip host
String host = hostsPorts[0];
// port
@ -249,6 +246,7 @@ public class DataSourceService extends BaseService{
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
case PRESTO:
separator = "&";
break;
default:
@ -284,7 +282,6 @@ public class DataSourceService extends BaseService{
return result;
}
/**
* query datasource list by keyword
*
@ -301,14 +298,14 @@ public class DataSourceService extends BaseService{
if (isAdmin(loginUser)) {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
}else{
} else {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
}
List<DataSource> dataSources = dataSourceList.getRecords();
List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
handlePasswd(dataSources);
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
pageInfo.setTotalCount((int)(dataSourceList.getTotal()));
pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
pageInfo.setLists(dataSources);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
@ -318,6 +315,7 @@ public class DataSourceService extends BaseService{
/**
* handle datasource connection password for safety
*
* @param dataSourceList
*/
private void handlePasswd(List<DataSource> dataSourceList) {
@ -340,13 +338,13 @@ public class DataSourceService extends BaseService{
* @return data source list page
*/
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<DataSource> datasourceList;
if (isAdmin(loginUser)) {
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
}else{
} else {
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
}
@ -359,11 +357,10 @@ public class DataSourceService extends BaseService{
/**
* verify datasource exists
*
* @param loginUser login user
* @param name datasource name
* @return true if data datasource not exists, otherwise return false
*/
public Result verifyDataSourceName(User loginUser, String name) {
public Result verifyDataSourceName(String name) {
Result result = new Result();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && dataSourceList.size() > 0) {
@ -407,9 +404,9 @@ public class DataSourceService extends BaseService{
UserGroupInformation.loginUserFromKeytab(getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME),
getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
}
if (dbType == DbType.HIVE){
if (dbType == DbType.HIVE) {
datasource = JSONUtils.parseObject(parameter, HiveDataSource.class);
}else if (dbType == DbType.SPARK){
} else if (dbType == DbType.SPARK) {
datasource = JSONUtils.parseObject(parameter, SparkDataSource.class);
}
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
@ -430,20 +427,23 @@ public class DataSourceService extends BaseService{
datasource = JSONUtils.parseObject(parameter, DB2ServerDataSource.class);
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
case PRESTO:
datasource = JSONUtils.parseObject(parameter, PrestoDataSource.class);
Class.forName(Constants.COM_PRESTO_JDBC_DRIVER);
break;
default:
break;
}
if(datasource != null){
if (datasource != null) {
connection = DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword());
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
logger.error(e.getMessage(), e);
}
return connection;
}
/**
* check connection
*
@ -465,24 +465,24 @@ public class DataSourceService extends BaseService{
return isConnection;
}
/**
* test connection
*
* @param loginUser login user
* @param id datasource id
* @return connect result code
*/
public boolean connectionTest(User loginUser, int id) {
public boolean connectionTest(int id) {
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource != null) {
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
} else {
return false;
}
}
/**
* build parameters
*
* @param name data source name
* @param desc data source description
* @param type data source type
* @param host data source host
* @param port data source port
@ -493,7 +493,7 @@ public class DataSourceService extends BaseService{
* @param principal principal
* @return datasource parameter
*/
public String buildParameter(String name, String desc, DbType type, String host,
public String buildParameter(DbType type, String host,
String port, String database, String principal, String userName,
String password, DbConnectType connectType, String other) {
@ -505,7 +505,7 @@ public class DataSourceService extends BaseService{
}
if (CommonUtils.getKerberosStartupState() &&
(type == DbType.HIVE || type == DbType.SPARK)){
(type == DbType.HIVE || type == DbType.SPARK)) {
jdbcUrl += ";principal=" + principal;
}
@ -513,7 +513,8 @@ public class DataSourceService extends BaseService{
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())) {
|| Constants.ORACLE.equals(type.name())
|| Constants.PRESTO.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
@ -529,14 +530,14 @@ public class DataSourceService extends BaseService{
parameterMap.put(Constants.USER, userName);
parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
if (CommonUtils.getKerberosStartupState() &&
(type == DbType.HIVE || type == DbType.SPARK)){
parameterMap.put(Constants.PRINCIPAL,principal);
(type == DbType.HIVE || type == DbType.SPARK)) {
parameterMap.put(Constants.PRINCIPAL, principal);
}
if (other != null && !"".equals(other)) {
Map<String, String> map = JSONUtils.toMap(other);
if (map.size() > 0) {
StringBuilder otherSb = new StringBuilder();
for (Map.Entry<String, String> entry: map.entrySet()) {
for (Map.Entry<String, String> entry : map.entrySet()) {
otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
}
if (!Constants.DB2.equals(type.name())) {
@ -547,7 +548,7 @@ public class DataSourceService extends BaseService{
}
if(logger.isDebugEnabled()){
if (logger.isDebugEnabled()) {
logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap));
}
return JSONUtils.toJsonString(parameterMap);
@ -585,9 +586,12 @@ public class DataSourceService extends BaseService{
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
}else if (Constants.DB2.equals(type.name())) {
} else if (Constants.DB2.equals(type.name())) {
sb.append(Constants.JDBC_DB2);
sb.append(host).append(":").append(port);
} else if (Constants.PRESTO.equals(type.name())) {
sb.append(Constants.JDBC_PRESTO);
sb.append(host).append(":").append(port);
}
return sb.toString();
@ -606,12 +610,12 @@ public class DataSourceService extends BaseService{
try {
//query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if(dataSource == null){
if (dataSource == null) {
logger.error("resource id {} not exist", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if(!hasPerm(loginUser, dataSource.getUserId())){
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
@ -619,7 +623,7 @@ public class DataSourceService extends BaseService{
datasourceUserMapper.deleteByDatasourceId(datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("delete datasource error",e);
logger.error("delete datasource error", e);
throw new RuntimeException("delete datasource error");
}
return result;
@ -674,7 +678,7 @@ public class DataSourceService extends BaseService{
* @return authorized result code
*/
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
@ -695,7 +699,7 @@ public class DataSourceService extends BaseService{
* @return string array: [host,port]
*/
private String[] getHostsAndPort(String address) {
return getHostsAndPort(address,Constants.DOUBLE_SLASH);
return getHostsAndPort(address, Constants.DOUBLE_SLASH);
}
/**
@ -705,7 +709,7 @@ public class DataSourceService extends BaseService{
* @param separator separator
* @return string array: [host,port]
*/
private String[] getHostsAndPort(String address,String separator) {
private String[] getHostsAndPort(String address, String separator) {
String[] result = new String[2];
String[] tmpArray = address.split(separator);
String hostsAndPorts = tmpArray[tmpArray.length - 1];
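The getHostsAndPort helper above is cut off by the diff, but its first step is visible: split the JDBC address on the separator and keep the last element. Below is a standalone sketch of just that step, with hypothetical addresses and assuming Constants.DOUBLE_SLASH is "//" and Constants.AT_SIGN is "@" (the Oracle SID case).

```java
// Illustrative sketch only: reproduces the visible first step of
// getHostsAndPort(address, separator). The separator values and sample
// addresses are assumptions, not taken from the diff.
public final class HostsAndPortSketch {

    static String hostsAndPortsPart(String address, String separator) {
        // split on the separator and keep the trailing "host:port[...]" segment
        String[] tmpArray = address.split(separator);
        return tmpArray[tmpArray.length - 1];
    }

    public static void main(String[] args) {
        System.out.println(hostsAndPortsPart("jdbc:mysql://192.168.1.10:3306", "//"));
        // -> 192.168.1.10:3306
        System.out.println(hostsAndPortsPart("jdbc:oracle:thin:@192.168.1.11:1521", "@"));
        // -> 192.168.1.11:1521
    }
}
```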

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java

@ -98,7 +98,7 @@ public class ExecutorService extends BaseService{
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
String receivers, String receiversCc, RunMode runMode,
Priority processInstancePriority, String workerGroup, Integer timeout) throws ParseException {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR);
@ -176,7 +176,7 @@ public class ExecutorService extends BaseService{
* @return check result code
*/
public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (processDefinition == null) {
// check process definition exists
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId);
@ -201,7 +201,7 @@ public class ExecutorService extends BaseService{
* @return execute result code
*/
public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project);
@ -294,7 +294,7 @@ public class ExecutorService extends BaseService{
*/
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
ExecutionStatus executionStatus = processInstance.getState();
boolean checkResult = false;
switch (executeType) {
@ -339,7 +339,7 @@ public class ExecutorService extends BaseService{
* @return update result
*/
private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
processInstance.setCommandType(commandType);
processInstance.addHistoryCmd(commandType);
@ -365,7 +365,7 @@ public class ExecutorService extends BaseService{
* @return insert result code
*/
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Command command = new Command();
command.setCommandType(commandType);
command.setProcessDefinitionId(processDefinitionId);

93
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java

@ -16,45 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.nio.charset.StandardCharsets;
import javax.annotation.PreDestroy;
import org.apache.commons.lang.ArrayUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* log service
*/
@Service
public class LoggerService {
private static final Logger logger = LoggerFactory.getLogger(LoggerService.class);
private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";
@Autowired
private ProcessService processService;
private final LogClientService logClient;
public LoggerService() {
logClient = new LogClientService();
}
@PreDestroy
public void close() {
logClient.close();
}
public interface LoggerService {
/**
* view log
@ -64,36 +31,7 @@ public class LoggerService {
* @param limit limit
* @return log string data
*/
public Result queryLog(int taskInstId, int skipLineNum, int limit) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
return Result.error(Status.TASK_INSTANCE_NOT_FOUND);
}
String host = getHost(taskInstance.getHost());
Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(),
Constants.RPC_PORT);
StringBuilder log = new StringBuilder();
if (skipLineNum == 0) {
String head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR);
log.append(head);
}
log.append(logClient
.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit));
result.setData(log);
return result;
}
Result<String> queryLog(int taskInstId, int skipLineNum, int limit);
/**
@ -102,31 +40,6 @@ public class LoggerService {
* @param taskInstId task instance id
* @return log byte array
*/
public byte[] getLogBytes(int taskInstId) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
throw new RuntimeException("task instance is null or host is null");
}
String host = getHost(taskInstance.getHost());
byte[] head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8);
return ArrayUtils.addAll(head,
logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()));
}
byte[] getLogBytes(int taskInstId);
/**
* get host
*
* @param address address
* @return the host IP, or the original address if it is an old-version host string
*/
private String getHost(String address) {
if (Host.isOldVersion(address)) {
return address;
}
return Host.of(address).getIp();
}
}
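LoggerService is likewise reduced to an interface above. A minimal caller sketch follows, mirroring the queryLog usage that appears later in ProcessInstanceService in this same diff; the class name and wiring are assumptions.

```java
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

// Illustrative sketch only: reads a task log window through the new
// LoggerService interface. The 4098-line window and the ordinal-based status
// check mirror the ProcessInstanceService usage shown later in this diff.
@Component
public class TaskLogReader {

    @Autowired
    private LoggerService loggerService;

    public String readHead(int taskInstanceId) {
        Result<String> logResult = loggerService.queryLog(taskInstanceId, 0, 4098);
        if (logResult.getCode() == Status.SUCCESS.ordinal()) {
            return logResult.getData();
        }
        return "";
    }
}
```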

8
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java

@ -56,7 +56,7 @@ public class MonitorService extends BaseService {
* @return data base state
*/
public Map<String,Object> queryDatabaseState(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<MonitorRecord> monitorRecordList = monitorDBDao.queryDatabaseState();
@ -75,7 +75,7 @@ public class MonitorService extends BaseService {
*/
public Map<String,Object> queryMaster(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<Server> masterServers = getServerListFromZK(true);
result.put(Constants.DATA_LIST, masterServers);
@ -91,7 +91,7 @@ public class MonitorService extends BaseService {
* @return zookeeper information list
*/
public Map<String,Object> queryZookeeperState(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<ZookeeperRecord> zookeeperRecordList = zookeeperMonitor.zookeeperInfoList();
@ -111,7 +111,7 @@ public class MonitorService extends BaseService {
*/
public Map<String,Object> queryWorker(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<WorkerServerModel> workerServers = getServerListFromZK(false)
.stream()
.map((Server server) -> {

1359
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

File diff suppressed because it is too large

70
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java

@ -0,0 +1,70 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
public interface ProcessDefinitionVersionService {
/**
* add the newest version of one process definition
*
* @param processDefinition the process definition that need to record version
* @return the newest version number of this process definition
*/
long addProcessDefinitionVersion(ProcessDefinition processDefinition);
/**
* query the pagination versions info by one certain process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId process definition id
* @return the pagination process definition versions info of the certain process definition
*/
Map<String, Object> queryProcessDefinitionVersions(User loginUser, String projectName,
int pageNo, int pageSize, int processDefinitionId);
/**
* query one certain process definition version by version number and process definition id
*
* @param processDefinitionId process definition id
* @param version version number
* @return the process definition version info
*/
ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId,
long version);
/**
* delete one certain process definition by version number and process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param processDefinitionId process definition id
* @param version version number
* @return delete result code
*/
Map<String, Object> deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName,
int processDefinitionId, long version);
}
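The new ProcessDefinitionVersionService interface is consumed later in this diff when a process definition is updated. A hedged sketch of that call pattern follows; the surrounding class is hypothetical scaffolding, and only the version call and mapper update mirror the hunk in ProcessInstanceService.

```java
import java.util.Date;

import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

// Illustrative sketch only: records a new version row and stamps its number on
// the definition before persisting, as the ProcessInstanceService hunk does.
@Component
public class ProcessDefinitionSaverSketch {

    @Autowired
    private ProcessDefinitionVersionService processDefinitionVersionService;

    @Autowired
    private ProcessDefinitionMapper processDefineMapper;

    public boolean saveWithNewVersion(ProcessDefinition processDefinition) {
        processDefinition.setUpdateTime(new Date());
        // add process definition version, then keep the definition in sync
        long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
        processDefinition.setVersion(version);
        return processDefineMapper.updateById(processDefinition) > 0;
    }
}
```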

175
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java

@ -14,10 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
@ -31,20 +37,27 @@ import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
@ -52,16 +65,28 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.apache.dolphinscheduler.common.Constants.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service
*/
@Service
public class ProcessInstanceService extends BaseDAGService {
public class ProcessInstanceService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class);
@ -84,6 +109,9 @@ public class ProcessInstanceService extends BaseDAGService {
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
ExecutorService execService;
@ -94,18 +122,11 @@ public class ProcessInstanceService extends BaseDAGService {
LoggerService loggerService;
@Autowired
UsersService usersService;
/**
* return the top n SUCCESS process instances ordered by running time, started between startTime and endTime
* @param loginUser
* @param projectName
* @param size
* @param startTime
* @param endTime
* @return
*/
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) {
Map<String, Object> result = new HashMap<>();
@ -131,7 +152,7 @@ public class ProcessInstanceService extends BaseDAGService {
return result;
}
Date end = DateUtils.stringToDate(endTime);
if(start == null || end == null) {
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
return result;
}
@ -145,6 +166,7 @@ public class ProcessInstanceService extends BaseDAGService {
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by id
*
@ -154,7 +176,7 @@ public class ProcessInstanceService extends BaseDAGService {
* @return process instance detail
*/
public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -167,7 +189,7 @@ public class ProcessInstanceService extends BaseDAGService {
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setReceivers(processDefinition.getReceivers());
processInstance.setReceiversCc(processDefinition.getReceiversCc());
result.put(Constants.DATA_LIST, processInstance);
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
return result;
@ -190,10 +212,10 @@ public class ProcessInstanceService extends BaseDAGService {
*/
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId,
String startDate, String endDate,
String searchVal, String executorName,ExecutionStatus stateType, String host,
String searchVal, String executorName, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -222,18 +244,18 @@ public class ProcessInstanceService extends BaseDAGService {
return result;
}
Page<ProcessInstance> page = new Page(pageNo, pageSize);
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList =
processInstanceMapper.queryProcessInstanceListPaging(page,
project.getId(), processDefineId, searchVal, executorId,statusArray, host, start, end);
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
for(ProcessInstance processInstance: processInstances){
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(),processInstance.getEndTime()));
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = usersService.queryUser(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
@ -242,13 +264,11 @@ public class ProcessInstanceService extends BaseDAGService {
pageInfo.setTotalCount((int) processInstanceList.getTotal());
pageInfo.setLists(processInstances);
result.put(Constants.DATA_LIST, pageInfo);
result.put(DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task list by process instance id
*
@ -273,7 +293,7 @@ public class ProcessInstanceService extends BaseDAGService {
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(Constants.DATA_LIST, resultMap);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
@ -281,14 +301,13 @@ public class ProcessInstanceService extends BaseDAGService {
/**
* add dependent result for dependent task
* @param taskInstanceList
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for(TaskInstance taskInstance: taskInstanceList){
if(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())){
for (TaskInstance taskInstance : taskInstanceList) {
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) {
Result logResult = loggerService.queryLog(
taskInstance.getId(), 0, 4098);
if(logResult.getCode() == Status.SUCCESS.ordinal()){
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = (String) logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
@ -297,9 +316,9 @@ public class ProcessInstanceService extends BaseDAGService {
}
}
public Map<String,DependResult> parseLogForDependentResult(String log) throws IOException {
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if(StringUtils.isEmpty(log)){
if (StringUtils.isEmpty(log)) {
return resultMap;
}
@ -307,14 +326,14 @@ public class ProcessInstanceService extends BaseDAGService {
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
if(line.contains(DEPENDENT_SPLIT)){
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if(tmpStringArray.length != 2){
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if(dependStringArray.length != 2){
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
@ -325,7 +344,6 @@ public class ProcessInstanceService extends BaseDAGService {
return resultMap;
}
/**
* query sub process instance detail info by task id
*
@ -362,7 +380,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("subProcessInstanceId", subWorkflowInstance.getId());
result.put(Constants.DATA_LIST, dataMap);
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
@ -438,7 +456,7 @@ public class ProcessInstanceService extends BaseDAGService {
processInstance.setTimeout(timeout);
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
processDefinition.getUserId());
if(tenant != null){
if (tenant != null) {
processInstance.setTenantCode(tenant.getTenantCode());
}
processInstance.setProcessInstanceJson(processInstanceJson);
@ -453,6 +471,11 @@ public class ProcessInstanceService extends BaseDAGService {
processDefinition.setLocations(locations);
processDefinition.setConnects(connects);
processDefinition.setTimeout(timeout);
processDefinition.setUpdateTime(new Date());
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
processDefinition.setVersion(version);
updateDefine = processDefineMapper.updateById(processDefinition);
}
if (update > 0 && updateDefine > 0) {
@ -461,7 +484,6 @@ public class ProcessInstanceService extends BaseDAGService {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
return result;
}
@ -501,13 +523,14 @@ public class ProcessInstanceService extends BaseDAGService {
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId());
result.put(Constants.DATA_LIST, dataMap);
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete process instance by id; at the same time, delete task instances and their mapping relation data
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
@ -516,7 +539,7 @@ public class ProcessInstanceService extends BaseDAGService {
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -530,13 +553,10 @@ public class ProcessInstanceService extends BaseDAGService {
return result;
}
processService.removeTaskLogFile(processInstanceId);
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
@ -556,7 +576,7 @@ public class ProcessInstanceService extends BaseDAGService {
* @return variables data
*/
public Map<String, Object> viewVariables(Integer processInstanceId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
@ -568,7 +588,6 @@ public class ProcessInstanceService extends BaseDAGService {
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String workflowInstanceJson = processInstance.getProcessInstanceJson();
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);
@ -582,7 +601,6 @@ public class ProcessInstanceService extends BaseDAGService {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
List<TaskNode> taskNodeList = workflowData.getTasks();
// global param string
@ -594,7 +612,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
// local params
Map<String, Map<String,Object>> localUserDefParams = new HashMap<>();
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
Map<String, String> map = JSONUtils.toMap(parameter);
@ -603,9 +621,9 @@ public class ProcessInstanceService extends BaseDAGService {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String,Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType",taskNode.getType());
localParamsMap.put("localParamsList",localParamsList);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType", taskNode.getType());
localParamsMap.put("localParamsList", localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}
@ -618,7 +636,7 @@ public class ProcessInstanceService extends BaseDAGService {
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(Constants.DATA_LIST, resultMap);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
@ -668,9 +686,48 @@ public class ProcessInstanceService extends BaseDAGService {
}
ganttDto.setTasks(taskList);
result.put(Constants.DATA_LIST, ganttDto);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
/**
* query process instance by processDefinitionId and stateArray
* @param processDefinitionId processDefinitionId
* @param states states array
* @return process instance list
*/
public List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) {
return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states);
}
/**
* query process instance by processDefinitionId
* @param processDefinitionId processDefinitionId
* @param size size
* @return process instance list
*/
public List<ProcessInstance> queryByProcessDefineId(int processDefinitionId, int size) {
return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size);
}
}
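For illustration only (not part of this commit), a minimal sketch of how a caller might use the two query helpers added at the bottom of this class; the wrapper class and the way the service instance is obtained are assumptions, while Constants.NOT_TERMINATED_STATES is the same constant the tenant deletion check later in this commit relies on.
import java.util.List;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
public class ProcessInstanceQueryExample {
    // the service would normally be injected by Spring; it is passed in here to keep the sketch self-contained
    public static void printNonTerminatedInstances(ProcessInstanceService service, int processDefinitionId) {
        List<ProcessInstance> running =
                service.queryByProcessDefineIdAndStatus(processDefinitionId, Constants.NOT_TERMINATED_STATES);
        // the second helper simply limits the result to the latest `size` instances of the definition
        List<ProcessInstance> latest = service.queryByProcessDefineId(processDefinitionId, 10);
        running.forEach(instance -> System.out.println(instance.getId() + " -> " + instance.getState()));
        System.out.println("latest fetched: " + latest.size());
    }
}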

326
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java

@ -16,45 +16,15 @@
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
import java.util.Map;
/**
* project service
**/
@Service
public class ProjectService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(ProjectService.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
public interface ProjectService {
/**
* create project
@ -64,38 +34,7 @@ public class ProjectService extends BaseService{
* @param desc description
* @return returns an error if it exists
*/
public Map<String, Object> createProject(User loginUser, String name, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByName(name);
if (project != null) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
project = new Project();
Date now = new Date();
project.setName(name);
project.setDescription(desc);
project.setUserId(loginUser.getId());
project.setUserName(loginUser.getUserName());
project.setCreateTime(now);
project.setUpdateTime(now);
if (projectMapper.insert(project) > 0) {
Project insertedProject = projectMapper.queryByName(name);
result.put(Constants.DATA_LIST, insertedProject);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
}
Map<String, Object> createProject(User loginUser, String name, String desc);
/**
* query project details by id
@ -103,19 +42,7 @@ public class ProjectService extends BaseService{
* @param projectId project id
* @return project detail information
*/
public Map<String, Object> queryById(Integer projectId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.selectById(projectId);
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectId);
}
return result;
}
Map<String, Object> queryById(Integer projectId);
/**
* check project and authorization
@ -125,30 +52,9 @@ public class ProjectService extends BaseService{
* @param projectName project name
* @return true if the login user has permission to see the project
*/
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) {
Map<String, Object> result = new HashMap<>(5);
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
} else if (!checkReadPermission(loginUser, project)) {
// check read permission
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName);
}else {
putMsg(result, Status.SUCCESS);
}
return result;
}
Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName);
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
checkResult = true;
}
return checkResult;
}
boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result);
/**
* admin can view all projects
@ -159,29 +65,7 @@ public class ProjectService extends BaseService{
* @param pageNo page number
* @return project list which the login user has permission to see
*/
public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
Map<String, Object> result = new HashMap<>();
PageInfo pageInfo = new PageInfo<Project>(pageNo, pageSize);
Page<Project> page = new Page(pageNo, pageSize);
int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
List<Project> projectList = projectIPage.getRecords();
if(userId != 0){
for (Project project : projectList) {
project.setPerm(org.apache.dolphinscheduler.common.Constants.DEFAULT_ADMIN_PERMISSION);
}
}
pageInfo.setTotalCount((int)projectIPage.getTotal());
pageInfo.setLists(projectList);
result.put(Constants.COUNT, (int)projectIPage.getTotal());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal);
/**
* delete project by id
@ -190,50 +74,7 @@ public class ProjectService extends BaseService{
* @param projectId project id
* @return delete result code
*/
public Map<String, Object> deleteProject(User loginUser, Integer projectId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.selectById(projectId);
Map<String, Object> checkResult = getCheckResult(loginUser, project);
if (checkResult != null) {
return checkResult;
}
if (!hasPerm(loginUser, project.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId);
if(processDefinitionList.size() > 0){
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
int delete = projectMapper.deleteById(projectId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
}
/**
* get check result
*
* @param loginUser login user
* @param project project
* @return check result
*/
private Map<String, Object> getCheckResult(User loginUser, Project project) {
String projectName = project == null ? null:project.getName();
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
Map<String, Object> deleteProject(User loginUser, Integer projectId);
/**
* updateProcessInstance project
@ -244,37 +85,7 @@ public class ProjectService extends BaseService{
* @param desc description
* @return update result code
*/
public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.selectById(projectId);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
Project tempProject = projectMapper.queryByName(projectName);
if (tempProject != null && tempProject.getId() != projectId) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName);
return result;
}
project.setName(projectName);
project.setDescription(desc);
project.setUpdateTime(new Date());
int update = projectMapper.updateById(project);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
}
Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc);
/**
* query unauthorized project
@ -283,48 +94,7 @@ public class ProjectService extends BaseService{
* @param userId user id
* @return the projects which the user does not have permission to see
*/
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
/**
* query all project list except specified userId
*/
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId);
List<Project> resultList = new ArrayList<>();
Set<Project> projectSet = null;
if (projectList != null && projectList.size() > 0) {
projectSet = new HashSet<>(projectList);
List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
resultList = getUnauthorizedProjects(projectSet, authedProjectList);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* get unauthorized project
*
* @param projectSet project set
* @param authedProjectList authed project list
* @return the project list that is not authorized
*/
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) {
List<Project> resultList;
Set<Project> authedProjectSet = null;
if (authedProjectList != null && authedProjectList.size() > 0) {
authedProjectSet = new HashSet<>(authedProjectList);
projectSet.removeAll(authedProjectSet);
}
resultList = new ArrayList<>(projectSet);
return resultList;
}
Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId);
/**
* query authorized project
@ -333,83 +103,21 @@ public class ProjectService extends BaseService{
* @param userId user id
* @return projects which the user has permission to see, except for projects created by this user
*/
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
result.put(Constants.DATA_LIST, projects);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* check whether have read permission
*
* @param user user
* @param project project
* @return true if the user have permission to see the project, otherwise return false
*/
private boolean checkReadPermission(User user, Project project) {
int permissionId = queryPermission(user, project);
return (permissionId & Constants.READ_PERMISSION) != 0;
}
Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId);
/**
* query permission id
* query projects created by the login user
*
* @param user user
* @param project project
* @return permission
* @param loginUser login user
* @return projects created by the login user
*/
private int queryPermission(User user, Project project) {
if (user.getUserType() == UserType.ADMIN_USER) {
return Constants.READ_PERMISSION;
}
if (project.getUserId() == user.getId()) {
return Constants.ALL_PERMISSIONS;
}
ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
if (projectUser == null) {
return 0;
}
return projectUser.getPerm();
}
Map<String, Object> queryProjectCreatedByUser(User loginUser);
/**
* query all project list that have one or more process definitions.
*
* @return project list
*/
public Map<String, Object> queryAllProjectList() {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.selectList(null);
List<ProcessDefinition> processDefinitions = processDefinitionMapper.selectList(null);
if(projects != null){
Set set = new HashSet<>();
for (ProcessDefinition processDefinition : processDefinitions){
set.add(processDefinition.getProjectId());
}
List<Project> tempDeletelist = new ArrayList<Project>();
for (Project project : projects) {
if(!set.contains(project.getId())){
tempDeletelist.add(project);
}
}
projects.removeAll(tempDeletelist);
}
result.put(Constants.DATA_LIST, projects);
putMsg(result,Status.SUCCESS);
return result;
}
Map<String, Object> queryAllProjectList();
}
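For illustration only (not part of this commit): with ProjectService reduced to an interface, callers depend solely on the contract above and Spring wires in whichever @Service implements it, the same split this commit applies to AccessTokenService further down. A small assumed usage sketch; the project name and description are placeholders.
import java.util.Map;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.dao.entity.User;
public class ProjectServiceUsageExample {
    // the result map keeps the usual STATUS/MSG contract, with DATA_LIST set on success
    public static Map<String, Object> createDemoProject(ProjectService projectService, User loginUser) {
        return projectService.createProject(loginUser, "demo-project", "a project created for illustration");
    }
}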

8
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java

@ -59,7 +59,7 @@ public class QueueService extends BaseService {
* @return queue list
*/
public Map<String, Object> queryList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -81,7 +81,7 @@ public class QueueService extends BaseService {
* @return queue list
*/
public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -110,7 +110,7 @@ public class QueueService extends BaseService {
* @return create result
*/
public Map<String, Object> createQueue(User loginUser, String queue, String queueName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -159,7 +159,7 @@ public class QueueService extends BaseService {
* @return update result code
*/
public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
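For illustration only (not part of this commit): every change in this file is the cleanup that recurs throughout the commit, dropping the arbitrary initial capacity of 5 from result maps. A stand-alone restatement of the idiom:
import java.util.HashMap;
import java.util.Map;
public class ResultMapIdiomExample {
    // before: new HashMap<>(5) -- the capacity hint of 5 buys nothing for these small result maps
    // after:  new HashMap<>()  -- default capacity, resized on demand
    public static Map<String, Object> newResultMap() {
        return new HashMap<>();
    }
}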

17
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java

@ -272,10 +272,7 @@ public class ResourcesService extends BaseService {
private boolean checkResourceExists(String fullName, int userId, int type ){
List<Resource> resources = resourcesMapper.queryResourceList(fullName, userId, type);
if (resources != null && resources.size() > 0) {
return true;
}
return false;
return resources != null && resources.size() > 0;
}
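For illustration only (not part of this commit): the simplified return above is equivalent to the CollectionUtils helper already imported elsewhere in this commit; whether to prefer that form is a style choice, not something this diff changes.
import java.util.List;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
public class ResourceExistsCheckExample {
    // equivalent to: return resources != null && resources.size() > 0;
    public static boolean exists(List<?> resources) {
        return CollectionUtils.isNotEmpty(resources);
    }
}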
@ -402,7 +399,7 @@ public class ResourcesService extends BaseService {
putMsg(result, Status.SUCCESS);
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>(5);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
@ -447,7 +444,7 @@ public class ResourcesService extends BaseService {
*/
public Map<String, Object> queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {
HashMap<String, Object> result = new HashMap<>(5);
HashMap<String, Object> result = new HashMap<>();
Page<Resource> page = new Page(pageNo, pageSize);
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
@ -548,7 +545,7 @@ public class ResourcesService extends BaseService {
*/
public Map<String, Object> queryResourceList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
int userId = loginUser.getId();
if(isAdmin(loginUser)){
@ -571,7 +568,7 @@ public class ResourcesService extends BaseService {
*/
public Map<String, Object> queryResourceJarList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
int userId = loginUser.getId();
if(isAdmin(loginUser)){
userId = 0;
@ -1094,7 +1091,7 @@ public class ResourcesService extends BaseService {
* @return unauthorized result code
*/
public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (checkAdmin(loginUser, result)) {
return result;
@ -1146,7 +1143,7 @@ public class ResourcesService extends BaseService {
* @return authorized result
*/
public Map<String, Object> authorizedFile(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)){
return result;
}

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java

@ -452,7 +452,7 @@ public class SchedulerService extends BaseService {
* @return schedule list
*/
public Map<String, Object> queryScheduleList(User loginUser, String projectName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
@ -534,7 +534,7 @@ public class SchedulerService extends BaseService {
*/
public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -583,7 +583,7 @@ public class SchedulerService extends BaseService {
* @return the next five fire time
*/
public Map<String,Object> previewSchedule(User loginUser, String projectName, String schedule) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
CronExpression cronExpression;
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
Date now = new Date();

110
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java

@ -16,36 +16,15 @@
*/
package org.apache.dolphinscheduler.api.service;
import javax.servlet.http.HttpServletRequest;
import org.apache.dolphinscheduler.api.controller.BaseController;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.SessionMapper;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* session service
*/
@Service
public class SessionService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(SessionService.class);
@Autowired
private SessionMapper sessionMapper;
public interface SessionService {
/**
* get user session from request
@ -53,26 +32,7 @@ public class SessionService extends BaseService{
* @param request request
* @return session
*/
public Session getSession(HttpServletRequest request) {
String sessionId = request.getHeader(Constants.SESSION_ID);
if(StringUtils.isBlank(sessionId)) {
Cookie cookie = getCookie(request, Constants.SESSION_ID);
if (cookie != null) {
sessionId = cookie.getValue();
}
}
if(StringUtils.isBlank(sessionId)) {
return null;
}
String ip = BaseController.getClientIpAddress(request);
logger.debug("get session: {}, ip: {}", sessionId, ip);
return sessionMapper.selectById(sessionId);
}
Session getSession(HttpServletRequest request);
/**
* create session
@ -81,55 +41,7 @@ public class SessionService extends BaseService{
* @param ip ip
* @return session string
*/
@Transactional(rollbackFor = RuntimeException.class)
public String createSession(User user, String ip) {
Session session = null;
// already logged in
List<Session> sessionList = sessionMapper.queryByUserId(user.getId());
Date now = new Date();
/**
* if you have logged in and are still valid, return directly
*/
if (CollectionUtils.isNotEmpty(sessionList)) {
// if the session list size is greater than 1, delete the others and keep one
if (sessionList.size() > 1){
for (int i=1 ; i < sessionList.size();i++){
sessionMapper.deleteById(sessionList.get(i).getId());
}
}
session = sessionList.get(0);
if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) {
/**
* updateProcessInstance the latest login time
*/
session.setLastLoginTime(now);
sessionMapper.updateById(session);
return session.getId();
} else {
/**
* session expired, then delete this session first
*/
sessionMapper.deleteById(session.getId());
}
}
// assign new session
session = new Session();
session.setId(UUID.randomUUID().toString());
session.setIp(ip);
session.setUserId(user.getId());
session.setLastLoginTime(now);
sessionMapper.insert(session);
return session.getId();
}
String createSession(User user, String ip);
/**
* sign out
@ -138,17 +50,5 @@ public class SessionService extends BaseService{
* @param ip no use
* @param loginUser login user
*/
public void signOut(String ip, User loginUser) {
try {
/**
* query session by user id and ip
*/
Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(),ip);
//delete session
sessionMapper.deleteById(session.getId());
}catch (Exception e){
logger.warn("userId : {} , ip : {} , find more one session",loginUser.getId(),ip);
}
}
void signOut(String ip, User loginUser);
}
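For illustration only (not part of this commit): the extracted interface keeps the original behaviour shown above (session id read from the sessionId header or cookie, a still-valid session reused, an expired one replaced by a fresh UUID). A sketch of a caller, with the wrapper class assumed:
import javax.servlet.http.HttpServletRequest;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
public class SessionServiceUsageExample {
    // login path: the implementation reuses a valid existing session or issues a new one
    public static String login(SessionService sessionService, User user, String clientIp) {
        return sessionService.createSession(user, clientIp);
    }
    // request path: the session id comes from the header or cookie, as in the moved implementation
    public static Session resolve(SessionService sessionService, HttpServletRequest request) {
        return sessionService.getSession(request);
    }
}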

55
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java

@ -17,8 +17,6 @@
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
@ -32,11 +30,20 @@ import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.text.MessageFormat;
import java.util.*;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* task instance service
@ -79,11 +86,11 @@ public class TaskInstanceService extends BaseService {
* @param pageSize page size
* @return task list page
*/
public Map<String,Object> queryTaskListPaging(User loginUser, String projectName,
public Map<String, Object> queryTaskListPaging(User loginUser, String projectName,
Integer processInstanceId, String taskName, String executorName, String startDate,
String endDate, String searchVal, ExecutionStatus stateType,String host,
String endDate, String searchVal, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -93,23 +100,23 @@ public class TaskInstanceService extends BaseService {
}
int[] statusArray = null;
if(stateType != null){
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Date start = null;
Date end = null;
try {
if(StringUtils.isNotEmpty(startDate)){
if (StringUtils.isNotEmpty(startDate)) {
start = DateUtils.getScheduleDate(startDate);
if (start == null) {
return generateInvalidParamRes(result, "startDate");
}
if(StringUtils.isNotEmpty( endDate)){
}
if (StringUtils.isNotEmpty(endDate)) {
end = DateUtils.getScheduleDate(endDate);
if (end == null) {
return generateInvalidParamRes(result, "endDate");
}
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
return result;
}
Page<TaskInstance> page = new Page(pageNo, pageSize);
@ -124,18 +131,30 @@ public class TaskInstanceService extends BaseService {
exclusionSet.add("taskJson");
List<TaskInstance> taskInstanceList = taskInstanceIPage.getRecords();
for(TaskInstance taskInstance : taskInstanceList){
for (TaskInstance taskInstance : taskInstanceList) {
taskInstance.setDuration(DateUtils.differSec(taskInstance.getStartTime(), taskInstance.getEndTime()));
User executor = usersService.queryUser(taskInstance.getExecutorId());
if (null != executor) {
taskInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotalCount((int)taskInstanceIPage.getTotal());
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet));
pageInfo.setTotalCount((int) taskInstanceIPage.getTotal());
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet));
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name
* @param result existing result map
* @param params invalid parameter name
* @return updated result map
*/
private Map<String, Object> generateInvalidParamRes(Map<String, Object> result, String params) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params));
return result;
}
}
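For illustration only (not part of this commit): the newly extracted generateInvalidParamRes helper centralises the invalid start/end date response used above. A stand-alone copy of that logic, wrapped in an assumed example class:
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
public class InvalidParamResponseExample {
    // mirrors generateInvalidParamRes: mark the result as REQUEST_PARAMS_NOT_VALID_ERROR and name the bad parameter
    public static Map<String, Object> invalidParam(String paramName) {
        Map<String, Object> result = new HashMap<>();
        result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
        result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), paramName));
        return result;
    }
}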

273
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java

@ -14,63 +14,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* tenant service
*/
@Service
public class TenantService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(TenantService.class);
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private UserMapper userMapper;
public interface TenantService {
/**
* create tenant
*
*
* @param loginUser login user
* @param tenantCode tenant code
* @param tenantName tenant name
@ -79,53 +38,11 @@ public class TenantService extends BaseService{
* @return create result code
* @throws Exception exception
*/
@Transactional(rollbackFor = Exception.class)
public Map<String,Object> createTenant(User loginUser,
Map<String, Object> createTenant(User loginUser,
String tenantCode,
String tenantName,
int queueId,
String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
if (checkAdmin(loginUser, result)) {
return result;
}
if (checkTenantExists(tenantCode)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode);
return result;
}
Tenant tenant = new Tenant();
Date now = new Date();
if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")){
putMsg(result, Status.VERIFY_TENANT_CODE_ERROR);
return result;
}
tenant.setTenantCode(tenantCode);
tenant.setTenantName(tenantName);
tenant.setQueueId(queueId);
tenant.setDescription(desc);
tenant.setCreateTime(now);
tenant.setUpdateTime(now);
// save
tenantMapper.insert(tenant);
// if hdfs startup
if (PropertyUtils.getResUploadStartupState()){
createTenantDirIfNotExists(tenantCode);
}
putMsg(result, Status.SUCCESS);
return result;
}
String desc) throws Exception;
/**
* query tenant list paging
@ -136,24 +53,7 @@ public class TenantService extends BaseService{
* @param pageSize page size
* @return tenant list page
*/
public Map<String,Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Page<Tenant> page = new Page(pageNo, pageSize);
IPage<Tenant> tenantIPage = tenantMapper.queryTenantPaging(page, searchVal);
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int)tenantIPage.getTotal());
pageInfo.setLists(tenantIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
/**
* updateProcessInstance tenant
@ -167,63 +67,8 @@ public class TenantService extends BaseService{
* @return update result code
* @throws Exception exception
*/
public Map<String, Object> updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
if (checkAdmin(loginUser, result)) {
return result;
}
Tenant tenant = tenantMapper.queryById(id);
if (tenant == null){
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
// updateProcessInstance tenant
/**
* if the tenant code is modified, the original resource needs to be copied to the new tenant.
*/
if (!tenant.getTenantCode().equals(tenantCode)){
if (checkTenantExists(tenantCode)){
// if hdfs startup
if (PropertyUtils.getResUploadStartupState()){
String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources";
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
//init hdfs resource
HadoopUtils.getInstance().mkdir(resourcePath);
HadoopUtils.getInstance().mkdir(udfsPath);
}
}else {
putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS);
return result;
}
}
Date now = new Date();
if (StringUtils.isNotEmpty(tenantCode)){
tenant.setTenantCode(tenantCode);
}
if (StringUtils.isNotEmpty(tenantName)){
tenant.setTenantName(tenantName);
}
if (queueId != 0){
tenant.setQueueId(queueId);
}
tenant.setDescription(desc);
tenant.setUpdateTime(now);
tenantMapper.updateById(tenant);
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
Map<String, Object> updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId,
String desc) throws Exception;
/**
* delete tenant
@ -233,56 +78,7 @@ public class TenantService extends BaseService{
* @return delete result code
* @throws Exception exception
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Tenant tenant = tenantMapper.queryById(id);
if (tenant == null){
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
List<ProcessInstance> processInstances = getProcessInstancesByTenant(tenant);
if(CollectionUtils.isNotEmpty(processInstances)){
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size());
return result;
}
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryDefinitionListByTenant(tenant.getId());
if(CollectionUtils.isNotEmpty(processDefinitions)){
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size());
return result;
}
List<User> userList = userMapper.queryUserListByTenant(tenant.getId());
if(CollectionUtils.isNotEmpty(userList)){
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size());
return result;
}
// if resource upload startup
if (PropertyUtils.getResUploadStartupState()){
String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode();
if (HadoopUtils.getInstance().exists(tenantPath)){
HadoopUtils.getInstance().delete(tenantPath, true);
}
}
tenantMapper.deleteById(id);
processInstanceMapper.updateProcessInstanceByTenantId(id, -1);
putMsg(result, Status.SUCCESS);
return result;
}
private List<ProcessInstance> getProcessInstancesByTenant(Tenant tenant) {
return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), org.apache.dolphinscheduler.common.Constants.NOT_TERMINATED_STATES);
}
Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception;
/**
* query tenant list
@ -290,35 +86,7 @@ public class TenantService extends BaseService{
* @param loginUser login user
* @return tenant list
*/
public Map<String, Object> queryTenantList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
List<Tenant> resourceList = tenantMapper.selectList(null);
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query tenant list via tenant code
* @param tenantCode tenant code
* @return tenant list
*/
public Map<String, Object> queryTenantList(String tenantCode) {
Map<String, Object> result = new HashMap<>(5);
List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode);
if (CollectionUtils.isNotEmpty(resourceList)) {
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.TENANT_NOT_EXIST);
}
return result;
}
Map<String, Object> queryTenantList(User loginUser);
/**
* verify tenant code
@ -326,26 +94,5 @@ public class TenantService extends BaseService{
* @param tenantCode tenant code
* @return true if the tenant code can be used, otherwise return false
*/
public Result verifyTenantCode(String tenantCode) {
Result result = new Result();
if (checkTenantExists(tenantCode)) {
logger.error("tenant {} has exist, can't create again.", tenantCode);
putMsg(result, Status.TENANT_NAME_EXIST, tenantCode);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* check tenant exists
*
* @param tenantCode tenant code
* @return true if the tenant code exists, otherwise return false
*/
private boolean checkTenantExists(String tenantCode) {
List<Tenant> tenants = tenantMapper.queryByTenantCode(tenantCode);
return CollectionUtils.isNotEmpty(tenants);
}
Result verifyTenantCode(String tenantCode);
}
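For illustration only (not part of this commit): the tenant-code rule enforced by the createTenant body shown above (alphanumerics plus _ . -, not starting with '-' or '.') as a small runnable check; the class and method names are assumed.
public class TenantCodeCheckExample {
    // same rule as the createTenant validation above
    public static boolean isValidTenantCode(String tenantCode) {
        return tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$")
                && !tenantCode.startsWith("-")
                && !tenantCode.startsWith(".");
    }
    public static void main(String[] args) {
        System.out.println(isValidTenantCode("tenant_01")); // true
        System.out.println(isValidTenantCode(".hidden"));   // false
    }
}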

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java

@ -136,10 +136,7 @@ public class UdfFuncService extends BaseService{
*/
private boolean checkUdfFuncNameExists(String name){
List<UdfFunc> resource = udfFuncMapper.queryUdfByIdStr(null, name);
if(resource != null && resource.size() > 0){
return true;
}
return false;
return resource != null && resource.size() > 0;
}
@ -151,7 +148,7 @@ public class UdfFuncService extends BaseService{
*/
public Map<String, Object> queryUdfFuncDetail(int id) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
@ -247,7 +244,7 @@ public class UdfFuncService extends BaseService{
* @return udf function list page
*/
public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
@ -286,7 +283,7 @@ public class UdfFuncService extends BaseService{
* @return resource list
*/
public Map<String, Object> queryResourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type);
result.put(Constants.DATA_LIST, udfFuncList);

121
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java

@ -26,6 +26,7 @@ import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.*;
@ -39,6 +40,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
import java.util.stream.Collectors;
@ -101,7 +103,7 @@ public class UsersService extends BaseService {
String queue,
int state) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//check all user params
String msg = this.checkUserParams(userName, userPassword, email, phone);
@ -229,7 +231,7 @@ public class UsersService extends BaseService {
* @return user list page
*/
public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -269,7 +271,7 @@ public class UsersService extends BaseService {
String phone,
String queue,
int state) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
User user = userMapper.selectById(userId);
@ -392,7 +394,7 @@ public class UsersService extends BaseService {
* @throws Exception exception when operate hdfs
*/
public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM, id);
@ -432,7 +434,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
@ -482,7 +484,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -579,7 +581,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
@ -626,7 +628,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
@ -706,7 +708,7 @@ public class UsersService extends BaseService {
* @return user list
*/
public Map<String, Object> queryAllGeneralUsers(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -727,7 +729,7 @@ public class UsersService extends BaseService {
* @return user list
*/
public Map<String, Object> queryUserList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -771,7 +773,7 @@ public class UsersService extends BaseService {
*/
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -807,7 +809,7 @@ public class UsersService extends BaseService {
* @return authorized result code
*/
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -917,10 +919,11 @@ public class UsersService extends BaseService {
* @param repeatPassword repeat password
* @param email email
* @return register result code
* @throws Exception exception
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> registerUser(String userName, String userPassword, String repeatPassword, String email) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//check user params
String msg = this.checkUserParams(userName, userPassword, email, "");
@ -934,10 +937,100 @@ public class UsersService extends BaseService {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same");
return result;
}
User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal());
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, user);
return result;
}
/**
* activate user, only the system admin has permission, change user state code from 0 to 1
*
* @param loginUser login user
* @param userName user name
* @return activate result code
*/
public Map<String, Object> activateUser(User loginUser, String userName) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (!CheckUtils.checkUserName(userName)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
User user = userMapper.queryByUserNameAccurately(userName);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userName);
return result;
}
createUser(userName, userPassword, email, 1, "", "", 0);
if (user.getState() != Flag.NO.ordinal()) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
user.setState(Flag.YES.ordinal());
Date now = new Date();
user.setUpdateTime(now);
userMapper.updateById(user);
User responseUser = userMapper.queryByUserNameAccurately(userName);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, responseUser);
return result;
}
/**
* batch activate users, only the system admin has permission, change user state code from 0 to 1
*
* @param loginUser login user
* @param userNames user names
* @return batch activate result code
*/
public Map<String, Object> batchActivateUser(User loginUser, List<String> userNames) {
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
int totalSuccess = 0;
List<String> successUserNames = new ArrayList<>();
Map<String, Object> successRes = new HashMap<>();
int totalFailed = 0;
List<Map<String, String>> failedInfo = new ArrayList<>();
Map<String, Object> failedRes = new HashMap<>();
for (String userName : userNames) {
Map<String, Object> tmpResult = activateUser(loginUser, userName);
if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) {
totalFailed++;
Map<String, String> failedBody = new HashMap<>();
failedBody.put("userName", userName);
Status status = (Status) tmpResult.get(Constants.STATUS);
String errorMessage = MessageFormat.format(status.getMsg(), userName);
failedBody.put("msg", errorMessage);
failedInfo.add(failedBody);
} else {
totalSuccess++;
successUserNames.add(userName);
}
}
successRes.put("sum", totalSuccess);
successRes.put("userName", successUserNames);
failedRes.put("sum", totalFailed);
failedRes.put("info", failedInfo);
Map<String, Object> res = new HashMap<>();
res.put("success", successRes);
res.put("failed", failedRes);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, res);
return result;
}
}
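For illustration only (not part of this commit): batchActivateUser nests its summary under DATA_LIST as {"success": {"sum", "userName"}, "failed": {"sum", "info"}}. A sketch of a caller reading that structure, with the wrapper class assumed:
import java.util.Map;
import org.apache.dolphinscheduler.common.Constants;
public class BatchActivateResultExample {
    // reads the nested summary that batchActivateUser puts under DATA_LIST
    @SuppressWarnings("unchecked")
    public static void printSummary(Map<String, Object> result) {
        Map<String, Object> data = (Map<String, Object>) result.get(Constants.DATA_LIST);
        Map<String, Object> success = (Map<String, Object>) data.get("success");
        Map<String, Object> failed = (Map<String, Object>) data.get("failed");
        System.out.println("activated " + success.get("sum") + ": " + success.get("userName"));
        System.out.println("failed " + failed.get("sum") + ": " + failed.get("info"));
    }
}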

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java

@ -63,7 +63,7 @@ public class WorkerGroupService extends BaseService {
// list to index
Integer toIndex = (pageNo - 1) * pageSize + pageSize;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
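For illustration only (not part of this commit): the surrounding, unchanged method pages an in-memory worker group list using the toIndex computed above. A stand-alone illustration of that arithmetic; the fromIndex name is an assumption.
import java.util.Collections;
import java.util.List;
public class InMemoryPagingExample {
    // mirrors the (pageNo - 1) * pageSize + pageSize arithmetic visible above
    public static <T> List<T> page(List<T> items, int pageNo, int pageSize) {
        int fromIndex = (pageNo - 1) * pageSize;
        int toIndex = fromIndex + pageSize;
        if (fromIndex >= items.size()) {
            return Collections.emptyList();
        }
        return items.subList(fromIndex, Math.min(toIndex, items.size()));
    }
}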

186
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java

@ -0,0 +1,186 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* access token service impl
*/
@Service
public class AccessTokenServiceImpl extends BaseService implements AccessTokenService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
/**
* query access token list
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return token list for page number and page size
*/
public Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<AccessToken> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getId();
if (loginUser.getUserType() == UserType.ADMIN_USER) {
userId = 0;
}
IPage<AccessToken> accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId);
pageInfo.setTotalCount((int) accessTokenList.getTotal());
pageInfo.setLists(accessTokenList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create token
*
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return create result code
*/
public Map<String, Object> createToken(int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
if (userId <= 0) {
throw new IllegalArgumentException("User id should not less than or equals to 0.");
}
AccessToken accessToken = new AccessToken();
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setCreateTime(new Date());
accessToken.setUpdateTime(new Date());
// insert
int insert = accessTokenMapper.insert(accessToken);
if (insert > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ACCESS_TOKEN_ERROR);
}
return result;
}
/**
* generate token
*
* @param userId token for user
* @param expireTime token expire time
* @return token string
*/
public Map<String, Object> generateToken(int userId, String expireTime) {
Map<String, Object> result = new HashMap<>(5);
String token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis());
result.put(Constants.DATA_LIST, token);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete access token
*
* @param loginUser login user
* @param id token id
* @return delete result code
*/
public Map<String, Object> delAccessTokenById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
if (loginUser.getId() != accessToken.getUserId() &&
loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
accessTokenMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update token by id
*
* @param id token id
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return update result code
*/
public Map<String, Object> updateToken(int id, int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setUpdateTime(new Date());
accessTokenMapper.updateById(accessToken);
putMsg(result, Status.SUCCESS);
return result;
}
}
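For illustration only (not part of this commit): a sketch of the usual two-step flow against the interface this class implements, generating a token string and then persisting it. The wrapper class is assumed, and the expireTime format is whatever DateUtils.stringToDate accepts (typically yyyy-MM-dd HH:mm:ss).
import java.util.Map;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
public class AccessTokenFlowExample {
    // generateToken returns the MD5-based token string under DATA_LIST; createToken then inserts the record
    public static Map<String, Object> issueToken(AccessTokenService service, User user, String expireTime) {
        Map<String, Object> generated = service.generateToken(user.getId(), expireTime);
        String token = (String) generated.get(Constants.DATA_LIST);
        return service.createToken(user.getId(), expireTime, token);
    }
}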

384
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java

@ -0,0 +1,384 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.DataAnalysisService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.TriFunction;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* data analysis service impl
*/
@Service
public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService {
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ProcessService processService;
private static final String COMMAND_STATE = "commandState";
private static final String ERROR_COMMAND_STATE = "errorCommandState";
/**
* statistical task instance status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return task state count data
*/
public Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {
return countStateByProject(
loginUser,
projectId,
startDate,
endDate,
(start, end, projectIds) -> this.taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds));
}
/**
* statistical process instance status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return process instance state count data
*/
public Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {
return this.countStateByProject(
loginUser,
projectId,
startDate,
endDate,
(start, end, projectIds) -> this.processInstanceMapper.countInstanceStateByUser(start, end, projectIds));
}
private Map<String, Object> countStateByProject(User loginUser, int projectId, String startDate, String endDate
, TriFunction<Date, Date, Integer[], List<ExecuteStatusCount>> instanceStateCounter) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
Date start;
Date end;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(), e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> processInstanceStateCounts =
instanceStateCounter.apply(start, end, projectIdArray);
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* count process definition quantities, grouped by the user who created them
*
* @param loginUser login user
* @param projectId project id
* @return definition count data
*/
public Map<String, Object> countDefinitionByUser(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>();
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
loginUser.getId(), projectIdArray, isAdmin(loginUser));
DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* statistical command status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return command state count data
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
/**
* collect all commands in the projects visible to the user and
* count them by command type, together with the error-command counts
*/
Date start = null;
Date end = null;
if (startDate != null && endDate != null) {
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(), e);
putErrorRequestParamsMsg(result);
return result;
}
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
// count command state
List<CommandCount> commandStateCounts =
commandMapper.countCommandState(
loginUser.getId(),
start,
end,
projectIdArray);
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
// enumMap
Map<CommandType, Map<String, Integer>> dataMap = new EnumMap<>(CommandType.class);
Map<String, Integer> commonCommand = new HashMap<>();
commonCommand.put(COMMAND_STATE, 0);
commonCommand.put(ERROR_COMMAND_STATE, 0);
// init data map
/**
* START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS,
* START_FAILURE_TASK_PROCESS, COMPLEMENT_DATA, SCHEDULER, REPEAT_RUNNING, PAUSE, STOP, RECOVER_WAITTING_THREAD
*/
dataMap.put(CommandType.START_PROCESS, commonCommand);
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS, commonCommand);
dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS, commonCommand);
dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, commonCommand);
dataMap.put(CommandType.START_FAILURE_TASK_PROCESS, commonCommand);
dataMap.put(CommandType.COMPLEMENT_DATA, commonCommand);
dataMap.put(CommandType.SCHEDULER, commonCommand);
dataMap.put(CommandType.REPEAT_RUNNING, commonCommand);
dataMap.put(CommandType.PAUSE, commonCommand);
dataMap.put(CommandType.STOP, commonCommand);
dataMap.put(CommandType.RECOVER_WAITTING_THREAD, commonCommand);
// put command state
for (CommandCount executeStatusCount : commandStateCounts) {
Map<String, Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
commandStateCountsMap.put(COMMAND_STATE, executeStatusCount.getCount());
dataMap.put(executeStatusCount.getCommandType(), commandStateCountsMap);
}
// put error command state
for (CommandCount errorExecutionStatus : errorCommandStateCounts) {
Map<String, Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
errorCommandStateCountsMap.put(ERROR_COMMAND_STATE, errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getCommandType(), errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
for (Map.Entry<CommandType, Map<String, Integer>> next : dataMap.entrySet()) {
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get(ERROR_COMMAND_STATE),
next.getValue().get(COMMAND_STATE), next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
private Integer[] getProjectIdsArrays(User loginUser, int projectId) {
List<Integer> projectIds = new ArrayList<>();
if (projectId != 0) {
projectIds.add(projectId);
} else if (loginUser.getUserType() == UserType.GENERAL_USER) {
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if (projectIds.isEmpty()) {
projectIds.add(0);
}
}
return projectIds.toArray(new Integer[0]);
}
/**
* count queue state
*
* @param loginUser login user
* @param projectId project id
* @return queue state count data
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
// TODO tasksQueueList and tasksKillList are never populated, so the counts below are always zero.
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String, Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER) {
dataMap.put("taskQueue", tasksQueueList.size());
dataMap.put("taskKill", tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i = 0;
for (String taskQueueStr : tasksQueueList) {
if (StringUtils.isNotEmpty(taskQueueStr)) {
String[] splits = taskQueueStr.split("_");
if (splits.length >= 4) {
tasksQueueIds[i++] = Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList) {
if (StringUtils.isNotEmpty(taskKillStr)) {
String[] splits = taskKillStr.split("-");
if (splits.length == 2) {
tasksKillIds[i++] = Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
if (tasksQueueIds.length != 0) {
taskQueueCount = taskInstanceMapper.countTask(
projectIds,
tasksQueueIds);
}
if (tasksKillIds.length != 0) {
taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds);
}
dataMap.put("taskQueue", taskQueueCount);
dataMap.put("taskKill", taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkProject(User loginUser, int projectId, Map<String, Object> result) {
if (projectId != 0) {
Project project = projectMapper.selectById(projectId);
return projectService.hasProjectAndPerm(loginUser, project, result);
}
return true;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
}
}
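The countCommandState method above seeds every CommandType key with the same commonCommand instance and copies the map before mutating it; a short standalone illustration of why that defensive copy matters (the command types and counts below are only examples):

Map<String, Integer> shared = new HashMap<>();
shared.put("commandState", 0);
shared.put("errorCommandState", 0);
Map<CommandType, Map<String, Integer>> dataMap = new EnumMap<>(CommandType.class);
dataMap.put(CommandType.START_PROCESS, shared);
dataMap.put(CommandType.STOP, shared);
// Writing through the shared reference would change the row for every command type:
// dataMap.get(CommandType.START_PROCESS).put("commandState", 5); // would also show up under STOP
// What the service does instead: copy, mutate the copy, and put the copy back.
Map<String, Integer> copy = new HashMap<>(dataMap.get(CommandType.START_PROCESS));
copy.put("commandState", 5);
dataMap.put(CommandType.START_PROCESS, copy); // STOP still maps to {commandState=0, errorCommandState=0}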

146
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java

@ -0,0 +1,146 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.lang.ArrayUtils;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* log service
*/
@Service
public class LoggerServiceImpl implements LoggerService {
private static final Logger logger = LoggerFactory.getLogger(LoggerServiceImpl.class);
private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";
@Autowired
private ProcessService processService;
private LogClientService logClient;
@PostConstruct
public void init() {
if (Objects.isNull(this.logClient)) {
this.logClient = new LogClientService();
}
}
@PreDestroy
public void close() {
if (Objects.nonNull(this.logClient) && this.logClient.isRunning()) {
logClient.close();
}
}
/**
* view log
*
* @param taskInstId task instance id
* @param skipLineNum skip line number
* @param limit limit
* @return log string data
*/
@SuppressWarnings("unchecked")
public Result<String> queryLog(int taskInstId, int skipLineNum, int limit) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
return Result.error(Status.TASK_INSTANCE_NOT_FOUND);
}
String host = getHost(taskInstance.getHost());
Result<String> result = new Result<>(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(),
Constants.RPC_PORT);
StringBuilder log = new StringBuilder();
if (skipLineNum == 0) {
String head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR);
log.append(head);
}
log.append(logClient
.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit));
result.setData(log.toString());
return result;
}
/**
* get log content as bytes
*
* @param taskInstId task instance id
* @return log byte array
*/
public byte[] getLogBytes(int taskInstId) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
throw new ServiceException("task instance is null or host is null");
}
String host = getHost(taskInstance.getHost());
byte[] head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8);
return ArrayUtils.addAll(head,
logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()));
}
/**
* get host
*
* @param address address
* @return the address itself if it is an old-version address, otherwise the ip parsed from it
*/
private String getHost(String address) {
if (Boolean.TRUE.equals(Host.isOldVersion(address))) {
return address;
}
return Host.of(address).getIp();
}
}
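A hypothetical caller-side sketch of paging through a task log with queryLog above; the loggerService and taskInstanceId variables are assumptions, getData() is assumed to mirror the setData(...) call shown above, and rollViewLog is assumed to return an empty chunk once the end of the log is reached:

int skipped = 0;
final int limit = 100;
while (true) {
    Result<String> page = loggerService.queryLog(taskInstanceId, skipped, limit);
    if (page.getCode() != Status.SUCCESS.getCode()) {
        break; // e.g. TASK_INSTANCE_NOT_FOUND when the instance or its host is missing
    }
    String chunk = page.getData();
    if (StringUtils.isBlank(chunk)) {
        break; // assumed end-of-log condition
    }
    System.out.print(chunk); // the first chunk begins with the [LOG-PATH]/[HOST] header line
    skipped += limit;
}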

1731
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

File diff suppressed because it is too large

181
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java

@ -0,0 +1,181 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.ImmutableMap;
@Service
public class ProcessDefinitionVersionServiceImpl extends BaseService implements
ProcessDefinitionVersionService {
@Autowired
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProjectMapper projectMapper;
/**
* add the newest version of one process definition
*
* @param processDefinition the process definition that need to record version
* @return the newest version number of this process definition
*/
public long addProcessDefinitionVersion(ProcessDefinition processDefinition) {
long version = this.queryMaxVersionByProcessDefinitionId(processDefinition.getId()) + 1;
ProcessDefinitionVersion processDefinitionVersion = ProcessDefinitionVersion
.newBuilder()
.processDefinitionId(processDefinition.getId())
.version(version)
.processDefinitionJson(processDefinition.getProcessDefinitionJson())
.description(processDefinition.getDescription())
.locations(processDefinition.getLocations())
.connects(processDefinition.getConnects())
.timeout(processDefinition.getTimeout())
.globalParams(processDefinition.getGlobalParams())
.createTime(processDefinition.getUpdateTime())
.receivers(processDefinition.getReceivers())
.receiversCc(processDefinition.getReceiversCc())
.resourceIds(processDefinition.getResourceIds())
.build();
processDefinitionVersionMapper.insert(processDefinitionVersion);
return version;
}
/**
* query the max version number by the process definition id
*
* @param processDefinitionId process definition id
* @return the max version number of this id
*/
private long queryMaxVersionByProcessDefinitionId(int processDefinitionId) {
Long maxVersion = processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId(processDefinitionId);
if (Objects.isNull(maxVersion)) {
return 0L;
} else {
return maxVersion;
}
}
/**
* query the pagination versions info by one certain process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId process definition id
* @return the pagination process definition versions info of the certain process definition
*/
public Map<String, Object> queryProcessDefinitionVersions(User loginUser, String projectName, int pageNo, int pageSize, int processDefinitionId) {
Map<String, Object> result = new HashMap<>();
// check whether pageNo or pageSize is less than 1
if (pageNo <= 0 || pageSize <= 0) {
putMsg(result
, Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
, pageNo
, pageSize);
return result;
}
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
PageInfo<ProcessDefinitionVersion> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<ProcessDefinitionVersion> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinitionVersion> processDefinitionVersionsPaging = processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, processDefinitionId);
List<ProcessDefinitionVersion> processDefinitionVersions = processDefinitionVersionsPaging.getRecords();
pageInfo.setLists(processDefinitionVersions);
pageInfo.setTotalCount((int) processDefinitionVersionsPaging.getTotal());
return ImmutableMap.of(
Constants.MSG, Status.SUCCESS.getMsg()
, Constants.STATUS, Status.SUCCESS
, Constants.DATA_LIST, pageInfo);
}
/**
* query one certain process definition version by version number and process definition id
*
* @param processDefinitionId process definition id
* @param version version number
* @return the process definition version info
*/
public ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId, long version) {
return processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
}
/**
* delete one certain process definition by version number and process definition id
*
* @param loginUser login user info to check auth
* @param projectName process definition project name
* @param processDefinitionId process definition id
* @param version version number
* @return delete result code
*/
public Map<String, Object> deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName, int processDefinitionId, long version) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version);
putMsg(result, Status.SUCCESS);
return result;
}
}
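A hypothetical sketch of the intended call pattern for the version service above: record a snapshot after a definition is saved, page through its history, and drop an old snapshot; processDefinition, loginUser, and the "demo-project" name are assumptions from the surrounding context:

long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
// version is queryMaxVersionByProcessDefinitionId(id) + 1, so the first recorded snapshot is version 1

Map<String, Object> history = processDefinitionVersionService.queryProcessDefinitionVersions(
        loginUser, "demo-project", 1, 10, processDefinition.getId());
// on success the page of ProcessDefinitionVersion rows is stored under Constants.DATA_LIST as a PageInfo

processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
        loginUser, "demo-project", processDefinition.getId(), version);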

443
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java

@ -0,0 +1,443 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* project service implement
**/
@Service
public class ProjectServiceImpl extends BaseService implements ProjectService {
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create project
*
* @param loginUser login user
* @param name project name
* @param desc description
* @return create result code; an error is returned if a project with the same name already exists
*/
public Map<String, Object> createProject(User loginUser, String name, String desc) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByName(name);
if (project != null) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
Date now = new Date();
project = Project
.newBuilder()
.name(name)
.description(desc)
.userId(loginUser.getId())
.userName(loginUser.getUserName())
.createTime(now)
.updateTime(now)
.build();
if (projectMapper.insert(project) > 0) {
Project insertedProject = projectMapper.queryByName(name);
result.put(Constants.DATA_LIST, insertedProject);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
}
/**
* query project details by id
*
* @param projectId project id
* @return project detail information
*/
public Map<String, Object> queryById(Integer projectId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.selectById(projectId);
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectId);
}
return result;
}
/**
* check project and authorization
*
* @param loginUser login user
* @param project project
* @param projectName project name
* @return check result map; SUCCESS if the login user has permission to see the project
*/
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) {
Map<String, Object> result = new HashMap<>();
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
} else if (!checkReadPermission(loginUser, project)) {
// check read permission
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
checkResult = true;
}
return checkResult;
}
/**
* admin can view all projects
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return project list which the login user have permission to see
*/
public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
Map<String, Object> result = new HashMap<>();
PageInfo<Project> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<Project> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
List<Project> projectList = projectIPage.getRecords();
if (userId != 0) {
for (Project project : projectList) {
project.setPerm(Constants.DEFAULT_ADMIN_PERMISSION);
}
}
pageInfo.setTotalCount((int) projectIPage.getTotal());
pageInfo.setLists(projectList);
result.put(Constants.COUNT, (int) projectIPage.getTotal());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete project by id
*
* @param loginUser login user
* @param projectId project id
* @return delete result code
*/
public Map<String, Object> deleteProject(User loginUser, Integer projectId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.selectById(projectId);
Map<String, Object> checkResult = getCheckResult(loginUser, project);
if (checkResult != null) {
return checkResult;
}
if (!hasPerm(loginUser, project.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId);
if (!processDefinitionList.isEmpty()) {
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
int delete = projectMapper.deleteById(projectId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
}
/**
* get check result
*
* @param loginUser login user
* @param project project
* @return check result
*/
private Map<String, Object> getCheckResult(User loginUser, Project project) {
String projectName = project == null ? null : project.getName();
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
/**
* update project
*
* @param loginUser login user
* @param projectId project id
* @param projectName project name
* @param desc description
* @return update result code
*/
public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.selectById(projectId);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
Project tempProject = projectMapper.queryByName(projectName);
if (tempProject != null && tempProject.getId() != projectId) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName);
return result;
}
project.setName(projectName);
project.setDescription(desc);
project.setUpdateTime(new Date());
int update = projectMapper.updateById(project);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
}
/**
* query unauthorized project
*
* @param loginUser login user
* @param userId user id
* @return the projects which the user does not have permission to see
*/
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
/**
* query all project list except specified userId
*/
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId);
List<Project> resultList = new ArrayList<>();
Set<Project> projectSet = null;
if (projectList != null && !projectList.isEmpty()) {
projectSet = new HashSet<>(projectList);
List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
resultList = getUnauthorizedProjects(projectSet, authedProjectList);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get unauthorized project
*
* @param projectSet project set
* @param authedProjectList authed project list
* @return the projects in projectSet that are not in the authed project list
*/
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) {
List<Project> resultList;
Set<Project> authedProjectSet = null;
if (authedProjectList != null && !authedProjectList.isEmpty()) {
authedProjectSet = new HashSet<>(authedProjectList);
projectSet.removeAll(authedProjectSet);
}
resultList = new ArrayList<>(projectSet);
return resultList;
}
/**
* query authorized project
*
* @param loginUser login user
* @param userId user id
* @return projects which the user has been authorized to see, excluding the projects created by this user
*/
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query the projects created by the login user
*
* @param loginUser login user
* @return projects created by the login user
*/
public Map<String, Object> queryProjectCreatedByUser(User loginUser) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryProjectCreatedByUser(loginUser.getId());
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check whether have read permission
*
* @param user user
* @param project project
* @return true if the user has permission to see the project, otherwise false
*/
private boolean checkReadPermission(User user, Project project) {
int permissionId = queryPermission(user, project);
return (permissionId & Constants.READ_PERMISSION) != 0;
}
/**
* query permission id
*
* @param user user
* @param project project
* @return permission
*/
private int queryPermission(User user, Project project) {
if (user.getUserType() == UserType.ADMIN_USER) {
return Constants.READ_PERMISSION;
}
if (project.getUserId() == user.getId()) {
return Constants.ALL_PERMISSIONS;
}
ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
if (projectUser == null) {
return 0;
}
return projectUser.getPerm();
}
/**
* query all projects that contain at least one process definition.
*
* @return project list
*/
public Map<String, Object> queryAllProjectList() {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.selectList(null);
List<ProcessDefinition> processDefinitions = processDefinitionMapper.selectList(null);
if (projects != null) {
Set<Integer> set = new HashSet<>();
for (ProcessDefinition processDefinition : processDefinitions) {
set.add(processDefinition.getProjectId());
}
List<Project> tempDeletelist = new ArrayList<>();
for (Project project : projects) {
if (!set.contains(project.getId())) {
tempDeletelist.add(project);
}
}
projects.removeAll(tempDeletelist);
}
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
}
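A short illustration of the bitmask test used by checkReadPermission above; the concrete values of READ_PERMISSION and ALL_PERMISSIONS are not shown in this diff, so the comments only state what the code relies on (READ_PERMISSION being a bit that ALL_PERMISSIONS includes):

int none = 0;                              // no ProjectUser relation found
int granted = Constants.READ_PERMISSION;   // explicit read grant
int owner = Constants.ALL_PERMISSIONS;     // project owner; assumed to include the read bit

boolean grantedCanRead  = (granted & Constants.READ_PERMISSION) != 0; // true
boolean ownerCanRead    = (owner & Constants.READ_PERMISSION) != 0;   // true, given the assumption above
boolean strangerCanRead = (none & Constants.READ_PERMISSION) != 0;    // false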

158
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java

@ -0,0 +1,158 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.controller.BaseController;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.SessionMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* session service implement
*/
@Service
public class SessionServiceImpl extends BaseService implements SessionService {
private static final Logger logger = LoggerFactory.getLogger(SessionService.class);
@Autowired
private SessionMapper sessionMapper;
/**
* get user session from request
*
* @param request request
* @return session
*/
public Session getSession(HttpServletRequest request) {
String sessionId = request.getHeader(Constants.SESSION_ID);
if (StringUtils.isBlank(sessionId)) {
Cookie cookie = getCookie(request, Constants.SESSION_ID);
if (cookie != null) {
sessionId = cookie.getValue();
}
}
if (StringUtils.isBlank(sessionId)) {
return null;
}
String ip = BaseController.getClientIpAddress(request);
logger.debug("get session: {}, ip: {}", sessionId, ip);
return sessionMapper.selectById(sessionId);
}
/**
* create session
*
* @param user user
* @param ip ip
* @return session string
*/
@Transactional(rollbackFor = RuntimeException.class)
public String createSession(User user, String ip) {
Session session = null;
// check for an existing session of this user
List<Session> sessionList = sessionMapper.queryByUserId(user.getId());
Date now = new Date();
/**
* if the user is already logged in and the session is still valid, return it directly
*/
if (CollectionUtils.isNotEmpty(sessionList)) {
// if there is more than one session, delete the extras and keep the first one
if (sessionList.size() > 1) {
for (int i = 1; i < sessionList.size(); i++) {
sessionMapper.deleteById(sessionList.get(i).getId());
}
}
session = sessionList.get(0);
if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) {
/**
* update the latest login time
*/
session.setLastLoginTime(now);
sessionMapper.updateById(session);
return session.getId();
} else {
/**
* session expired, then delete this session first
*/
sessionMapper.deleteById(session.getId());
}
}
// assign new session
session = new Session();
session.setId(UUID.randomUUID().toString());
session.setIp(ip);
session.setUserId(user.getId());
session.setLastLoginTime(now);
sessionMapper.insert(session);
return session.getId();
}
/**
* sign out
* remove ip restrictions
*
* @param ip no use
* @param loginUser login user
*/
public void signOut(String ip, User loginUser) {
try {
/**
* query session by user id and ip
*/
Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(), ip);
//delete session
sessionMapper.deleteById(session.getId());
} catch (Exception e) {
logger.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip);
}
}
}
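A hypothetical login-handler sketch showing how getSession and createSession above are meant to work together; request, authenticatedUser, and clientIp are assumed to be supplied by the surrounding controller or interceptor:

Session session = sessionService.getSession(request); // header first, then the sessionId cookie
String sessionId;
if (session == null) {
    // no session could be resolved; createSession reuses a still-valid row for this user
    // and removes expired or duplicate rows before issuing a new UUID, as shown above
    sessionId = sessionService.createSession(authenticatedUser, clientIp);
} else {
    sessionId = session.getId();
}
// sessionId is then handed back to the client as the Constants.SESSION_ID cookie/header value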

331
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java

@ -0,0 +1,331 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.TenantService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* tenant service
*/
@Service
public class TenantServiceImpl extends BaseService implements TenantService {
private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class);
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private UserMapper userMapper;
/**
* create tenant
*
* @param loginUser login user
* @param tenantCode tenant code
* @param tenantName tenant name
* @param queueId queue id
* @param desc description
* @return create result code
* @throws Exception exception
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> createTenant(User loginUser,
String tenantCode,
String tenantName,
int queueId,
String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
if (checkAdmin(loginUser, result)) {
return result;
}
if (checkTenantExists(tenantCode)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode);
return result;
}
Tenant tenant = new Tenant();
Date now = new Date();
if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")) {
putMsg(result, Status.VERIFY_TENANT_CODE_ERROR);
return result;
}
tenant.setTenantCode(tenantCode);
tenant.setTenantName(tenantName);
tenant.setQueueId(queueId);
tenant.setDescription(desc);
tenant.setCreateTime(now);
tenant.setUpdateTime(now);
// save
tenantMapper.insert(tenant);
// if hdfs startup
if (PropertyUtils.getResUploadStartupState()) {
createTenantDirIfNotExists(tenantCode);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query tenant list paging
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return tenant list page
*/
public Map<String, Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Page<Tenant> page = new Page<>(pageNo, pageSize);
IPage<Tenant> tenantIPage = tenantMapper.queryTenantPaging(page, searchVal);
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) tenantIPage.getTotal());
pageInfo.setLists(tenantIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update tenant
*
* @param loginUser login user
* @param id tenant id
* @param tenantCode tenant code
* @param tenantName tenant name
* @param queueId queue id
* @param desc description
* @return update result code
* @throws Exception exception
*/
public Map<String, Object> updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId,
String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
if (checkAdmin(loginUser, result)) {
return result;
}
Tenant tenant = tenantMapper.queryById(id);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
// update tenant
/**
* if the tenant code is modified, the original resource needs to be copied to the new tenant.
*/
if (!tenant.getTenantCode().equals(tenantCode)) {
if (checkTenantExists(tenantCode)) {
// if hdfs startup
if (PropertyUtils.getResUploadStartupState()) {
String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources";
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
//init hdfs resource
HadoopUtils.getInstance().mkdir(resourcePath);
HadoopUtils.getInstance().mkdir(udfsPath);
}
} else {
putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS);
return result;
}
}
Date now = new Date();
if (StringUtils.isNotEmpty(tenantCode)) {
tenant.setTenantCode(tenantCode);
}
if (StringUtils.isNotEmpty(tenantName)) {
tenant.setTenantName(tenantName);
}
if (queueId != 0) {
tenant.setQueueId(queueId);
}
tenant.setDescription(desc);
tenant.setUpdateTime(now);
tenantMapper.updateById(tenant);
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
/**
* delete tenant
*
* @param loginUser login user
* @param id tenant id
* @return delete result code
* @throws Exception exception
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Tenant tenant = tenantMapper.queryById(id);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
List<ProcessInstance> processInstances = getProcessInstancesByTenant(tenant);
if (CollectionUtils.isNotEmpty(processInstances)) {
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size());
return result;
}
List<ProcessDefinition> processDefinitions =
processDefinitionMapper.queryDefinitionListByTenant(tenant.getId());
if (CollectionUtils.isNotEmpty(processDefinitions)) {
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size());
return result;
}
List<User> userList = userMapper.queryUserListByTenant(tenant.getId());
if (CollectionUtils.isNotEmpty(userList)) {
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size());
return result;
}
// if resource upload startup
if (PropertyUtils.getResUploadStartupState()) {
String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode();
if (HadoopUtils.getInstance().exists(tenantPath)) {
HadoopUtils.getInstance().delete(tenantPath, true);
}
}
tenantMapper.deleteById(id);
processInstanceMapper.updateProcessInstanceByTenantId(id, -1);
putMsg(result, Status.SUCCESS);
return result;
}
private List<ProcessInstance> getProcessInstancesByTenant(Tenant tenant) {
return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES);
}
/**
* query tenant list
*
* @param loginUser login user
* @return tenant list
*/
public Map<String, Object> queryTenantList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
List<Tenant> resourceList = tenantMapper.selectList(null);
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify tenant code
*
* @param tenantCode tenant code
* @return true if the tenant code can be used, otherwise false
*/
public Result verifyTenantCode(String tenantCode) {
Result result = new Result();
if (checkTenantExists(tenantCode)) {
putMsg(result, Status.TENANT_NAME_EXIST, tenantCode);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* check tenant exists
*
* @param tenantCode tenant code
* @return true if the tenant code exists, otherwise false
*/
private boolean checkTenantExists(String tenantCode) {
List<Tenant> tenants = tenantMapper.queryByTenantCode(tenantCode);
return CollectionUtils.isNotEmpty(tenants);
}
}
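A standalone sketch of the tenant-code rule enforced inline in createTenant above (only letters, digits, '_', '.', '-', and no leading '-' or '.'); the helper name and sample codes are hypothetical:

static boolean isValidTenantCode(String code) {
    return code.matches("^[0-9a-zA-Z_.-]{1,}$")
            && !code.startsWith("-")
            && !code.startsWith(".");
}
// isValidTenantCode("etl_user")       -> true
// isValidTenantCode("team.report-01") -> true
// isValidTenantCode("-hidden")        -> false (leading '-')
// isValidTenantCode("data lake")      -> false (space is not allowed)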

10
dolphinscheduler-api/src/main/resources/i18n/messages.properties

@ -173,7 +173,6 @@ PROCESS_DEFINITION_ID=process definition id
PROCESS_DEFINITION_IDS=process definition ids
RELEASE_PROCESS_DEFINITION_NOTES=release process definition
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id
COPY_PROCESS_DEFINITION_NOTES=copy process definition
QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list
@ -254,3 +253,12 @@ DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids
QUERY_USER_CREATED_PROJECT_NOTES= query user created project
COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
TARGET_PROJECT_ID= target project id
IS_COPY = is copy
DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version
VERSION=version

10
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties

@ -173,7 +173,6 @@ PROCESS_DEFINITION_ID=process definition id
PROCESS_DEFINITION_IDS=process definition ids
RELEASE_PROCESS_DEFINITION_NOTES=release process definition
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id
COPY_PROCESS_DEFINITION_NOTES=copy process definition
QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list
@ -254,3 +253,12 @@ DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids
QUERY_USER_CREATED_PROJECT_NOTES= query user created project
COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
TARGET_PROJECT_ID= target project id
IS_COPY = is copy
DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version
VERSION=version

11
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties

@ -171,7 +171,6 @@ UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义
PROCESS_DEFINITION_ID=流程定义ID
RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID
COPY_PROCESS_DEFINITION_NOTES=复制流程定义
QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表
QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义
@ -252,4 +251,12 @@ DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义
QUERY_USER_CREATED_PROJECT_NOTES= 查询用户创建的项目
COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义
MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义
TARGET_PROJECT_ID= 目标项目ID
IS_COPY = 是否复制
DELETE_PROCESS_DEFINITION_VERSION_NOTES=删除流程历史版本
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=查询流程历史版本信息
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本
VERSION=版本号

258
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java

@ -14,20 +14,33 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.junit.*;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
@ -36,18 +49,12 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mock.web.MockHttpServletResponse;
import javax.servlet.http.HttpServletResponse;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* process definition controller test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class ProcessDefinitionControllerTest{
public class ProcessDefinitionControllerTest {
private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class);
@ -55,12 +62,15 @@ public class ProcessDefinitionControllerTest{
private ProcessDefinitionController processDefinitionController;
@Mock
private ProcessDefinitionService processDefinitionService;
private ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private ProcessDefinitionVersionService processDefinitionVersionService;
protected User user;
@Before
public void before(){
public void before() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
@ -71,23 +81,27 @@ public class ProcessDefinitionControllerTest{
@Test
public void testCreateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\""
+ ":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\"
+ "necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\""
+ ",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
result.put("processDefinitionId", 1);
Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
@ -102,56 +116,64 @@ public class ProcessDefinitionControllerTest{
@Test
public void testVerifyProcessDefinitionName() throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROCESS_INSTANCE_EXIST);
String projectName = "test";
String name = "dag_test";
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user,projectName,name)).thenReturn(result);
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user, projectName, name)).thenReturn(result);
Result response = processDefinitionController.verifyProcessDefinitionName(user,projectName,name);
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),response.getCode().intValue());
Result response = processDefinitionController.verifyProcessDefinitionName(user, projectName, name);
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(), response.getCode().intValue());
}
@Test
public void updateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\""
+ ",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"}"
+ ",\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
result.put("processDefinitionId", 1);
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id,name, json,
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id, name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name,id, json,
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name, id, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testReleaseProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result);
Result response = processDefinitionController.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal());
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal())).thenReturn(result);
Result response = processDefinitionController.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionById() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":"
+ "\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
@ -168,31 +190,47 @@ public class ProcessDefinitionControllerTest{
processDefinition.setName(name);
processDefinition.setProcessDefinitionJson(json);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName,id);
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName, id)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testCopyProcessDefinition() throws Exception {
public void testBatchCopyProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
int targetProjectId = 2;
String id = "1";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.copyProcessDefinition(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName,id);
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName, id, targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testBatchMoveProcessDefinition() throws Exception {
String projectName = "test";
int targetProjectId = 2;
String id = "1";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
Result response = processDefinitionController.moveProcessDefinition(user, projectName, id, targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionList() throws Exception {
@ -200,22 +238,25 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
List<ProcessDefinition> resourceList = getDefinitionList();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, resourceList);
Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionList(user, projectName);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
public List<ProcessDefinition> getDefinitionList(){
public List<ProcessDefinition> getDefinitionList() {
List<ProcessDefinition> resourceList = new ArrayList<>();
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval"
+ "\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
@ -255,13 +296,13 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName,id);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName, id)).thenReturn(result);
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@ -269,13 +310,13 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionId(user,projectName,id);
Result response = processDefinitionController.getNodeListByDefinitionId(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
@ -283,69 +324,130 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
String idList = "1,2,3";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionIdList(user,projectName,idList);
Result response = processDefinitionController.getNodeListByDefinitionIdList(user, projectName, idList);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionAllByProjectId() throws Exception{
public void testQueryProcessDefinitionAllByProjectId() throws Exception {
int projectId = 1;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user,projectId);
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user, projectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testViewTree() throws Exception{
public void testViewTree() throws Exception {
String projectName = "test";
int processId = 1;
int limit = 2;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.viewTree(processId,limit)).thenReturn(result);
Result response = processDefinitionController.viewTree(user,projectName,processId,limit);
Mockito.when(processDefinitionService.viewTree(processId, limit)).thenReturn(result);
Result response = processDefinitionController.viewTree(user, projectName, processId, limit);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionListPaging() throws Exception{
public void testQueryProcessDefinitionListPaging() throws Exception {
String projectName = "test";
int pageNo = 1;
int pageSize = 10;
String searchVal = "";
int userId = 1;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
result.put(Constants.DATA_LIST,new PageInfo<Resource>(1,10));
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, new PageInfo<Resource>(1, 10));
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user,projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionListPaging(user,projectName,pageNo,searchVal,userId,pageSize);
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user, projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionListPaging(user, projectName, pageNo, searchVal, userId, pageSize);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testBatchExportProcessDefinitionByIds() throws Exception{
public void testBatchExportProcessDefinitionByIds() throws Exception {
String processDefinitionIds = "1,2";
String projectName = "test";
HttpServletResponse response = new MockHttpServletResponse();
ProcessDefinitionService service = new ProcessDefinitionService();
ProcessDefinitionService spy = Mockito.spy(service);
Mockito.doNothing().when(spy).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
Mockito.doNothing().when(this.processDefinitionService).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
}
@Test
public void testQueryProcessDefinitionVersions() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
resultMap.put(Constants.DATA_LIST, new PageInfo<ProcessDefinitionVersion>(1, 10));
Mockito.when(processDefinitionVersionService.queryProcessDefinitionVersions(
user
, projectName
, 1
, 10
, 1))
.thenReturn(resultMap);
Result result = processDefinitionController.queryProcessDefinitionVersions(
user
, projectName
, 1
, 10
, 1);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void testSwitchProcessDefinitionVersion() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
Mockito.when(processDefinitionService.switchProcessDefinitionVersion(
user
, projectName
, 1
, 10))
.thenReturn(resultMap);
Result result = processDefinitionController.switchProcessDefinitionVersion(
user
, projectName
, 1
, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void testDeleteProcessDefinitionVersion() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
Mockito.when(processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
user
, projectName
, 1
, 10))
.thenReturn(resultMap);
Result result = processDefinitionController.deleteProcessDefinitionVersion(
user
, projectName
, 1
, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
}
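Every updated test in ProcessDefinitionControllerTest follows the same stub-and-assert shape: the mocked service returns a Map keyed by a status constant, the controller wraps that map in a Result, and the test compares integer status codes. The sketch below reproduces that shape in a self-contained way; Status, Result, FooService and FooController are simplified stand-ins invented for illustration, not the DolphinScheduler classes, and only JUnit 4 plus Mockito are assumed on the classpath. The Silent runner mirrors the class above, where it tolerates stubbings that an individual test does not consume.

import static org.mockito.Mockito.when;

import java.util.HashMap;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.Silent.class)
public class ControllerStubSketchTest {

    // Simplified stand-ins for the real service/controller/Result/Status types.
    enum Status { SUCCESS }

    static class Result {
        private Integer code;
        Integer getCode() { return code; }
        void setCode(Integer code) { this.code = code; }
    }

    static class FooService {
        Map<String, Object> createFoo(String projectName, String name) {
            return new HashMap<>();
        }
    }

    static class FooController {
        FooService fooService;

        Result createFoo(String projectName, String name) {
            // Delegate to the service and translate its status map into a Result code.
            Map<String, Object> map = fooService.createFoo(projectName, name);
            Result result = new Result();
            result.setCode(map.get("status") == Status.SUCCESS ? 0 : 1);
            return result;
        }
    }

    @Mock
    private FooService fooService;

    @InjectMocks
    private FooController fooController;

    @Test
    public void controllerReturnsSuccessCodeWhenServiceSucceeds() {
        // Stub the service to return a result map, exactly as the tests above do.
        Map<String, Object> serviceResult = new HashMap<>();
        serviceResult.put("status", Status.SUCCESS);
        when(fooService.createFoo("test", "dag_test")).thenReturn(serviceResult);

        Result response = fooController.createFoo("test", "dag_test");
        Assert.assertEquals(0, response.getCode().intValue());
    }
}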

83  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java

@ -16,29 +16,27 @@
*/
package org.apache.dolphinscheduler.api.controller;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* process instance controller test
*/
public class ProcessInstanceControllerTest extends AbstractControllerTest {
private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class);
@Test
public void testQueryProcessInstanceList() throws Exception {
@ -52,31 +50,30 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest {
paramsMap.add("pageNo", "2");
paramsMap.add("pageSize", "2");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/list-paging","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/list-paging", "cxc_1113")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryTaskListByProcessId() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1203"))
.param("processInstanceId", "1203"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assert result != null;
Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(), result.getCode().intValue());
}
@Test
@ -91,110 +88,108 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest {
paramsMap.add("syncDefine", "false");
paramsMap.add("locations", locations);
paramsMap.add("connects", "[]");
// paramsMap.add("flag", "2");
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/instance/update","cxc_1113")
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/instance/update", "cxc_1113")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryProcessInstanceById() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-by-id","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-by-id", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1203"))
.param("processInstanceId", "1203"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testQuerySubProcessInstanceByTaskId() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-sub-process","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-sub-process", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("taskId","1203"))
.param("taskId", "1203"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS.getCode(), result.getCode().intValue());
}
@Test
public void testQueryParentInstanceBySubId() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-parent-process","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-parent-process", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("subId","1204"))
.param("subId", "1204"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE.getCode(), result.getCode().intValue());
}
@Test
public void testViewVariables() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1204"))
.param("processInstanceId", "1204"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testDeleteProcessInstanceById() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/delete","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/delete", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1204"))
.param("processInstanceId", "1204"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testBatchDeleteProcessInstanceByIds() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/batch-delete","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/batch-delete", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceIds","1205,1206"))
.param("processInstanceIds", "1205,1206"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), result.getCode().intValue());
}
}
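The instance-controller tests above exercise real HTTP routing through MockMvc: perform a GET with a path variable and query parameters, attach the session header, assert the HTTP status, then read the JSON body back into a Result. A minimal standalone sketch of that flow follows, assuming only spring-test and spring-webmvc; SketchController and its hard-coded JSON payload are illustrative stand-ins, not the real ProcessInstanceController or the AbstractControllerTest wiring.

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

public class InstanceEndpointSketchTest {

    // Throwaway controller standing in for the real instance endpoint.
    @RestController
    static class SketchController {
        @GetMapping("/projects/{projectName}/instance/select-by-id")
        public String selectById(@PathVariable("projectName") String projectName,
                                 @RequestParam("processInstanceId") int processInstanceId) {
            // The real controller returns a Result; a fixed JSON string is enough for the sketch.
            return "{\"code\":0,\"msg\":\"success\"}";
        }
    }

    private MockMvc mockMvc;

    @Before
    public void setUp() {
        // Standalone setup avoids loading a full Spring context.
        mockMvc = MockMvcBuilders.standaloneSetup(new SketchController()).build();
    }

    @Test
    public void getWithPathVariableAndParam() throws Exception {
        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-by-id", "cxc_1113")
                .header("sessionId", "fake-session")
                .param("processInstanceId", "1203"))
                .andExpect(status().isOk())
                .andReturn();

        String body = mvcResult.getResponse().getContentAsString();
        Assert.assertTrue(body.contains("\"code\":0"));
    }
}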

23  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java

@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.api.controller;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ResourceType;
@ -53,8 +52,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -77,8 +74,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -280,8 +275,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -302,8 +295,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -323,8 +314,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -343,8 +332,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -364,8 +351,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -385,8 +370,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -405,8 +388,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -426,8 +407,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
@ -445,8 +424,6 @@ public class ResourcesControllerTest extends AbstractControllerTest{
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());

40  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java

@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@ -33,6 +33,9 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.ArrayList;
import java.util.List;
/**
* users controller test
*/
@ -285,6 +288,39 @@ public class UsersControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testActivateUser() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("userName","user_test");
MvcResult mvcResult = mockMvc.perform(post("/users/activate")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
}
@Test
public void testBatchActivateUser() throws Exception {
List<String> userNames = new ArrayList<>();
userNames.add("user_sky_cxl");
userNames.add("19990323");
userNames.add("test_sky_post_11");
String jsonUserNames = JSONUtils.toJsonString(userNames);
MvcResult mvcResult = mockMvc.perform(post("/users/batch/activate")
.header(SESSION_ID, sessionId)
.contentType(MediaType.APPLICATION_JSON)
.content(jsonUserNames))
.andExpect(status().isOk())
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
}
}
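testBatchActivateUser differs from the form-parameter tests above: it serializes a List<String> to JSON and sends it as the request body. The standalone sketch below shows that request shape; SketchUsersController and its /users/batch/activate mapping are invented stand-ins for the real handler, and Jackson is assumed to be on the classpath so MockMvc can bind the @RequestBody.

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.Arrays;
import java.util.List;

import org.junit.Before;
import org.junit.Test;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

import com.fasterxml.jackson.databind.ObjectMapper;

public class BatchActivateSketchTest {

    // Stand-in controller; the real one delegates to UsersService.
    @RestController
    static class SketchUsersController {
        @PostMapping("/users/batch/activate")
        public String batchActivate(@RequestBody List<String> userNames) {
            return "{\"code\":0,\"msg\":\"success\"}";
        }
    }

    private MockMvc mockMvc;
    private final ObjectMapper objectMapper = new ObjectMapper();

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.standaloneSetup(new SketchUsersController()).build();
    }

    @Test
    public void postsJsonBody() throws Exception {
        List<String> userNames = Arrays.asList("user_sky_cxl", "19990323", "test_sky_post_11");
        String jsonUserNames = objectMapper.writeValueAsString(userNames);

        // POST the JSON array with an explicit content type, mirroring the test above.
        mockMvc.perform(post("/users/batch/activate")
                .header("sessionId", "fake-session")
                .contentType(MediaType.APPLICATION_JSON)
                .content(jsonUserNames))
                .andExpect(status().isOk());
    }
}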

3  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilterTest.java

@ -49,10 +49,9 @@ public class ResourceFilterTest {
allList.add(resource6);
allList.add(resource7);
ResourceFilter resourceFilter = new ResourceFilter(".jar",allList);
List<Resource> resourceList = resourceFilter.filter();
Assert.assertNotNull(resourceList);
resourceList.stream().forEach(t-> logger.info(t.toString()));
resourceList.forEach(t -> logger.info(t.toString()));
}
}

105  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java

@ -16,10 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import java.util.Calendar;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.AccessTokenServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -27,9 +29,14 @@ import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.junit.After;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
@ -38,131 +45,109 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class AccessTokenServiceTest {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceTest.class);
@InjectMocks
private AccessTokenService accessTokenService ;
private AccessTokenServiceImpl accessTokenService;
@Mock
private AccessTokenMapper accessTokenMapper;
@Before
public void setUp() {
}
@After
public void after(){
}
@Test
public void testQueryAccessTokenList(){
@SuppressWarnings("unchecked")
public void testQueryAccessTokenList() {
IPage<AccessToken> tokenPage = new Page<>();
tokenPage.setRecords(getList());
tokenPage.setTotal(1L);
when(accessTokenMapper.selectAccessTokenPage(any(Page.class),eq("zhangsan"),eq(0))).thenReturn(tokenPage);
when(accessTokenMapper.selectAccessTokenPage(any(Page.class), eq("zhangsan"), eq(0))).thenReturn(tokenPage);
User user =new User();
Map<String, Object> result = accessTokenService.queryAccessTokenList(user,"zhangsan",1,10);
User user = new User();
Map<String, Object> result = accessTokenService.queryAccessTokenList(user, "zhangsan", 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
PageInfo<AccessToken> pageInfo = (PageInfo<AccessToken>) result.get(Constants.DATA_LIST);
Assert.assertTrue(pageInfo.getTotalCount()>0);
Assert.assertTrue(pageInfo.getTotalCount() > 0);
}
@Test
public void testCreateToken(){
public void testCreateToken() {
when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2);
Map<String, Object> result = accessTokenService.createToken(1,getDate(),"AccessTokenServiceTest");
Map<String, Object> result = accessTokenService.createToken(1, getDate(), "AccessTokenServiceTest");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGenerateToken(){
public void testGenerateToken() {
Map<String, Object> result = accessTokenService.generateToken(Integer.MAX_VALUE,getDate());
Map<String, Object> result = accessTokenService.generateToken(Integer.MAX_VALUE, getDate());
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
String token = (String) result.get(Constants.DATA_LIST);
Assert.assertNotNull(token);
}
@Test
public void testDelAccessTokenById(){
public void testDelAccessTokenById() {
when(accessTokenMapper.selectById(1)).thenReturn(getEntity());
User userLogin = new User();
// not exist
Map<String, Object> result = accessTokenService.delAccessTokenById(userLogin,0);
Map<String, Object> result = accessTokenService.delAccessTokenById(userLogin, 0);
logger.info(result.toString());
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST,result.get(Constants.STATUS));
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS));
// no operate
result = accessTokenService.delAccessTokenById(userLogin,1);
result = accessTokenService.delAccessTokenById(userLogin, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
userLogin.setId(1);
userLogin.setUserType(UserType.ADMIN_USER);
result = accessTokenService.delAccessTokenById(userLogin,1);
result = accessTokenService.delAccessTokenById(userLogin, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testUpdateToken(){
public void testUpdateToken() {
when(accessTokenMapper.selectById(1)).thenReturn(getEntity());
Map<String, Object> result = accessTokenService.updateToken(1,Integer.MAX_VALUE,getDate(),"token");
Map<String, Object> result = accessTokenService.updateToken(1, Integer.MAX_VALUE, getDate(), "token");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
// not exist
result = accessTokenService.updateToken(2,Integer.MAX_VALUE,getDate(),"token");
result = accessTokenService.updateToken(2, Integer.MAX_VALUE, getDate(), "token");
logger.info(result.toString());
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST,result.get(Constants.STATUS));
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS));
}
/**
* create entity
* @return
*/
private AccessToken getEntity(){
private AccessToken getEntity() {
AccessToken accessToken = new AccessToken();
accessToken.setId(1);
accessToken.setUserId(1);
accessToken.setToken("AccessTokenServiceTest");
Date date = DateUtils.add(new Date(),Calendar.DAY_OF_MONTH, 30);
Date date = DateUtils.add(new Date(), Calendar.DAY_OF_MONTH, 30);
accessToken.setExpireTime(date);
return accessToken;
}
/**
* entity list
* @return
*/
private List<AccessToken> getList(){
private List<AccessToken> getList() {
List<AccessToken> list = new ArrayList<>();
list.add(getEntity());
@ -170,12 +155,10 @@ public class AccessTokenServiceTest {
}
/**
* get dateStr
* @return
*/
private String getDate(){
private String getDate() {
Date date = DateUtils.add(new Date(), Calendar.DAY_OF_MONTH, 30);
return DateUtils.dateToString(date);
}
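The service-layer tests above drop the Spring context entirely: the service implementation is built by @InjectMocks, its mapper is a @Mock, and each assertion reads the status constant back out of the returned map. A compact self-contained sketch of that arrangement follows; TokenMapper, TokenServiceImpl and the local Status enum are invented stand-ins for AccessTokenMapper and AccessTokenServiceImpl, and only JUnit 4 plus Mockito are assumed.

import static org.mockito.Mockito.when;

import java.util.HashMap;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class TokenServiceSketchTest {

    enum Status { SUCCESS, ACCESS_TOKEN_NOT_EXIST }

    // Stand-ins for the mapper and service-impl classes exercised above.
    static class TokenMapper {
        String selectById(int id) { return null; }
    }

    static class TokenServiceImpl {
        TokenMapper tokenMapper;

        Map<String, Object> delById(int id) {
            // Report "not exist" when the mapper finds nothing, otherwise success.
            Map<String, Object> result = new HashMap<>();
            result.put("status", tokenMapper.selectById(id) == null
                    ? Status.ACCESS_TOKEN_NOT_EXIST : Status.SUCCESS);
            return result;
        }
    }

    @InjectMocks
    private TokenServiceImpl tokenService;

    @Mock
    private TokenMapper tokenMapper;

    @Test
    public void missingTokenReportsNotExist() {
        when(tokenMapper.selectById(0)).thenReturn(null);
        Map<String, Object> result = tokenService.delById(0);
        Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get("status"));
    }

    @Test
    public void existingTokenReportsSuccess() {
        when(tokenMapper.selectById(1)).thenReturn("token-1");
        Map<String, Object> result = tokenService.delById(1);
        Assert.assertEquals(Status.SUCCESS, result.get("status"));
    }
}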

2  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java

@ -76,7 +76,7 @@ public class AlertGroupServiceTest {
@Test
public void testQueryAlertgroup(){
public void testQueryAlertGroup(){
Mockito.when(alertGroupMapper.queryAllGroupList()).thenReturn(getList());
HashMap<String, Object> result= alertGroupService.queryAlertgroup();

50  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java

@ -1,50 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class BaseDAGServiceTest {
@Test
public void testProcessInstance2DAG(){
ProcessInstance processInstance = new ProcessInstance();
processInstance.setProcessInstanceJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-61567\"," +
"\"name\":\"开始\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo '1'\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[]},{\"type\":\"SHELL\",\"id\":\"tasks-6-3ug5ej\",\"name\":\"结束\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo '1'\"},\"description\":\"\"," +
"\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[\"开始\"]}],\"tenantId\":-1,\"timeout\":0}");
DAG<String, TaskNode, TaskNodeRelation> relationDAG = BaseDAGService.processInstance2DAG(processInstance);
Assert.assertTrue(relationDAG.containsNode("开始"));
}
}

70  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java

@ -17,17 +17,29 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -36,25 +48,19 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
public class DataAnalysisServiceTest {
@InjectMocks
private DataAnalysisService dataAnalysisService;
private DataAnalysisServiceImpl dataAnalysisService;
@Mock
ProjectMapper projectMapper;
@Mock
ProjectService projectService;
ProjectServiceImpl projectService;
@Mock
ProcessInstanceMapper processInstanceMapper;
@ -71,13 +77,9 @@ public class DataAnalysisServiceTest {
@Mock
TaskInstanceMapper taskInstanceMapper;
@Mock
ProcessService processService;
private Project project;
private Map<String, Object> resultMap;
private User user;
@ -86,26 +88,25 @@ public class DataAnalysisServiceTest {
public void setUp() {
user = new User();
project = new Project();
Project project = new Project();
project.setId(1);
resultMap = new HashMap<>();
Mockito.when(projectMapper.selectById(1)).thenReturn(project);
Mockito.when(projectService.hasProjectAndPerm(user,project,resultMap)).thenReturn(true);
Mockito.when(projectService.hasProjectAndPerm(user, project, resultMap)).thenReturn(true);
}
@After
public void after(){
public void after() {
user = null;
projectMapper = null;
resultMap = null;
}
@Test
public void testCountTaskStateByProject(){
public void testCountTaskStateByProject() {
String startDate = "2020-02-11 16:02:18";
String endDate = "2020-02-11 16:03:18";
@ -120,42 +121,40 @@ public class DataAnalysisServiceTest {
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts());
result = dataAnalysisService.countTaskStateByProject(user, 1, startDate, endDate);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountProcessInstanceStateByProject(){
public void testCountProcessInstanceStateByProject() {
String startDate = "2020-02-11 16:02:18";
String endDate = "2020-02-11 16:03:18";
//checkProject false
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(user,2,startDate,endDate);
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(user, 2, startDate, endDate);
Assert.assertTrue(result.isEmpty());
//SUCCESS
Mockito.when(processInstanceMapper.countInstanceStateByUser(DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts());
result = dataAnalysisService.countProcessInstanceStateByProject(user,1,startDate,endDate);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
result = dataAnalysisService.countProcessInstanceStateByProject(user, 1, startDate, endDate);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountDefinitionByUser(){
public void testCountDefinitionByUser() {
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(user,1);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(user, 1);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountCommandState(){
public void testCountCommandState() {
String startDate = "2020-02-11 16:02:18";
String endDate = "2020-02-11 16:03:18";
//checkProject false
Map<String, Object> result = dataAnalysisService.countCommandState(user,2,startDate,endDate);
Map<String, Object> result = dataAnalysisService.countCommandState(user, 2, startDate, endDate);
Assert.assertTrue(result.isEmpty());
List<CommandCount> commandCounts = new ArrayList<>(1);
CommandCount commandCount = new CommandCount();
@ -164,19 +163,18 @@ public class DataAnalysisServiceTest {
Mockito.when(commandMapper.countCommandState(0, DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(commandCounts);
Mockito.when(errorCommandMapper.countCommandState( DateUtils.getScheduleDate(startDate),
Mockito.when(errorCommandMapper.countCommandState(DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(commandCounts);
result = dataAnalysisService.countCommandState(user,1,startDate,endDate);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
result = dataAnalysisService.countCommandState(user, 1, startDate, endDate);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
/**
* get list
* @return
*/
private List<ExecuteStatusCount> getTaskInstanceStateCounts(){
private List<ExecuteStatusCount> getTaskInstanceStateCounts() {
List<ExecuteStatusCount> taskInstanceStateCounts = new ArrayList<>(1);
ExecuteStatusCount executeStatusCount = new ExecuteStatusCount();

218  dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java

@ -22,10 +22,14 @@ import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.MySQLDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -35,8 +39,6 @@ import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
@ -45,16 +47,172 @@ import java.util.Map;
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"sun.security.*", "javax.net.*"})
public class DataSourceServiceTest {
private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceTest.class);
@InjectMocks
private DataSourceService dataSourceService;
@Mock
private DataSourceMapper dataSourceMapper;
@Mock
private DataSourceUserMapper datasourceUserMapper;
public void createDataSourceTest() {
User loginUser = getAdminUser();
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
DbType dataSourceType = DbType.POSTGRESQL;
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null);
// data source exits
List<DataSource> dataSourceList = new ArrayList<>();
DataSource dataSource = new DataSource();
dataSource.setName(dataSourceName);
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList);
Map<String, Object> dataSourceExitsResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_EXIST, dataSourceExitsResult.get(Constants.STATUS));
// data source exits
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(false);
Map<String, Object> connectFailedResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED, connectFailedResult.get(Constants.STATUS));
// data source exits
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true);
PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(null);
Map<String, Object> notValidError = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, notValidError.get(Constants.STATUS));
// success
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true);
PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(JSONUtils.parseObject(parameter, MySQLDataSource.class));
Map<String, Object> success = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
public void updateDataSourceTest() {
User loginUser = getAdminUser();
int dataSourceId = 12;
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
DbType dataSourceType = DbType.POSTGRESQL;
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null);
// data source not exits
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Map<String, Object> resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST, resourceNotExits.get(Constants.STATUS));
// user no operation perm
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Map<String, Object> userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, userNoOperationPerm.get(Constants.STATUS));
// data source name exits
dataSource.setUserId(-1);
List<DataSource> dataSourceList = new ArrayList<>();
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(dataSourceList);
Map<String, Object> dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_EXIST, dataSourceNameExist.get(Constants.STATUS));
// data source connect failed
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(false);
Map<String, Object> connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED, connectFailed.get(Constants.STATUS));
//success
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true);
Map<String, Object> success = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void queryDataSourceListPagingTest() {
User loginUser = getAdminUser();
String searchVal = "";
int pageNo = 1;
int pageSize = 10;
Map<String, Object> success = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void connectionTest() {
int dataSourceId = -1;
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Assert.assertFalse(dataSourceService.connectionTest(dataSourceId));
}
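/**
 * test delete data source: covers not-exist, no-permission and success branches
 */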
@Test
public void deleteTest() {
User loginUser = getAdminUser();
int dataSourceId = 1;
Result result = new Result();
//resource not exist
dataSourceService.putMsg(result, Status.RESOURCE_NOT_EXIST);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
// user no operation perm
dataSourceService.putMsg(result, Status.USER_NO_OPERATION_PERM);
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
// success
dataSourceService.putMsg(result, Status.SUCCESS);
dataSource.setUserId(-1);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
}
@Test
public void queryDataSourceListTest(){
public void unauthDatasourceTest() {
User loginUser = getAdminUser();
int userId = -1;
//user no operation perm
Map<String, Object> noOperationPerm = dataSourceService.unauthDatasource(loginUser, userId);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> success = dataSourceService.unauthDatasource(loginUser, userId);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void authedDatasourceTest() {
User loginUser = getAdminUser();
int userId = -1;
//user no operation perm
Map<String, Object> noOperationPerm = dataSourceService.authedDatasource(loginUser, userId);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> success = dataSourceService.authedDatasource(loginUser, userId);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void queryDataSourceListTest() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal());
@ -62,35 +220,34 @@ public class DataSourceServiceTest {
}
@Test
public void verifyDataSourceNameTest(){
public void verifyDataSourceNameTest() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
String dataSourceName = "dataSource1";
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(getDataSourceList());
Result result = dataSourceService.verifyDataSourceName(loginUser, dataSourceName);
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(),result.getMsg());
Result result = dataSourceService.verifyDataSourceName(dataSourceName);
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(), result.getMsg());
}
@Test
public void queryDataSourceTest(){
public void queryDataSourceTest() {
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(null);
Map<String, Object> result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status)result.get(Constants.STATUS)).getCode(),Status.RESOURCE_NOT_EXIST.getCode());
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.RESOURCE_NOT_EXIST.getCode());
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(getOracleDataSource());
result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status)result.get(Constants.STATUS)).getCode(),Status.SUCCESS.getCode());
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.SUCCESS.getCode());
}
private List<DataSource> getDataSourceList(){
private List<DataSource> getDataSourceList() {
List<DataSource> dataSources = new ArrayList<>();
dataSources.add(getOracleDataSource());
return dataSources;
}
private DataSource getOracleDataSource(){
private DataSource getOracleDataSource() {
DataSource dataSource = new DataSource();
dataSource.setName("test");
dataSource.setNote("Note");
@ -101,31 +258,40 @@ public class DataSourceServiceTest {
}
@Test
public void buildParameter(){
String param = dataSourceService.buildParameter("","", DbType.ORACLE, "192.168.9.1","1521","im"
,"","test","test", DbConnectType.ORACLE_SERVICE_NAME,"");
public void buildParameter() {
String param = dataSourceService.buildParameter(DbType.ORACLE, "192.168.9.1", "1521", "im"
, "", "test", "test", DbConnectType.ORACLE_SERVICE_NAME, "");
String expected = "{\"connectType\":\"ORACLE_SERVICE_NAME\",\"type\":\"ORACLE_SERVICE_NAME\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"test\"}";
Assert.assertEquals(expected, param);
}
@Test
public void buildParameterWithDecodePassword(){
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"true");
String param = dataSourceService.buildParameter("name","desc", DbType.MYSQL, "192.168.9.1","1521","im"
,"","test","123456", null,"");
public void buildParameterWithDecodePassword() {
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
String param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im"
, "", "test", "123456", null, "");
String expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\"}";
Assert.assertEquals(expected, param);
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"false");
param = dataSourceService.buildParameter("name","desc", DbType.MYSQL, "192.168.9.1","1521","im"
,"","test","123456", null,"");
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im"
, "", "test", "123456", null, "");
expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"123456\"}";
Assert.assertEquals(expected, param);
}
/**
* get Mock Admin User
*
* @return admin user
*/
private User getAdminUser() {
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserName("admin");
loginUser.setUserType(UserType.GENERAL_USER);
return loginUser;
}
}

63
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java

@ -16,17 +16,36 @@
*/
package org.apache.dolphinscheduler.api.service;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -36,13 +55,6 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import java.text.ParseException;
import java.util.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.times;
/**
* test for ExecutorService
*/
@ -62,7 +74,7 @@ public class ExecutorService2Test {
private ProjectMapper projectMapper;
@Mock
private ProjectService projectService;
private ProjectServiceImpl projectService;
@Mock
private MonitorService monitorService;
@ -84,7 +96,7 @@ public class ExecutorService2Test {
private String cronTime;
@Before
public void init(){
public void init() {
// user
loginUser.setId(userId);
@ -111,7 +123,6 @@ public class ExecutorService2Test {
/**
* not complement
* @throws ParseException
*/
@Test
public void testNoComplement() throws ParseException {
@ -125,13 +136,12 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* date error
* @throws ParseException
*/
@Test
public void testDateError() throws ParseException {
@ -145,13 +155,12 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS));
verify(processService, times(0)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* serial
* @throws ParseException
*/
@Test
public void testSerial() throws ParseException {
@ -165,17 +174,16 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* without schedule
* @throws ParseException
*/
@Test
public void testParallelWithOutSchedule() throws ParseException {
try{
try {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
@ -185,17 +193,16 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(31)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* with schedule
* @throws ParseException
*/
@Test
public void testParallelWithSchedule() throws ParseException {
try{
try {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
@ -205,13 +212,13 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(15)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
@Test
public void testNoMsterServers() throws ParseException{
public void testNoMsterServers() throws ParseException {
Mockito.when(monitorService.getServerListFromZK(true)).thenReturn(new ArrayList<Server>());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
@ -220,11 +227,11 @@ public class ExecutorService2Test {
null, null, 0,
"", "", RunMode.RUN_MODE_PARALLEL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(result.get(Constants.STATUS),Status.MASTER_NOT_EXISTS);
Assert.assertEquals(result.get(Constants.STATUS), Status.MASTER_NOT_EXISTS);
}
private List<Server> getMasterServersList(){
private List<Server> getMasterServersList() {
List<Server> masterServerList = new ArrayList<>();
Server masterServer1 = new Server();
masterServer1.setId(1);
@ -242,11 +249,11 @@ public class ExecutorService2Test {
}
private List<Schedule> zeroSchedulerList(){
private List<Schedule> zeroSchedulerList() {
return Collections.EMPTY_LIST;
}
private List<Schedule> oneSchedulerList(){
private List<Schedule> oneSchedulerList() {
List<Schedule> schedulerList = new LinkedList<>();
Schedule schedule = new Schedule();
schedule.setCrontab("0 0 0 1/2 * ?");
@ -254,7 +261,7 @@ public class ExecutorService2Test {
return schedulerList;
}
private Map<String, Object> checkProjectAndAuth(){
private Map<String, Object> checkProjectAndAuth() {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, Status.SUCCESS);
return result;

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java

@ -54,7 +54,7 @@ public class ExecutorServiceTest {
@Test
public void putMsgWithParamsTest() {
Map<String,Object> map = new HashMap<>(5);
Map<String,Object> map = new HashMap<>();
putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS);
logger.info(map.toString());

44
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java

@ -17,10 +17,14 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
@ -32,25 +36,30 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@RunWith(MockitoJUnitRunner.class)
@PrepareForTest({LoggerService.class})
@PrepareForTest({LoggerServiceImpl.class})
public class LoggerServiceTest {
private static final Logger logger = LoggerFactory.getLogger(LoggerServiceTest.class);
@InjectMocks
private LoggerService loggerService;
private LoggerServiceImpl loggerService;
@Mock
private ProcessService processService;
@Before
public void init() {
this.loggerService.init();
}
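/**
 * test query log: covers task-instance-not-found, illegal-host and success branches
 */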
@Test
public void testQueryDataSourceList(){
public void testQueryDataSourceList() {
TaskInstance taskInstance = new TaskInstance();
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
Result result = loggerService.queryLog(2,1,1);
Result result = loggerService.queryLog(2, 1, 1);
//TASK_INSTANCE_NOT_FOUND
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), result.getCode().intValue());
try {
//HOST NOT FOUND OR ILLEGAL
@ -59,36 +68,36 @@ public class LoggerServiceTest {
Assert.assertTrue(true);
logger.error("testQueryDataSourceList error {}", e.getMessage());
}
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), result.getCode().intValue());
//SUCCESS
taskInstance.setHost("127.0.0.1:8080");
taskInstance.setLogPath("/temp/log");
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
result = loggerService.queryLog(1,1,1);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
result = loggerService.queryLog(1, 1, 1);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testGetLogBytes(){
public void testGetLogBytes() {
TaskInstance taskInstance = new TaskInstance();
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
//task instance is null
try{
try {
loggerService.getLogBytes(2);
}catch (RuntimeException e){
} catch (RuntimeException e) {
Assert.assertTrue(true);
logger.error("testGetLogBytes error: {}","task instance is null");
logger.error("testGetLogBytes error: {}", "task instance is null");
}
//task instance host is null
try{
try {
loggerService.getLogBytes(1);
}catch (RuntimeException e){
} catch (RuntimeException e) {
Assert.assertTrue(true);
logger.error("testGetLogBytes error: {}","task instance host is null");
logger.error("testGetLogBytes error: {}", "task instance host is null");
}
//success
@ -100,4 +109,9 @@ public class LoggerServiceTest {
}
@After
public void close() {
this.loggerService.close();
}
}

910
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

File diff suppressed because it is too large

274
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java

@ -0,0 +1,274 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionVersionServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.Lists;
@RunWith(MockitoJUnitRunner.class)
public class ProcessDefinitionVersionServiceTest {
@InjectMocks
private ProcessDefinitionVersionServiceImpl processDefinitionVersionService;
@Mock
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectServiceImpl projectService;
@Test
public void testAddProcessDefinitionVersion() {
long expectedVersion = 5L;
ProcessDefinition processDefinition = getProcessDefinition();
Mockito.when(processDefinitionVersionMapper
.queryMaxVersionByProcessDefinitionId(processDefinition.getId()))
.thenReturn(expectedVersion);
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
Assert.assertEquals(expectedVersion + 1, version);
}
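/**
 * test query process definition versions paging:
 * covers invalid pageNo/pageSize, project auth failure and success
 */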
@Test
@SuppressWarnings("unchecked")
public void testQueryProcessDefinitionVersions() {
// pageNo <= 0
int pageNo = -1;
int pageSize = 10;
int processDefinitionId = 66;
String projectName = "project_test1";
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> resultMap1 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
, resultMap1.get(Constants.STATUS));
// pageSize <= 0
pageNo = 1;
pageSize = -1;
Map<String, Object> resultMap2 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
, resultMap2.get(Constants.STATUS));
Map<String, Object> res = new HashMap<>();
putMsg(res, Status.PROJECT_NOT_FOUNT);
Project project = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName))
.thenReturn(project);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(res);
// project auth fail
pageNo = 1;
pageSize = 10;
Map<String, Object> resultMap3 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, resultMap3.get(Constants.STATUS));
putMsg(res, Status.SUCCESS);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(res);
ProcessDefinitionVersion processDefinitionVersion = getProcessDefinitionVersion(getProcessDefinition());
Mockito.when(processDefinitionVersionMapper
.queryProcessDefinitionVersionsPaging(Mockito.any(Page.class), Mockito.eq(processDefinitionId)))
.thenReturn(new Page<ProcessDefinitionVersion>()
.setRecords(Lists.newArrayList(processDefinitionVersion)));
Map<String, Object> resultMap4 = processDefinitionVersionService.queryProcessDefinitionVersions(
loginUser
, projectName
, pageNo
, pageSize
, processDefinitionId);
Assert.assertEquals(Status.SUCCESS, resultMap4.get(Constants.STATUS));
Assert.assertEquals(processDefinitionVersion
, ((PageInfo<ProcessDefinitionVersion>) resultMap4.get(Constants.DATA_LIST))
.getLists().get(0));
}
@Test
public void testQueryByProcessDefinitionIdAndVersion() {
ProcessDefinitionVersion expectedProcessDefinitionVersion =
getProcessDefinitionVersion(getProcessDefinition());
int processDefinitionId = 66;
long version = 10;
Mockito.when(processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version))
.thenReturn(expectedProcessDefinitionVersion);
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
Assert.assertEquals(expectedProcessDefinitionVersion, processDefinitionVersion);
}
@Test
public void testDeleteByProcessDefinitionIdAndVersion() {
String projectName = "project_test1";
int processDefinitionId = 66;
long version = 10;
Project project = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName))
.thenReturn(project);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
// project auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(new HashMap<>());
Map<String, Object> resultMap1 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
loginUser
, projectName
, processDefinitionId
, version);
Assert.assertEquals(0, resultMap1.size());
Map<String, Object> res = new HashMap<>();
putMsg(res, Status.SUCCESS);
Mockito.when(processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version))
.thenReturn(1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
.thenReturn(res);
Map<String, Object> resultMap2 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
loginUser
, projectName
, processDefinitionId
, version);
Assert.assertEquals(Status.SUCCESS, resultMap2.get(Constants.STATUS));
}
/**
* get mock processDefinitionVersion by processDefinition
*
* @return processDefinitionVersion
*/
private ProcessDefinitionVersion getProcessDefinitionVersion(ProcessDefinition processDefinition) {
return ProcessDefinitionVersion
.newBuilder()
.processDefinitionId(processDefinition.getId())
.version(1)
.processDefinitionJson(processDefinition.getProcessDefinitionJson())
.description(processDefinition.getDescription())
.locations(processDefinition.getLocations())
.connects(processDefinition.getConnects())
.timeout(processDefinition.getTimeout())
.globalParams(processDefinition.getGlobalParams())
.createTime(processDefinition.getUpdateTime())
.receivers(processDefinition.getReceivers())
.receiversCc(processDefinition.getReceiversCc())
.resourceIds(processDefinition.getResourceIds())
.build();
}
/**
* get mock processDefinition
*
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(66);
processDefinition.setName("test_pdf");
processDefinition.setProjectId(2);
processDefinition.setTenantId(1);
processDefinition.setDescription("");
return processDefinition;
}
/**
* get mock Project
*
* @param projectName projectName
* @return Project
*/
private Project getProject(String projectName) {
Project project = new Project();
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
}
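/**
 * put status and formatted message into the result map
 */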
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
}

115
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java

@ -14,18 +14,46 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.IOException;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -33,22 +61,11 @@ import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import java.io.IOException;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class ProcessInstanceServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class);
@InjectMocks
ProcessInstanceService processInstanceService;
@ -57,7 +74,7 @@ public class ProcessInstanceServiceTest {
ProjectMapper projectMapper;
@Mock
ProjectService projectService;
ProjectServiceImpl projectService;
@Mock
ProcessService processService;
@ -71,6 +88,9 @@ public class ProcessInstanceServiceTest {
@Mock
ProcessDefinitionService processDefinitionService;
@Mock
ProcessDefinitionVersionService processDefinitionVersionService;
@Mock
ExecutorService execService;
@ -78,25 +98,22 @@ public class ProcessInstanceServiceTest {
TaskInstanceMapper taskInstanceMapper;
@Mock
LoggerService loggerService;
LoggerServiceImpl loggerService;
@Mock
UsersService usersService;
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\","
+ "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"},"
+ "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\","
+ "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\","
+ "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
@Test
public void testQueryProcessInstanceList() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -153,30 +170,28 @@ public class ProcessInstanceServiceTest {
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
int size=10;
String startTime="2020-01-01 00:00:00";
String endTime="2020-08-02 00:00:00";
int size = 10;
String startTime = "2020-01-01 00:00:00";
String endTime = "2020-08-02 00:00:00";
Date start = DateUtils.getScheduleDate(startTime);
Date end = DateUtils.getScheduleDate(endTime);
//project auth fail
when(projectMapper.queryByName(projectName)).thenReturn(null);
when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result);
Map<String, Object> proejctAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser,projectName,size,startTime,endTime);
Map<String, Object> proejctAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectName, size, startTime, endTime);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
//project auth success
putMsg(result, Status.SUCCESS, projectName);
Project project = getProject(projectName);
ProcessInstance processInstance = getProcessInstance();
List<ProcessInstance> processInstanceList = new ArrayList<>();
processInstanceList.add(processInstance);
when(projectMapper.queryByName(projectName)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId());
when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser);
Map<String, Object> successRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser,projectName,size,startTime,endTime);
Map<String, Object> successRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectName, size, startTime, endTime);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@ -185,7 +200,7 @@ public class ProcessInstanceServiceTest {
public void testQueryProcessInstanceById() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -223,7 +238,7 @@ public class ProcessInstanceServiceTest {
public void testQueryTaskListByProcessId() throws IOException {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -253,26 +268,23 @@ public class ProcessInstanceServiceTest {
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testParseLogForDependentResult() {
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" +
"[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" +
"[root@node2 current]# ";
try {
public void testParseLogForDependentResult() throws IOException {
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172]"
+ " - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n"
+ "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172]"
+ " - task : 223_10739_452334 exit status code : 0\n"
+ "[root@node2 current]# ";
Map<String, DependResult> resultMap =
processInstanceService.parseLogForDependentResult(logString);
Assert.assertEquals(1, resultMap.size());
} catch (IOException e) {
}
}
@Test
public void testQuerySubProcessInstanceByTaskId() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -318,7 +330,7 @@ public class ProcessInstanceServiceTest {
public void testUpdateProcessInstance() throws ParseException {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -359,6 +371,7 @@ public class ProcessInstanceServiceTest {
when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant);
when(processService.updateProcessInstance(processInstance)).thenReturn(1);
when(processDefinitionService.checkProcessNodeList(Mockito.any(), eq(shellJson))).thenReturn(result);
when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L);
Map<String, Object> processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1,
shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", "");
Assert.assertEquals(Status.UPDATE_PROCESS_INSTANCE_ERROR, processInstanceFinishRes.get(Constants.STATUS));
@ -374,7 +387,7 @@ public class ProcessInstanceServiceTest {
public void testQueryParentInstanceBySubId() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -389,6 +402,7 @@ public class ProcessInstanceServiceTest {
when(projectMapper.queryByName(projectName)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> processInstanceNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
@ -415,7 +429,7 @@ public class ProcessInstanceServiceTest {
public void testDeleteProcessInstanceById() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -547,5 +561,4 @@ public class ProcessInstanceServiceTest {
}
}
}

203
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java

@ -14,11 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -30,10 +30,13 @@ import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.junit.After;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
@ -43,111 +46,113 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class ProjectServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class);
@InjectMocks
private ProjectService projectService;
private ProjectServiceImpl projectService;
@Mock
private ProjectMapper projectMapper;
@Mock
private UserMapper userMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
private String projectName = "ProjectServiceTest";
private String userName = "ProjectServiceTest";
@Before
public void setUp() {
}
@After
public void after(){
}
@Test
public void testCreateProject(){
public void testCreateProject() {
User loginUser = getLoginUser();
loginUser.setId(1);
Map<String, Object> result = projectService.createProject(loginUser, projectName, getDesc());
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS));
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//project name exist
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject());
result = projectService.createProject(loginUser, projectName, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.insert(Mockito.any(Project.class))).thenReturn(1);
result = projectService.createProject(loginUser, "test", "test");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testQueryById(){
public void testQueryById() {
//not exist
Map<String, Object> result = projectService.queryById(Integer.MAX_VALUE);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
logger.info(result.toString());
//success
Mockito.when(projectMapper.selectById(1)).thenReturn(getProject());
result = projectService.queryById(1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCheckProjectAndAuth(){
public void testCheckProjectAndAuth() {
Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser());
User loginUser = getLoginUser();
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser,null,projectName);
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, null, projectName);
logger.info(result.toString());
Status status = (Status)result.get(Constants.STATUS);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Status status = (Status) result.get(Constants.STATUS);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
Project project = getProject();
//USER_NO_OPERATION_PROJECT_PERM
project.setUserId(2);
result = projectService.checkProjectAndAuth(loginUser,project,projectName);
result = projectService.checkProjectAndAuth(loginUser, project, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
//success
project.setUserId(1);
result = projectService.checkProjectAndAuth(loginUser,project,projectName);
result = projectService.checkProjectAndAuth(loginUser, project, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
Map<String, Object> result2 = projectService.checkProjectAndAuth(loginUser, null, projectName);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result2.get(Constants.STATUS));
Project project1 = getProject();
// USER_NO_OPERATION_PROJECT_PERM
project1.setUserId(2);
result2 = projectService.checkProjectAndAuth(loginUser, project1, projectName);
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result2.get(Constants.STATUS));
//success
project1.setUserId(1);
projectService.checkProjectAndAuth(loginUser, project1, projectName);
}
@Test
public void testHasProjectAndPerm(){
public void testHasProjectAndPerm() {
// Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser());
User loginUser = getLoginUser();
@ -156,28 +161,29 @@ public class ProjectServiceTest {
// not exist user
User tempUser = new User();
tempUser.setId(Integer.MAX_VALUE);
boolean checkResult = projectService.hasProjectAndPerm(tempUser,project,result);
boolean checkResult = projectService.hasProjectAndPerm(tempUser, project, result);
logger.info(result.toString());
Assert.assertFalse(checkResult);
//success
result = new HashMap<>();
project.setUserId(1);
checkResult = projectService.hasProjectAndPerm(loginUser,project,result);
checkResult = projectService.hasProjectAndPerm(loginUser, project, result);
logger.info(result.toString());
Assert.assertTrue(checkResult);
}
@Test
public void testQueryProjectListPaging(){
public void testQueryProjectListPaging() {
IPage<Project> page = new Page<>(1,10);
IPage<Project> page = new Page<>(1, 10);
page.setRecords(getList());
page.setTotal(1L);
Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(1), Mockito.eq(projectName))).thenReturn(page);
User loginUser = getLoginUser();
// project owner
Map<String, Object> result = projectService.queryProjectListPaging(loginUser,10,1,projectName);
Map<String, Object> result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName);
logger.info(result.toString());
PageInfo<Project> pageInfo = (PageInfo<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists()));
@ -185,90 +191,112 @@ public class ProjectServiceTest {
//admin
Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(0), Mockito.eq(projectName))).thenReturn(page);
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryProjectListPaging(loginUser,10,1,projectName);
result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName);
logger.info(result.toString());
pageInfo = (PageInfo<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists()));
}
@Test
public void testDeleteProject(){
public void testDeleteProject() {
Mockito.when(projectMapper.selectById(1)).thenReturn(getProject());
User loginUser = getLoginUser();
//PROJECT_NOT_FOUNT
Map<String, Object> result= projectService.deleteProject(loginUser,12);
Map<String, Object> result = projectService.deleteProject(loginUser, 12);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
loginUser.setId(2);
//USER_NO_OPERATION_PROJECT_PERM
result= projectService.deleteProject(loginUser,1);
result = projectService.deleteProject(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
//DELETE_PROJECT_ERROR_DEFINES_NOT_NULL
Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(getProcessDefinitions());
loginUser.setUserType(UserType.ADMIN_USER);
result= projectService.deleteProject(loginUser,1);
result = projectService.deleteProject(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL,result.get(Constants.STATUS));
Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.deleteById(1)).thenReturn(1);
Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(new ArrayList<>());
result= projectService.deleteProject(loginUser,1);
result = projectService.deleteProject(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testUpdate(){
public void testUpdate() {
User loginUser = getLoginUser();
Project project = getProject();
project.setId(2);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project);
Mockito.when( projectMapper.selectById(1)).thenReturn(getProject());
Mockito.when(projectMapper.selectById(1)).thenReturn(getProject());
// PROJECT_NOT_FOUNT
Map<String, Object> result = projectService.update(loginUser,12,projectName,"desc");
Map<String, Object> result = projectService.update(loginUser, 12, projectName, "desc");
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
//PROJECT_ALREADY_EXISTS
result = projectService.update(loginUser,1,projectName,"desc");
result = projectService.update(loginUser, 1, projectName, "desc");
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS));
//success
project.setUserId(1);
Mockito.when(projectMapper.updateById(Mockito.any(Project.class))).thenReturn(1);
result = projectService.update(loginUser,1,"test","desc");
result = projectService.update(loginUser, 1, "test", "desc");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testQueryAuthorizedProject(){
public void testQueryAuthorizedProject() {
User loginUser = getLoginUser();
Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList());
//USER_NO_OPERATION_PERM
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser,3);
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, 3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryAuthorizedProject(loginUser, 1);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryCreatedProject() {
User loginUser = getLoginUser();
Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(getList());
//USER_NO_OPERATION_PERM
Map<String, Object> result = projectService.queryProjectCreatedByUser(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryAuthorizedProject(loginUser,1);
result = projectService.queryProjectCreatedByUser(loginUser);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryAllProjectList(){
public void testQueryAllProjectList() {
Mockito.when(projectMapper.selectList(null)).thenReturn(getList());
Mockito.when(processDefinitionMapper.selectList(null)).thenReturn(getProcessDefinitions());
@ -279,22 +307,22 @@ public class ProjectServiceTest {
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryUnauthorizedProject(){
public void testQueryUnauthorizedProject() {
// Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList());
Mockito.when(projectMapper.queryProjectExceptUserId(2)).thenReturn(getList());
User loginUser = new User();
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser,2);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, 2);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
private Project getProject(){
private Project getProject() {
Project project = new Project();
project.setId(1);
project.setName(projectName);
@ -302,18 +330,16 @@ public class ProjectServiceTest {
return project;
}
private List<Project> getList(){
private List<Project> getList() {
List<Project> list = new ArrayList<>();
list.add(getProject());
return list;
}
/**
* create admin user
* @return
*/
private User getLoginUser(){
private User getLoginUser() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
@ -325,16 +351,15 @@ public class ProjectServiceTest {
/**
* get project user
*/
private ProjectUser getProjectUser(){
private ProjectUser getProjectUser() {
ProjectUser projectUser = new ProjectUser();
projectUser.setProjectId(1);
projectUser.setUserId(1);
return projectUser;
}
private List<ProcessDefinition> getProcessDefinitions(){
private List<ProcessDefinition> getProcessDefinitions() {
List<ProcessDefinition> list = new ArrayList<>();
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectId(1);
@ -342,15 +367,11 @@ public class ProjectServiceTest {
return list;
}
private String getDesc(){
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" +
".deleteProjectRelation(projectId,userId)projectUserMappe" +
"r.deleteProjectRelation(projectId,userId)projectUserMapper" +
".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
private String getDesc() {
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe"
+ ".deleteProjectRelation(projectId,userId)projectUserMappe"
+ "r.deleteProjectRelation(projectId,userId)projectUserMapper"
+ ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
}
}

32
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.model.Server;
@ -24,12 +25,16 @@ import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -40,13 +45,6 @@ import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.quartz.Scheduler;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
@PrepareForTest(QuartzExecutors.class)
@ -57,10 +55,6 @@ public class SchedulerServiceTest {
@InjectMocks
private SchedulerService schedulerService;
@Autowired
private ExecutorService executorService;
@Mock
private MonitorService monitorService;
@ -72,21 +66,13 @@ public class SchedulerServiceTest {
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ProjectService projectService;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
private ProjectServiceImpl projectService;
@Mock
private QuartzExecutors quartzExecutors;
@Mock
private Scheduler scheduler;
@Before
public void setUp() {
@ -179,7 +165,7 @@ public class SchedulerServiceTest {
boolean flag = true;
try {
schedulerService.deleteSchedule(1, 1);
}catch (Exception e){
} catch (Exception e) {
flag = false;
}
Assert.assertTrue(flag);

11
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java

@ -16,7 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.apache.dolphinscheduler.api.service.impl.SessionServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
@ -38,10 +43,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.mock.web.MockCookie;
import org.springframework.mock.web.MockHttpServletRequest;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@RunWith(MockitoJUnitRunner.class)
public class SessionServiceTest {
@ -49,7 +50,7 @@ public class SessionServiceTest {
private static final Logger logger = LoggerFactory.getLogger(SessionServiceTest.class);
@InjectMocks
private SessionService sessionService;
private SessionServiceImpl sessionService;
@Mock
private SessionMapper sessionMapper;

58
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java

@ -16,9 +16,13 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -30,6 +34,14 @@ import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -41,11 +53,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import java.text.MessageFormat;
import java.util.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
@ -59,7 +67,7 @@ public class TaskInstanceServiceTest {
ProjectMapper projectMapper;
@Mock
ProjectService projectService;
ProjectServiceImpl projectService;
@Mock
ProcessService processService;
@ -67,28 +75,23 @@ public class TaskInstanceServiceTest {
@Mock
TaskInstanceMapper taskInstanceMapper;
@Mock
ProcessInstanceService processInstanceService;
@Mock
UsersService usersService;
@Test
public void queryTaskListPaging(){
public void queryTaskListPaging() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
when(projectMapper.queryByName(projectName)).thenReturn(null);
when(projectService.checkProjectAndAuth(loginUser,null,projectName)).thenReturn(result);
when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result);
Map<String, Object> proejctAuthFailRes = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "",
"test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
//project
putMsg(result, Status.SUCCESS, projectName);
Project project = getProject(projectName);
@ -101,7 +104,7 @@ public class TaskInstanceServiceTest {
taskInstanceList.add(taskInstance);
pageReturn.setRecords(taskInstanceList);
when(projectMapper.queryByName(Mockito.anyString())).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId());
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""),
@ -126,10 +129,28 @@ public class TaskInstanceServiceTest {
Map<String, Object> executorNullRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
"test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
Assert.assertEquals(Status.SUCCESS, executorNullRes.get(Constants.STATUS));
//start/end date null
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""),
eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn);
Map<String, Object> executorNullDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
"", null, null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
Assert.assertEquals(Status.SUCCESS, executorNullDateRes.get(Constants.STATUS));
//start date error format
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""),
eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn);
Map<String, Object> executorErrorStartDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
"", "error date", null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorStartDateRes.get(Constants.STATUS));
Map<String, Object> executorErrorEndDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
"", null, "error date", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorEndDateRes.get(Constants.STATUS));
}
/**
* get Mock Admin User
*
* @return admin user
*/
private User getAdminUser() {
@ -142,10 +163,11 @@ public class TaskInstanceServiceTest {
/**
* get mock Project
*
* @param projectName projectName
* @return Project
*/
private Project getProject(String projectName){
private Project getProject(String projectName) {
Project project = new Project();
project.setId(1);
project.setName(projectName);
@ -155,6 +177,7 @@ public class TaskInstanceServiceTest {
/**
* get Mock process instance
*
* @return process instance
*/
private ProcessInstance getProcessInstance() {
@ -169,6 +192,7 @@ public class TaskInstanceServiceTest {
/**
* get Mock task instance
*
* @return task instance
*/
private TaskInstance getTaskInstance() {

131
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java

@ -14,14 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.TenantServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -35,6 +32,12 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -54,54 +57,61 @@ public class TenantServiceTest {
private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class);
@InjectMocks
private TenantService tenantService;
private TenantServiceImpl tenantService;
@Mock
private TenantMapper tenantMapper;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
@Mock
private ProcessInstanceMapper processInstanceMapper;
@Mock
private UserMapper userMapper;
private String tenantCode = "TenantServiceTest";
private String tenantName = "TenantServiceTest";
private static final String tenantCode = "TenantServiceTest";
private static final String tenantName = "TenantServiceTest";
@Test
public void testCreateTenant(){
public void testCreateTenant() {
User loginUser = getLoginUser();
Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getList());
try {
//check tenantCode
Map<String, Object> result = tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest");
Map<String, Object> result =
tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest");
logger.info(result.toString());
Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR,result.get(Constants.STATUS));
Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR, result.get(Constants.STATUS));
//check exist
result = tenantService.createTenant(loginUser, tenantCode, tenantName, 1, "TenantServiceTest");
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS));
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
// success
result = tenantService.createTenant(loginUser, "test", "test", 1, "TenantServiceTest");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("create tenant error",e);
Assert.assertTrue(false);
logger.error("create tenant error", e);
Assert.fail();
}
}
@Test
public void testQueryTenantListPage(){
@SuppressWarnings("unchecked")
public void testQueryTenantListPage() {
IPage<Tenant> page = new Page<>(1,10);
IPage<Tenant> page = new Page<>(1, 10);
page.setRecords(getList());
page.setTotal(1L);
Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest"))).thenReturn(page);
Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest")))
.thenReturn(page);
Map<String, Object> result = tenantService.queryTenantList(getLoginUser(), "TenantServiceTest", 1, 10);
logger.info(result.toString());
PageInfo<Tenant> pageInfo = (PageInfo<Tenant>) result.get(Constants.DATA_LIST);
@ -110,87 +120,71 @@ public class TenantServiceTest {
}
@Test
public void testUpdateTenant(){
public void testUpdateTenant() {
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
try {
// id not exist
Map<String, Object> result = tenantService.updateTenant(getLoginUser(), 912222, tenantCode, tenantName, 1, "desc");
Map<String, Object> result =
tenantService.updateTenant(getLoginUser(), 912222, tenantCode, tenantName, 1, "desc");
logger.info(result.toString());
// success
Assert.assertEquals(Status.TENANT_NOT_EXIST,result.get(Constants.STATUS));
Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS));
result = tenantService.updateTenant(getLoginUser(), 1, tenantCode, "TenantServiceTest001", 1, "desc");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("update tenant error",e);
Assert.assertTrue(false);
logger.error("update tenant error", e);
Assert.fail();
}
}
@Test
public void testDeleteTenantById(){
public void testDeleteTenantById() {
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES)).thenReturn(getInstanceList());
Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES))
.thenReturn(getInstanceList());
Mockito.when(processDefinitionMapper.queryDefinitionListByTenant(2)).thenReturn(getDefinitionsList());
Mockito.when( userMapper.queryUserListByTenant(3)).thenReturn(getUserList());
Mockito.when(userMapper.queryUserListByTenant(3)).thenReturn(getUserList());
try {
//TENANT_NOT_EXIST
Map<String, Object> result = tenantService.deleteTenantById(getLoginUser(),12);
Map<String, Object> result = tenantService.deleteTenantById(getLoginUser(), 12);
logger.info(result.toString());
Assert.assertEquals(Status.TENANT_NOT_EXIST,result.get(Constants.STATUS));
Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS));
//DELETE_TENANT_BY_ID_FAIL
result = tenantService.deleteTenantById(getLoginUser(),1);
result = tenantService.deleteTenantById(getLoginUser(), 1);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL,result.get(Constants.STATUS));
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL, result.get(Constants.STATUS));
//DELETE_TENANT_BY_ID_FAIL_DEFINES
Mockito.when(tenantMapper.queryById(2)).thenReturn(getTenant(2));
result = tenantService.deleteTenantById(getLoginUser(),2);
result = tenantService.deleteTenantById(getLoginUser(), 2);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES,result.get(Constants.STATUS));
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, result.get(Constants.STATUS));
//DELETE_TENANT_BY_ID_FAIL_USERS
Mockito.when(tenantMapper.queryById(3)).thenReturn(getTenant(3));
result = tenantService.deleteTenantById(getLoginUser(),3);
result = tenantService.deleteTenantById(getLoginUser(), 3);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_USERS,result.get(Constants.STATUS));
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_USERS, result.get(Constants.STATUS));
// success
Mockito.when(tenantMapper.queryById(4)).thenReturn(getTenant(4));
result = tenantService.deleteTenantById(getLoginUser(),4);
result = tenantService.deleteTenantById(getLoginUser(), 4);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("delete tenant error",e);
Assert.assertTrue(false);
logger.error("delete tenant error", e);
Assert.fail();
}
}
@Test
public void testQueryTenantList(){
Mockito.when( tenantMapper.selectList(null)).thenReturn(getList());
Map<String, Object> result = tenantService.queryTenantList(getLoginUser());
logger.info(result.toString());
List<Tenant> tenantList = (List<Tenant>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(tenantList));
Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(getList());
Map<String, Object> successRes = tenantService.queryTenantList("1");
Assert.assertEquals(Status.SUCCESS,successRes.get(Constants.STATUS));
Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(null);
Map<String, Object> tenantNotExistRes = tenantService.queryTenantList("1");
Assert.assertEquals(Status.TENANT_NOT_EXIST,tenantNotExistRes.get(Constants.STATUS));
}
@Test
public void testVerifyTenantCode(){
public void testVerifyTenantCode() {
Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getList());
// tenantCode not exist
@ -209,12 +203,10 @@ public class TenantServiceTest {
Assert.assertEquals(resultString, result.getMsg());
}
/**
* get user
* @return
*/
private User getLoginUser(){
private User getLoginUser() {
User loginUser = new User();
loginUser.setUserType(UserType.ADMIN_USER);
@ -223,9 +215,8 @@ public class TenantServiceTest {
/**
* get list
* @return
*/
private List<Tenant> getList(){
private List<Tenant> getList() {
List<Tenant> tenantList = new ArrayList<>();
tenantList.add(getTenant());
return tenantList;
@ -233,16 +224,15 @@ public class TenantServiceTest {
/**
* get tenant
* @return
*/
private Tenant getTenant(){
private Tenant getTenant() {
return getTenant(1);
}
/**
* get tenant
* @return
*/
private Tenant getTenant(int id){
private Tenant getTenant(int id) {
Tenant tenant = new Tenant();
tenant.setId(id);
tenant.setTenantCode(tenantCode);
@ -250,25 +240,24 @@ public class TenantServiceTest {
return tenant;
}
private List<User> getUserList(){
private List<User> getUserList() {
List<User> userList = new ArrayList<>();
userList.add(getLoginUser());
return userList;
}
private List<ProcessInstance> getInstanceList(){
private List<ProcessInstance> getInstanceList() {
List<ProcessInstance> processInstances = new ArrayList<>();
ProcessInstance processInstance = new ProcessInstance();
processInstances.add(processInstance);
return processInstances;
}
private List<ProcessDefinition> getDefinitionsList(){
private List<ProcessDefinition> getDefinitionsList() {
List<ProcessDefinition> processDefinitions = new ArrayList<>();
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinitions.add(processDefinition);
return processDefinitions;
}
}
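
A recurring change in the test classes above is that Mockito now injects the concrete *ServiceImpl classes (ProjectServiceImpl, SessionServiceImpl, TenantServiceImpl) instead of autowiring the service interfaces. A minimal sketch of that pattern, using hypothetical FooServiceImpl and FooMapper names rather than the real DolphinScheduler classes:

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

// Hypothetical collaborator standing in for a MyBatis mapper such as TenantMapper.
interface FooMapper {
    String queryNameById(int id);
}

// Hypothetical concrete service standing in for TenantServiceImpl / SessionServiceImpl.
class FooServiceImpl {
    private FooMapper fooMapper;

    public String findName(int id) {
        return fooMapper.queryNameById(id);
    }
}

@RunWith(MockitoJUnitRunner.class)
public class FooServiceImplTest {

    @InjectMocks
    private FooServiceImpl fooService; // the implementation class is injected, not the interface

    @Mock
    private FooMapper fooMapper;       // Mockito fills this mock into fooService

    @Test
    public void testFindName() {
        Mockito.when(fooMapper.queryNameById(1)).thenReturn("tenantA");
        Assert.assertEquals("tenantA", fooService.findName(1));
    }
}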

100
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java

@ -42,14 +42,14 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@RunWith(MockitoJUnitRunner.class)
public class UsersServiceTest {
private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class);
@ -462,42 +462,122 @@ public class UsersServiceTest {
try {
//userName error
Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userName = "userTest0002";
userPassword = "userTest000111111111111111";
//password error
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userPassword = "userTest0002";
email = "1q.com";
//email error
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//repeatPassword error
email = "7400@qq.com";
repeatPassword = "userPassword";
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//success
repeatPassword = "userTest0002";
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error(Status.CREATE_USER_ERROR.getMsg(),e);
Assert.assertTrue(false);
}
}
@Test
public void testActivateUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
String userName = "userTest0002~";
try {
//not admin
Map<String, Object> result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//userName error
user.setUserType(UserType.ADMIN_USER);
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//user not exist
userName = "userTest10013";
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//user state error
userName = "userTest0001";
when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser());
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//success
when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getDisabledUser());
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
Assert.assertTrue(false);
}
}
@Test
public void testBatchActivateUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
List<String> userNames = new ArrayList<>();
userNames.add("userTest0001");
userNames.add("userTest0002");
userNames.add("userTest0003~");
userNames.add("userTest0004");
try {
//not admin
Map<String, Object> result = usersService.batchActivateUser(user, userNames);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//batch activate user names
user.setUserType(UserType.ADMIN_USER);
when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
when(userMapper.queryByUserNameAccurately("userTest0002")).thenReturn(getDisabledUser());
result = usersService.batchActivateUser(user, userNames);
Map<String, Object> responseData = (Map<String, Object>) result.get(Constants.DATA_LIST);
Map<String, Object> successData = (Map<String, Object>) responseData.get("success");
int totalSuccess = (Integer) successData.get("sum");
Map<String, Object> failedData = (Map<String, Object>) responseData.get("failed");
int totalFailed = (Integer) failedData.get("sum");
Assert.assertEquals(1, totalSuccess);
Assert.assertEquals(3, totalFailed);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
Assert.assertTrue(false);
}
}
/**
* get disabled user
* @return
*/
private User getDisabledUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
user.setState(0);
return user;
}
/**
* get user
* @return

5
dolphinscheduler-common/pom.xml

@ -580,6 +580,11 @@
</exclusions>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-annotation</artifactId>

15
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
@ -27,7 +28,7 @@ import java.util.regex.Pattern;
public final class Constants {
private Constants() {
throw new IllegalStateException("Constants class");
throw new UnsupportedOperationException("Construct Constants");
}
/**
@ -138,7 +139,7 @@ public final class Constants {
/**
* python home
*/
public static final String PYTHON_HOME="PYTHON_HOME";
public static final String PYTHON_HOME = "PYTHON_HOME";
/**
* resource.view.suffixs
@ -366,7 +367,6 @@ public final class Constants {
public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* default log cache rows num,output when reach the number
*/
@ -832,6 +832,7 @@ public final class Constants {
public static final int[] NOT_TERMINATED_STATES = new int[]{
ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal(),
ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
@ -865,7 +866,6 @@ public final class Constants {
public static final String PAGE_NUMBER = "pageNo";
/**
*
*/
@ -898,6 +898,7 @@ public final class Constants {
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver";
/**
* database type
@ -910,6 +911,7 @@ public final class Constants {
public static final String ORACLE = "ORACLE";
public static final String SQLSERVER = "SQLSERVER";
public static final String DB2 = "DB2";
public static final String PRESTO = "PRESTO";
/**
* jdbc url
@ -922,6 +924,7 @@ public final class Constants {
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String JDBC_PRESTO = "jdbc:presto://";
public static final String ADDRESS = "address";
@ -963,11 +966,11 @@ public final class Constants {
/**
* authorize writable perm
*/
public static final int AUTHORIZE_WRITABLE_PERM=7;
public static final int AUTHORIZE_WRITABLE_PERM = 7;
/**
* authorize readable perm
*/
public static final int AUTHORIZE_READABLE_PERM=4;
public static final int AUTHORIZE_READABLE_PERM = 4;
/**

23
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertEvent.java

@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
public enum AlertEvent {
SERVER_DOWN,TIME_OUT
}

23
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertWarnLevel.java

@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
public enum AlertWarnLevel {
MIDDLE,SERIOUS
}
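
The two enums added above only declare the event and warning-level values; how they are combined into an alert message is left to the alert server. A minimal sketch, where the describe helper is a made-up illustration rather than DolphinScheduler API:

import org.apache.dolphinscheduler.common.enums.AlertEvent;
import org.apache.dolphinscheduler.common.enums.AlertWarnLevel;

public class AlertEnumSketch {
    // Hypothetical helper: builds a human-readable summary from the two new enums.
    static String describe(AlertEvent event, AlertWarnLevel level) {
        return String.format("event=%s, warnLevel=%s", event.name(), level.name());
    }

    public static void main(String[] args) {
        // e.g. a lost worker might be reported as a serious SERVER_DOWN alert
        System.out.println(describe(AlertEvent.SERVER_DOWN, AlertWarnLevel.SERIOUS));
        System.out.println(describe(AlertEvent.TIME_OUT, AlertWarnLevel.MIDDLE));
    }
}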

4
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java

@ -33,6 +33,7 @@ public enum DbType {
* 5 oracle
* 6 sqlserver
* 7 db2
* 8 presto
*/
MYSQL(0, "mysql"),
POSTGRESQL(1, "postgresql"),
@ -41,7 +42,8 @@ public enum DbType {
CLICKHOUSE(4, "clickhouse"),
ORACLE(5, "oracle"),
SQLSERVER(6, "sqlserver"),
DB2(7, "db2");
DB2(7, "db2"),
PRESTO(8, "presto");
DbType(int code, String descp) {
this.code = code;
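
Presto support threads through the new Constants entries (COM_PRESTO_JDBC_DRIVER, JDBC_PRESTO, PRESTO), the DbType.PRESTO value, and the presto-jdbc dependency added to dolphinscheduler-common. A rough sketch of how those constants could be combined into a JDBC connection; the host, catalog, schema, and user below are made up, and a reachable Presto coordinator is assumed:

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

import org.apache.dolphinscheduler.common.Constants;

public class PrestoJdbcSketch {
    public static void main(String[] args) throws Exception {
        // JDBC_PRESTO is "jdbc:presto://"; host, port, catalog, and schema here are invented.
        String jdbcUrl = Constants.JDBC_PRESTO + "presto-host:8080/hive/default";

        // COM_PRESTO_JDBC_DRIVER is "com.facebook.presto.jdbc.PrestoDriver",
        // provided by the presto-jdbc dependency added in dolphinscheduler-common/pom.xml.
        Class.forName(Constants.COM_PRESTO_JDBC_DRIVER);

        Properties props = new Properties();
        props.setProperty("user", "dolphinscheduler"); // Presto requires a user name
        try (Connection connection = DriverManager.getConnection(jdbcUrl, props)) {
            System.out.println("connected: " + connection.isValid(5));
        }
    }
}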

51
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java

@ -14,16 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import java.util.HashMap;
import com.baomidou.mybatisplus.annotation.EnumValue;
import java.util.HashMap;
/**
* running status for workflow and task nodes
*
*/
public enum ExecutionStatus {
@ -41,6 +40,7 @@ public enum ExecutionStatus {
* 9 kill
* 10 waiting thread
* 11 waiting depend node complete
* 12 delay execution
*/
SUBMITTED_SUCCESS(0, "submit success"),
RUNNING_EXECUTION(1, "running"),
@ -53,9 +53,10 @@ public enum ExecutionStatus {
NEED_FAULT_TOLERANCE(8, "need fault tolerance"),
KILL(9, "kill"),
WAITTING_THREAD(10, "waiting thread"),
WAITTING_DEPEND(11, "waiting depend node complete");
WAITTING_DEPEND(11, "waiting depend node complete"),
DELAY_EXECUTION(12, "delay execution");
ExecutionStatus(int code, String descp){
ExecutionStatus(int code, String descp) {
this.code = code;
this.descp = descp;
}
@ -64,77 +65,85 @@ public enum ExecutionStatus {
private final int code;
private final String descp;
private static HashMap<Integer, ExecutionStatus> EXECUTION_STATUS_MAP=new HashMap<>();
private static HashMap<Integer, ExecutionStatus> EXECUTION_STATUS_MAP = new HashMap<>();
static {
for (ExecutionStatus executionStatus:ExecutionStatus.values()){
EXECUTION_STATUS_MAP.put(executionStatus.code,executionStatus);
for (ExecutionStatus executionStatus : ExecutionStatus.values()) {
EXECUTION_STATUS_MAP.put(executionStatus.code, executionStatus);
}
}
/**
* status is success
*
* @return status
*/
public boolean typeIsSuccess(){
public boolean typeIsSuccess() {
return this == SUCCESS;
}
/**
* status is failure
*
* @return status
*/
public boolean typeIsFailure(){
public boolean typeIsFailure() {
return this == FAILURE || this == NEED_FAULT_TOLERANCE || this == KILL;
}
/**
* status is finished
*
* @return status
*/
public boolean typeIsFinished(){
public boolean typeIsFinished() {
return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause()
|| typeIsStop();
}
/**
* status is waiting thread
*
* @return status
*/
public boolean typeIsWaitingThread(){
public boolean typeIsWaitingThread() {
return this == WAITTING_THREAD;
}
/**
* status is pause
*
* @return status
*/
public boolean typeIsPause(){
public boolean typeIsPause() {
return this == PAUSE;
}
/**
* status is pause
*
* @return status
*/
public boolean typeIsStop(){
public boolean typeIsStop() {
return this == STOP;
}
/**
* status is running
*
* @return status
*/
public boolean typeIsRunning(){
return this == RUNNING_EXECUTION || this == WAITTING_DEPEND;
public boolean typeIsRunning() {
return this == RUNNING_EXECUTION || this == WAITTING_DEPEND || this == DELAY_EXECUTION;
}
/**
* status is cancel
*
* @return status
*/
public boolean typeIsCancel(){
return this == KILL || this == STOP ;
public boolean typeIsCancel() {
return this == KILL || this == STOP;
}
public int getCode() {
@ -145,8 +154,8 @@ public enum ExecutionStatus {
return descp;
}
public static ExecutionStatus of(int status){
if(EXECUTION_STATUS_MAP.containsKey(status)){
public static ExecutionStatus of(int status) {
if (EXECUTION_STATUS_MAP.containsKey(status)) {
return EXECUTION_STATUS_MAP.get(status);
}
throw new IllegalArgumentException("invalid status : " + status);
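
A quick illustration of the new DELAY_EXECUTION state passing through ExecutionStatus.of and being classified as running but not finished:

import org.apache.dolphinscheduler.common.enums.ExecutionStatus;

public class ExecutionStatusSketch {
    public static void main(String[] args) {
        ExecutionStatus status = ExecutionStatus.of(12); // DELAY_EXECUTION, per this change

        System.out.println(status);                      // DELAY_EXECUTION
        System.out.println(status.typeIsRunning());      // true: delayed tasks count as running
        System.out.println(status.typeIsFinished());     // false: they are not finished yet
    }
}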

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java

@ -31,12 +31,13 @@ public enum TaskStateType {
/**
* convert task state to execute status integer array ;
*
* @param taskStateType task state type
* @return result of execution status
*/
public static int[] convert2ExecutStatusIntArray(TaskStateType taskStateType){
public static int[] convert2ExecutStatusIntArray(TaskStateType taskStateType) {
switch (taskStateType){
switch (taskStateType) {
case SUCCESS:
return new int[]{ExecutionStatus.SUCCESS.ordinal()};
case FAILED:
@ -51,6 +52,7 @@ public enum TaskStateType {
case RUNNING:
return new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
case WAITTING:
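
A quick check that the RUNNING group returned by convert2ExecutStatusIntArray now contains the new DELAY_EXECUTION state:

import java.util.Arrays;

import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskStateType;

public class TaskStateTypeSketch {
    public static void main(String[] args) {
        int[] runningStates = TaskStateType.convert2ExecutStatusIntArray(TaskStateType.RUNNING);

        // DELAY_EXECUTION is now part of the RUNNING group
        System.out.println(Arrays.toString(runningStates));
        System.out.println(Arrays.stream(runningStates)
                .anyMatch(code -> code == ExecutionStatus.DELAY_EXECUTION.ordinal())); // true
    }
}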

50
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java

@ -136,6 +136,11 @@ public class TaskNode {
@JsonSerialize(using = JSONUtils.JsonDataSerializer.class)
private String timeout;
/**
* delay execution time.
*/
private int delayTime;
public String getId() {
return id;
}
@ -310,24 +315,25 @@ public class TaskNode {
@Override
public String toString() {
return "TaskNode{" +
"id='" + id + '\'' +
", name='" + name + '\'' +
", desc='" + desc + '\'' +
", type='" + type + '\'' +
", runFlag='" + runFlag + '\'' +
", loc='" + loc + '\'' +
", maxRetryTimes=" + maxRetryTimes +
", retryInterval=" + retryInterval +
", params='" + params + '\'' +
", preTasks='" + preTasks + '\'' +
", extras='" + extras + '\'' +
", depList=" + depList +
", dependence='" + dependence + '\'' +
", taskInstancePriority=" + taskInstancePriority +
", timeout='" + timeout + '\'' +
", workerGroup='" + workerGroup + '\'' +
'}';
return "TaskNode{"
+ "id='" + id + '\''
+ ", name='" + name + '\''
+ ", desc='" + desc + '\''
+ ", type='" + type + '\''
+ ", runFlag='" + runFlag + '\''
+ ", loc='" + loc + '\''
+ ", maxRetryTimes=" + maxRetryTimes
+ ", retryInterval=" + retryInterval
+ ", params='" + params + '\''
+ ", preTasks='" + preTasks + '\''
+ ", extras='" + extras + '\''
+ ", depList=" + depList
+ ", dependence='" + dependence + '\''
+ ", taskInstancePriority=" + taskInstancePriority
+ ", timeout='" + timeout + '\''
+ ", workerGroup='" + workerGroup + '\''
+ ", delayTime=" + delayTime
+ '}';
}
public String getWorkerGroup() {
@ -353,4 +359,12 @@ public class TaskNode {
public void setWorkerGroupId(Integer workerGroupId) {
this.workerGroupId = workerGroupId;
}
public int getDelayTime() {
return delayTime;
}
public void setDelayTime(int delayTime) {
this.delayTime = delayTime;
}
}
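
A small sketch of the new delayTime field on TaskNode; the value is presumably the delay (in minutes) before the task is dispatched, and toString now reports it:

import org.apache.dolphinscheduler.common.model.TaskNode;

public class TaskNodeDelaySketch {
    public static void main(String[] args) {
        TaskNode taskNode = new TaskNode();
        taskNode.setDelayTime(5);                    // delay execution, presumably in minutes

        System.out.println(taskNode.getDelayTime()); // 5
        System.out.println(taskNode);                // toString now includes delayTime=5
    }
}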

26
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java

@ -14,13 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.beanutils.BeanMap;
import org.apache.commons.lang.StringUtils;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Provides utility methods and decorators for {@link Collection} instances.
@ -37,8 +43,9 @@ import java.util.*;
public class CollectionUtils {
private CollectionUtils() {
throw new IllegalStateException("CollectionUtils class");
throw new UnsupportedOperationException("Construct CollectionUtils");
}
/**
* Returns a new {@link Collection} containing <i>a</i> minus a subset of
* <i>b</i>. Only the elements of <i>b</i> that satisfy the predicate
@ -112,7 +119,6 @@ public class CollectionUtils {
return map;
}
/**
* Helper class to easily access cardinality properties of two collections.
*
@ -137,8 +143,8 @@ public class CollectionUtils {
* @param b the second collection
*/
public CardinalityHelper(final Iterable<? extends O> a, final Iterable<? extends O> b) {
cardinalityA = CollectionUtils.<O>getCardinalityMap(a);
cardinalityB = CollectionUtils.<O>getCardinalityMap(b);
cardinalityA = CollectionUtils.getCardinalityMap(a);
cardinalityB = CollectionUtils.getCardinalityMap(b);
}
/**
@ -239,9 +245,9 @@ public class CollectionUtils {
return count;
}
/**
* Removes certain attributes of each object in the list
*
* @param originList origin list
* @param exclusionSet exclusion set
* @param <T> T
@ -258,8 +264,8 @@ public class CollectionUtils {
Map<String, Object> instanceMap;
for (T instance : originList) {
Map<String, Object> dataMap = new BeanMap(instance);
instanceMap = new LinkedHashMap<>(16,0.75f,true);
for (Map.Entry<String, Object> entry: dataMap.entrySet()) {
instanceMap = new LinkedHashMap<>(16, 0.75f, true);
for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
if (exclusionSet.contains(entry.getKey())) {
continue;
}
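
A sketch of getCardinalityMap, which CardinalityHelper above calls once per collection; the element-to-count signature is inferred from that usage rather than shown in this diff:

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.dolphinscheduler.common.utils.CollectionUtils;

public class CardinalityMapSketch {
    public static void main(String[] args) {
        List<String> states = Arrays.asList("RUNNING", "SUCCESS", "RUNNING");

        // Counts occurrences of each element; CardinalityHelper builds two of these maps
        // (one per collection) to compare cardinalities.
        Map<String, Integer> cardinality = CollectionUtils.getCardinalityMap(states);
        System.out.println(cardinality.get("RUNNING")); // 2
        System.out.println(cardinality.get("SUCCESS")); // 1
    }
}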

65
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java

@ -14,19 +14,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.codec.binary.Base64;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* common utils
*/
@ -36,7 +39,7 @@ public class CommonUtils {
private static final Base64 BASE64 = new Base64();
private CommonUtils() {
throw new IllegalStateException("CommonUtils class");
throw new UnsupportedOperationException("Construct CommonUtils");
}
/**
@ -47,10 +50,10 @@ public class CommonUtils {
if (StringUtils.isEmpty(envPath)) {
URL envDefaultPath = CommonUtils.class.getClassLoader().getResource(Constants.ENV_PATH);
if (envDefaultPath != null){
if (envDefaultPath != null) {
envPath = envDefaultPath.getPath();
logger.debug("env path :{}", envPath);
}else{
} else {
envPath = "/etc/profile";
}
}
@ -59,31 +62,30 @@ public class CommonUtils {
}
/**
*
* @return is develop mode
*/
public static boolean isDevelopMode() {
return PropertyUtils.getBoolean(Constants.DEVELOPMENT_STATE, true);
}
/**
* if upload resource is HDFS and kerberos startup is true , else false
*
* @return true if upload resource is HDFS and kerberos startup
*/
public static boolean getKerberosStartupState(){
public static boolean getKerberosStartupState() {
String resUploadStartupType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE,false);
Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false);
return resUploadType == ResUploadType.HDFS && kerberosStartupState;
}
/**
* load kerberos configuration
*
* @throws Exception errors
*/
public static void loadKerberosConf()throws Exception{
public static void loadKerberosConf() throws Exception {
if (CommonUtils.getKerberosStartupState()) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
Configuration configuration = new Configuration();
@ -96,42 +98,45 @@ public class CommonUtils {
/**
* encode password
* @param password
* @return
*/
public static String encodePassword(String password) {
if(StringUtils.isEmpty(password)){return StringUtils.EMPTY; }
if (StringUtils.isEmpty(password)) {
return StringUtils.EMPTY;
}
//if encryption is not turned on, return directly
boolean encryptionEnable = PropertyUtils.getBoolean(Constants.DATASOURCE_ENCRYPTION_ENABLE,false);
if ( !encryptionEnable){ return password; }
boolean encryptionEnable = PropertyUtils.getBoolean(Constants.DATASOURCE_ENCRYPTION_ENABLE, false);
if (!encryptionEnable) {
return password;
}
// Using Base64 + salt to process password
String salt = PropertyUtils.getString(Constants.DATASOURCE_ENCRYPTION_SALT,Constants.DATASOURCE_ENCRYPTION_SALT_DEFAULT);
String passwordWithSalt = salt + new String(BASE64.encode(password.getBytes(StandardCharsets.UTF_8))) ;
String salt = PropertyUtils.getString(Constants.DATASOURCE_ENCRYPTION_SALT, Constants.DATASOURCE_ENCRYPTION_SALT_DEFAULT);
String passwordWithSalt = salt + new String(BASE64.encode(password.getBytes(StandardCharsets.UTF_8)));
return new String(BASE64.encode(passwordWithSalt.getBytes(StandardCharsets.UTF_8)));
}
/**
* decode password
* @param password
* @return
*/
public static String decodePassword(String password) {
if(StringUtils.isEmpty(password)){return StringUtils.EMPTY ; }
if (StringUtils.isEmpty(password)) {
return StringUtils.EMPTY;
}
//if encryption is not turned on, return directly
boolean encryptionEnable = PropertyUtils.getBoolean(Constants.DATASOURCE_ENCRYPTION_ENABLE,false);
if ( !encryptionEnable){ return password; }
boolean encryptionEnable = PropertyUtils.getBoolean(Constants.DATASOURCE_ENCRYPTION_ENABLE, false);
if (!encryptionEnable) {
return password;
}
// Using Base64 + salt to process password
String salt = PropertyUtils.getString(Constants.DATASOURCE_ENCRYPTION_SALT,Constants.DATASOURCE_ENCRYPTION_SALT_DEFAULT);
String passwordWithSalt = new String(BASE64.decode(password), StandardCharsets.UTF_8) ;
if(!passwordWithSalt.startsWith(salt)){
logger.warn("There is a password and salt mismatch: {} ",password);
String salt = PropertyUtils.getString(Constants.DATASOURCE_ENCRYPTION_SALT, Constants.DATASOURCE_ENCRYPTION_SALT_DEFAULT);
String passwordWithSalt = new String(BASE64.decode(password), StandardCharsets.UTF_8);
if (!passwordWithSalt.startsWith(salt)) {
logger.warn("There is a password and salt mismatch: {} ", password);
return password;
}
return new String(BASE64.decode(passwordWithSalt.substring(salt.length())), StandardCharsets.UTF_8) ;
return new String(BASE64.decode(passwordWithSalt.substring(salt.length())), StandardCharsets.UTF_8);
}
}
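
A sketch of the encode/decode round trip: with datasource encryption disabled (the default of false shown above), both methods pass the value through; with it enabled, they are Base64-plus-salt inverses.

import org.apache.dolphinscheduler.common.utils.CommonUtils;

public class PasswordCodecSketch {
    public static void main(String[] args) {
        String encoded = CommonUtils.encodePassword("dolphinscheduler");
        String decoded = CommonUtils.decodePassword(encoded);

        System.out.println(encoded);
        System.out.println("dolphinscheduler".equals(decoded)); // true whether or not encryption is enabled
    }
}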

Some files were not shown because too many files have changed in this diff.
